Compare commits
244 Commits
| SHA1 |
|---|
| 0c1e2ceb7f |
| 5fe883f878 |
| a7258aa7ec |
| 68f6b6b944 |
| 9e5e555552 |
| aa4fdd3545 |
| 1b89b64356 |
| c395c7906b |
| cc01398a16 |
| be228399d1 |
| 9bed829628 |
| 9c60a597f2 |
| ebc8c98fa4 |
| 517bb8e8d4 |
| ce4e8681ae |
| 26f5eaf438 |
| 81865a1790 |
| 92083924ea |
| 80ab0548f2 |
| 0a5b0c78d5 |
| 2a7eacf27c |
| 33628ccacf |
| e7f5e24b34 |
| adda72566d |
| 42939e99d7 |
| bb169577e2 |
| 0858efbc11 |
| 51cd044742 |
| a647317c2c |
| 098277e274 |
| 637e8ad9a0 |
| 5223e8ca40 |
| 7e63e7f0eb |
| 39b73b18bf |
| 90d504622a |
| 5f9a7a8be2 |
| 8ec081acd6 |
| 23607d9497 |
| dd7d73e871 |
| c1fc1a58d4 |
| db59bd6cd9 |
| 1f592f4b2f |
| f891aedea8 |
| ae191f3283 |
| 4dcfe89575 |
| e26299a768 |
| d95c58b5ce |
| 59706ab97e |
| 92ddf2c8c3 |
| 6961ffb92d |
| a64c90b28d |
| 6177856c9e |
| cf95d7dce4 |
| 79a5937b74 |
| 0ffb61484e |
| eaf88bdf40 |
| 613e0a11db |
| 570d346657 |
| c5ac715b2b |
| 52a60ca6dd |
| eedfb2bb86 |
| 30e9140d6c |
| 3f838080ca |
| 8c6ce1b5b9 |
| 4ee410a62e |
| 9d66d35a41 |
| 1b8512b321 |
| f973c016bc |
| 91ebd195ce |
| 069ac68c5e |
| 300f0e4fd9 |
| 5877f71527 |
| 7ec53b1796 |
| ae6b2f0d64 |
| e5f905bd48 |
| 9fa6c961c4 |
| d7ae7aaaf2 |
| 74ab0eb908 |
| a02f8b45d3 |
| ea3a8aa649 |
| 583437a83e |
| 4f1555a7f2 |
| 9b3352847c |
| 5e95781a0c |
| 2c821eaa64 |
| 8964d7074d |
| 18ed51e88e |
| c43c067211 |
| 97ca612bf1 |
| 7b52f6f303 |
| f7344eb90f |
| 1c24c49c0b |
| 6b1a92dbf6 |
| 045615d25d |
| 27af0beaad |
| 18cfb0d35a |
| b021e1ebc3 |
| 2c679bec5c |
| 282abf6920 |
| 144f1b3a40 |
| 07a19cfd1d |
| db8995a2ab |
| cfc373c87a |
| baa01835b3 |
| b9985ab166 |
| 1c7feb7d11 |
| f873b8d3e4 |
| 09a66d1d07 |
| f4f40edf43 |
| 84eff97870 |
| 1bcc169e93 |
| ab72a3bb8e |
| aff453ad8b |
| 750ef6092e |
| 17df8ecab5 |
| 8a2415f5a7 |
| 4884e9f50a |
| bce6e7971f |
| 68edfb500d |
| 790621b4b9 |
| dd4fe23419 |
| 56fa53c566 |
| 0cc348dd6e |
| 7952e33152 |
| b0878e4107 |
| 873441374f |
| 23bc3b20f6 |
| 1c64a57dbe |
| d6b35caa0e |
| 8e3acaead9 |
| fff032b1e8 |
| 4bceac5a4c |
| e09cdad6ec |
| 6384229a53 |
| 7e419c1fb4 |
| bda3f1caf4 |
| e76885847c |
| 32d5084fdc |
| f2055a355f |
| 3d366dd996 |
| 6fcc5c6dab |
| b761a8f4eb |
| 65a8d1d324 |
| 57515de719 |
| ca78d00f28 |
| ea84da989f |
| c562645db7 |
| eb04533714 |
| 7bc0fccaca |
| 84ae9bcfa0 |
| fad30adc56 |
| f961219dbe |
| c02010f2e5 |
| d55c680ef9 |
| c1eb0978ba |
| c6b8aecfd0 |
| 32e842e882 |
| 5738d569ea |
| 9dd2d4bbe4 |
| 0b35ead2a0 |
| a64ebd80f3 |
| be8387dfa4 |
| d6c4cf5029 |
| 0ada89b2fc |
| 7eb209de13 |
| 5965dbddbc |
| e40c5021bb |
| 65a3df891d |
| 43d77ee91e |
| 3e7a434d19 |
| 28eeeafd98 |
| 92350a8643 |
| cd5bd923b8 |
| b787f36a75 |
| b19b0a0496 |
| 6aa302d074 |
| 305175ab5f |
| 07f2eba137 |
| 4b0c7dc619 |
| a217fd07cc |
| 8b5ece2ba9 |
| d79634eea7 |
| 24f920d65b |
| bbc08fd268 |
| 5852f22023 |
| c2acd3b6cc |
| bd83a19eca |
| 35e7d3ad3c |
| b91a550644 |
| e72ce3576f |
| 7e9beead57 |
| bca0fb3c37 |
| 171b11f7bb |
| 4fe68cbf38 |
| 6e7573c8b3 |
| 8fb9dd8cf4 |
| d8a27e23d7 |
| c5d2b49ab3 |
| 9de3f0996c |
| fc4baff226 |
| 959e292be9 |
| 768f951b6f |
| 0d8a667b3f |
| f673ab8ba1 |
| 091420ae26 |
| eb32254379 |
| f9210ca7e1 |
| 47c9630cf5 |
| 48f228da44 |
| 87045ba8b0 |
| a58916a3b9 |
| e06c841537 |
| f139444213 |
| c4a9de598d |
| 709d65bd04 |
| 95766990fa |
| 4e8700c4f7 |
| 3ed5269f1d |
| 997295b3fc |
| 359a150eb4 |
| 304014c300 |
| a91c8ac4d0 |
| de37008261 |
| 4b506c8371 |
| 41c6e24c84 |
| 79cb82615f |
| 4cfa8ed9d6 |
| 47d53d22ad |
| 710680742a |
| 3a41b2161c |
| 3b1536b070 |
| 3a37f4b6f7 |
| c2b7d5aa98 |
| 269cbf1185 |
| c54aa2165c |
| 3268d030b6 |
| 86e7fc6d4b |
| af6a75af32 |
| 4303c8aa25 |
| b49a38c77e |
| f322f28a5c |
| 52fca55aa8 |
| b78ac80093 |
| 4f7ce8b4b2 |

`.github/workflows/chatgpt-cr.yml` (vendored, new file, 23 lines)

@@ -0,0 +1,23 @@

```yaml
name: 🤖 ChatGPT Code Review

permissions:
  contents: read
  pull-requests: write

on:
  pull_request:
    types: [opened, reopened, synchronize]

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: anc95/ChatGPT-CodeReview@main
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
          # Optional
          LANGUAGE: Chinese
          MODEL:
          top_p: 1
          temperature: 1
```

`.github/workflows/nodejs.yml` (vendored) — 60 changed lines

```diff
@@ -3,17 +3,7 @@
 
 name: Node.js CI
 
-on:
-  push:
-    branches:
-      - main
-      - master
-  pull_request:
-    branches:
-      - main
-      - master
-  schedule:
-    - cron: '0 2 * * *'
+on: [push, pull_request]
 
 jobs:
   test-mysql57-fs-nfs:
@@ -28,44 +18,43 @@ jobs:
         ports:
           - 3306:3306
         options: --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=5
       redis:
         # https://docs.github.com/en/actions/using-containerized-services/about-service-containers#example-mapping-redis-ports
         image: redis
         ports:
           # Opens tcp port 6379 on the host and service container
           - 6379:6379
 
     strategy:
       fail-fast: false
       matrix:
-        node-version: [16, 18]
+        node-version: [16, 18, 20]
         os: [ubuntu-latest]
 
     steps:
     - name: Checkout Git Source
-      uses: actions/checkout@v2
+      uses: actions/checkout@v3
 
     - name: Use Node.js ${{ matrix.node-version }}
-      uses: actions/setup-node@v1
+      uses: actions/setup-node@v3
       with:
         node-version: ${{ matrix.node-version }}
 
     # https://github.com/marketplace/actions/redis-server-in-github-actions#usage
     - name: Start Redis
       uses: supercharge/redis-github-action@1.4.0
       with:
         redis-version: 6
 
     - name: Install Dependencies
-      run: npm i
+      run: npm i -g npminstall && npminstall
 
     - name: Continuous Integration
       run: npm run ci
 
     - name: Code Coverage
-      uses: codecov/codecov-action@v1
+      uses: codecov/codecov-action@v3
       with:
         token: ${{ secrets.CODECOV_TOKEN }}
 
   test-mysql57-oss-nfs:
     runs-on: ${{ matrix.os }}
-    if:
+    if: |
       contains('
         refs/heads/main
         refs/heads/master
         refs/heads/dev
       ', github.ref)
@@ -80,27 +69,28 @@ jobs:
           - 3306:3306
         options: --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=5
 
       redis:
         # https://docs.github.com/en/actions/using-containerized-services/about-service-containers#example-mapping-redis-ports
         image: redis
         ports:
           # Opens tcp port 6379 on the host and service container
           - 6379:6379
 
     strategy:
       fail-fast: false
       matrix:
-        node-version: [16, 18]
+        node-version: [16, 18, 20]
         os: [ubuntu-latest]
 
     steps:
     - name: Checkout Git Source
-      uses: actions/checkout@v2
+      uses: actions/checkout@v3
 
     - name: Use Node.js ${{ matrix.node-version }}
-      uses: actions/setup-node@v1
+      uses: actions/setup-node@v3
       with:
         node-version: ${{ matrix.node-version }}
 
     # https://github.com/marketplace/actions/redis-server-in-github-actions#usage
     - name: Start Redis
       uses: supercharge/redis-github-action@1.4.0
       with:
         redis-version: 6
 
     - name: Install Dependencies
       run: npm i
 
@@ -108,12 +98,12 @@ jobs:
       run: npm run ci
       env:
         CNPMCORE_NFS_TYPE: oss
-        CNPMCORE_NFS_OSS_BUCKET: cnpmcore-unittest-github
+        CNPMCORE_NFS_OSS_BUCKET: cnpmcore-unittest-github-nodejs-${{ matrix.node-version }}
         CNPMCORE_NFS_OSS_ENDPOINT: https://oss-us-west-1.aliyuncs.com
         CNPMCORE_NFS_OSS_ID: ${{ secrets.CNPMCORE_NFS_OSS_ID }}
         CNPMCORE_NFS_OSS_SECRET: ${{ secrets.CNPMCORE_NFS_OSS_SECRET }}
 
     - name: Code Coverage
-      uses: codecov/codecov-action@v1
+      uses: codecov/codecov-action@v3
       with:
         token: ${{ secrets.CODECOV_TOKEN }}
 
```

`.github/workflows/release.yml` (vendored, new file, 14 lines)

@@ -0,0 +1,14 @@

```yaml
name: Release
on:
  push:
    branches: [ master ]

jobs:
  release:
    name: Node.js
    uses: node-modules/github-actions/.github/workflows/node-release.yml@master
    secrets:
      NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
      GIT_TOKEN: ${{ secrets.GIT_TOKEN }}
    with:
      checkTest: false
```

`.github/workflows/stale.yml` (vendored, deleted file, 27 lines)

@@ -1,27 +0,0 @@

```yaml
# This workflow warns and then closes issues and PRs that have had no activity for a specified amount of time.
#
# You can adjust the behavior by modifying this file.
# For more information, see:
# https://github.com/actions/stale
name: Mark stale issues and pull requests

on:
  schedule:
    - cron: '45 15 * * *'

jobs:
  stale:

    runs-on: ubuntu-latest
    permissions:
      issues: write
      pull-requests: write

    steps:
      - uses: actions/stale@v3
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          stale-issue-message: 'Stale issue message'
          stale-pr-message: 'Stale pull request message'
          stale-issue-label: 'no-issue-activity'
          stale-pr-label: 'no-pr-activity'
```

`.gitignore` (vendored) — 5 changed lines

```diff
@@ -13,6 +13,7 @@ config/config.prod.ts
 config/**/*.js
 app/**/*.js
 test/**/*.js
 app.js
 
 .cnpmcore
+.cnpmcore_unittest
@@ -75,7 +76,7 @@ typings/
 
 # Output of 'npm pack'
 *.tgz
-!test/fixtures/*.tgz
+!test/fixtures/**/*.tgz
 
 # Yarn Integrity file
 .yarn-integrity
@@ -116,4 +117,6 @@ dist
 .tern-port
 
 .idea
 .DS_Store
+run
+!test/ctx_register.js
 
```

`.vscode/launch.json` (vendored, new file, 40 lines)

@@ -0,0 +1,40 @@

```jsonc
{
  // Use IntelliSense to learn about possible attributes.
  // Hover to view descriptions of existing attributes.
  // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
  "version": "0.2.0",
  "configurations": [
    {
      "type": "node",
      "request": "launch",
      "name": "Egg Debug",
      "runtimeExecutable": "npm",
      "runtimeArgs": [
        "run",
        "dev",
        "--",
        "--inspect-brk"
      ],
      "console": "integratedTerminal",
      "restart": true,
      "protocol": "auto",
      "port": 9229,
      "autoAttachChildProcesses": true
    },
    {
      "type": "node",
      "request": "launch",
      "name": "Egg Test",
      "runtimeExecutable": "npm",
      "runtimeArgs": [
        "run",
        "test-local",
        "--",
        "--inspect-brk"
      ],
      "protocol": "auto",
      "port": 9229,
      "autoAttachChildProcesses": true
    }
  ]
}
```

`CHANGELOG.md` (new file, 422 lines)

@@ -0,0 +1,422 @@
|
||||
# Changelog
|
||||
|
||||
## [3.23.2](https://github.com/cnpm/cnpmcore/compare/v3.23.1...v3.23.2) (2023-05-31)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* unpkg support non-npm publish tgz file ([#485](https://github.com/cnpm/cnpmcore/issues/485)) ([5fe883f](https://github.com/cnpm/cnpmcore/commit/5fe883f878014639d9978aadec152d54e1d9ff3e)), closes [/github.com/cnpm/cnpmcore/issues/452#issuecomment-1570077310](https://github.com/cnpm//github.com/cnpm/cnpmcore/issues/452/issues/issuecomment-1570077310)
|
||||
|
||||
## [3.23.1](https://github.com/cnpm/cnpmcore/compare/v3.23.0...v3.23.1) (2023-05-30)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* use package version publishTime instead of file mtime ([#483](https://github.com/cnpm/cnpmcore/issues/483)) ([68f6b6b](https://github.com/cnpm/cnpmcore/commit/68f6b6b94406c446f639161fc995efe11d1aeb6d))
|
||||
|
||||
## [3.23.0](https://github.com/cnpm/cnpmcore/compare/v3.22.3...v3.23.0) (2023-05-29)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* export getUserAndToken ([#480](https://github.com/cnpm/cnpmcore/issues/480)) ([aa4fdd3](https://github.com/cnpm/cnpmcore/commit/aa4fdd3545caeca7ad80c9fee3d2e1b7862ebef3))
|
||||
|
||||
## [3.22.3](https://github.com/cnpm/npmcore/compare/v3.22.2...v3.22.3) (2023-05-29)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* unpkg redirect ([#479](https://github.com/cnpm/npmcore/issues/479)) ([c395c79](https://github.com/cnpm/npmcore/commit/c395c7906b8ab355743f04f0a1edce2ff3571979))
|
||||
|
||||
## [3.22.2](https://github.com/cnpm/npmcore/compare/v3.22.1...v3.22.2) (2023-05-25)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* use S3 url to download file ([#477](https://github.com/cnpm/npmcore/issues/477)) ([9bed829](https://github.com/cnpm/npmcore/commit/9bed8296285bdb2f4273f77f89ddb9ec6c31693b)), closes [/github.com/cnpm/cnpmcore/issues/472#issuecomment-1562452369](https://github.com/cnpm//github.com/cnpm/cnpmcore/issues/472/issues/issuecomment-1562452369)
|
||||
|
||||
## [3.22.1](https://github.com/cnpm/npmcore/compare/v3.22.0...v3.22.1) (2023-05-25)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* refactor config type ([#476](https://github.com/cnpm/npmcore/issues/476)) ([ebc8c98](https://github.com/cnpm/npmcore/commit/ebc8c98fa48c589657cade62d4f88bb7e52b62d1))
|
||||
|
||||
## [3.22.0](https://github.com/cnpm/npmcore/compare/v3.21.0...v3.22.0) (2023-05-25)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* sync prisma binary from R2 ([#474](https://github.com/cnpm/npmcore/issues/474)) ([ce4e868](https://github.com/cnpm/npmcore/commit/ce4e8681aeab9f3a45b467806e3c0dcc035db587))
|
||||
|
||||
## [3.21.0](https://github.com/cnpm/npmcore/compare/v3.20.3...v3.21.0) (2023-05-21)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* easy config ([#468](https://github.com/cnpm/npmcore/issues/468)) ([9208392](https://github.com/cnpm/npmcore/commit/92083924eaa3fbcd5f3c651d0ddc056d9affba30))
|
||||
|
||||
## [3.20.3](https://github.com/cnpm/npmcore/compare/v3.20.2...v3.20.3) (2023-05-18)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* unpublish idempotent ([#466](https://github.com/cnpm/npmcore/issues/466)) ([2a7eacf](https://github.com/cnpm/npmcore/commit/2a7eacf27c38ca5443f8b04b0c6abfd500869807))
|
||||
|
||||
## [3.20.2](https://github.com/cnpm/npmcore/compare/v3.20.1...v3.20.2) (2023-05-06)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* set cache-control default value to "public, max-age=300" ([#462](https://github.com/cnpm/npmcore/issues/462)) ([adda725](https://github.com/cnpm/npmcore/commit/adda72566d270171cad18f3fabe203cae2e6e34f))
|
||||
|
||||
## [3.20.1](https://github.com/cnpm/npmcore/compare/v3.20.0...v3.20.1) (2023-05-06)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* use nfs download api ([#461](https://github.com/cnpm/npmcore/issues/461)) ([bb16957](https://github.com/cnpm/npmcore/commit/bb169577e2be56c2ac3e9ca509b6a3cfb2d28cdc))
|
||||
|
||||
## [3.20.0](https://github.com/cnpm/npmcore/compare/v3.19.3...v3.20.0) (2023-05-06)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* enable sql logger ([#460](https://github.com/cnpm/npmcore/issues/460)) ([51cd044](https://github.com/cnpm/npmcore/commit/51cd0447425ca0a96f328bd05d21168206274289))
|
||||
|
||||
## [3.19.3](https://github.com/cnpm/npmcore/compare/v3.19.2...v3.19.3) (2023-05-06)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* ignore hidden dir files ([#459](https://github.com/cnpm/npmcore/issues/459)) ([637e8ad](https://github.com/cnpm/npmcore/commit/637e8ad9a04d36370ce6044b67c7a3ba4f89dd1b))
|
||||
|
||||
## [3.19.2](https://github.com/cnpm/npmcore/compare/v3.19.1...v3.19.2) (2023-05-05)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* ignore non-file on tar entry filter ([#458](https://github.com/cnpm/npmcore/issues/458)) ([7e63e7f](https://github.com/cnpm/npmcore/commit/7e63e7f0eb2d324275d72293c40d3d7ac060bf73))
|
||||
|
||||
## [3.19.1](https://github.com/cnpm/npmcore/compare/v3.19.0...v3.19.1) (2023-05-05)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* download tgz file to local file before untar it ([#457](https://github.com/cnpm/npmcore/issues/457)) ([90d5046](https://github.com/cnpm/npmcore/commit/90d504622a6ed911e3df3f0c4204ef82b75be714))
|
||||
|
||||
## [3.19.0](https://github.com/cnpm/npmcore/compare/v3.18.0...v3.19.0) (2023-05-05)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* support unpkg features ([#456](https://github.com/cnpm/npmcore/issues/456)) ([8ec081a](https://github.com/cnpm/npmcore/commit/8ec081acd675e9738647f5b8791c89aa905dee5d))
|
||||
|
||||
## [3.18.0](https://github.com/cnpm/npmcore/compare/v3.17.1...v3.18.0) (2023-05-05)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* sync chrome-for-testing binary ([#455](https://github.com/cnpm/npmcore/issues/455)) ([dd7d73e](https://github.com/cnpm/npmcore/commit/dd7d73e871659401e14d528b9e31b7caa01e66fa))
|
||||
|
||||
## [3.17.1](https://github.com/cnpm/npmcore/compare/v3.17.0...v3.17.1) (2023-05-04)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* calculate _hasShrinkwrap on server-side if not present ([#450](https://github.com/cnpm/npmcore/issues/450)) ([db59bd6](https://github.com/cnpm/npmcore/commit/db59bd6cd9ebf678ea16d739b1d7ef11c5349f2f))
|
||||
|
||||
## [3.17.0](https://github.com/cnpm/npmcore/compare/v3.16.0...v3.17.0) (2023-04-25)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add source registry name in manifest ([#448](https://github.com/cnpm/npmcore/issues/448)) ([f891aed](https://github.com/cnpm/npmcore/commit/f891aedea822eeef6e5ffa7956423bda845fc696))
|
||||
|
||||
## [3.16.0](https://github.com/cnpm/npmcore/compare/v3.15.0...v3.16.0) (2023-04-21)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add health checker for slb ([#445](https://github.com/cnpm/npmcore/issues/445)) ([4dcfe89](https://github.com/cnpm/npmcore/commit/4dcfe89575bd2bedbd34228020e3f7f9dfdf38b9))
|
||||
|
||||
## [3.15.0](https://github.com/cnpm/npmcore/compare/v3.14.0...v3.15.0) (2023-04-21)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* create sync task with auth header ([#442](https://github.com/cnpm/npmcore/issues/442)) ([d95c58b](https://github.com/cnpm/npmcore/commit/d95c58b5ce1f6c3137f3d8b09c10a12ed3a8af5e))
|
||||
|
||||
## [3.14.0](https://github.com/cnpm/npmcore/compare/v3.13.2...v3.14.0) (2023-04-20)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* support granular token ([#443](https://github.com/cnpm/npmcore/issues/443)) ([92ddf2c](https://github.com/cnpm/npmcore/commit/92ddf2c8c35fbf9dee458926e7a6d505fbbe06f1))
|
||||
|
||||
## [3.13.2](https://github.com/cnpm/npmcore/compare/v3.13.1...v3.13.2) (2023-04-10)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* skip tag name out of utf8mb3 ([#440](https://github.com/cnpm/npmcore/issues/440)) ([a64c90b](https://github.com/cnpm/npmcore/commit/a64c90b28de658f9933fa95ff89d272a8a97f95d))
|
||||
|
||||
## [3.13.1](https://github.com/cnpm/npmcore/compare/v3.13.0...v3.13.1) (2023-04-10)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* update webauth default URL to registry ([#432](https://github.com/cnpm/npmcore/issues/432)) ([cf95d7d](https://github.com/cnpm/npmcore/commit/cf95d7dce4d7a05056eadc09024958e9b35df9b9))
|
||||
|
||||
## [3.13.0](https://github.com/cnpm/npmcore/compare/v3.12.1...v3.13.0) (2023-04-07)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* support npm access command ([#436](https://github.com/cnpm/npmcore/issues/436)) ([0ffb614](https://github.com/cnpm/npmcore/commit/0ffb61484eed78e1a819cb2a3af3f225183246cb))
|
||||
|
||||
## [3.12.1](https://github.com/cnpm/npmcore/compare/v3.12.0...v3.12.1) (2023-04-07)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* allow to remove the package entity ([#437](https://github.com/cnpm/npmcore/issues/437)) ([613e0a1](https://github.com/cnpm/npmcore/commit/613e0a11db65d6222eefb18462fceaf1023231d3))
|
||||
|
||||
## [3.12.0](https://github.com/cnpm/npmcore/compare/v3.11.2...v3.12.0) (2023-04-06)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* allow admin to sync package only ([#434](https://github.com/cnpm/npmcore/issues/434)) ([c5ac715](https://github.com/cnpm/npmcore/commit/c5ac715b2b48af8a353a0374631f35f46c66a740)), closes [#412](https://github.com/cnpm/npmcore/issues/412)
|
||||
|
||||
## [3.11.2](https://github.com/cnpm/npmcore/compare/v3.11.1...v3.11.2) (2023-04-03)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* init sync spec registry ([#433](https://github.com/cnpm/npmcore/issues/433)) ([eedfb2b](https://github.com/cnpm/npmcore/commit/eedfb2bb86e535ad8258d4dbb85e43917ac023e1))
|
||||
|
||||
## [3.11.1](https://github.com/cnpm/npmcore/compare/v3.11.0...v3.11.1) (2023-03-30)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* timeout handler not work ([#430](https://github.com/cnpm/npmcore/issues/430)) ([3f83808](https://github.com/cnpm/npmcore/commit/3f838080cac7ecbf572105fa3869a62a0400d3a7))
|
||||
* update login assets cdn url ([#429](https://github.com/cnpm/npmcore/issues/429)) ([4ee410a](https://github.com/cnpm/npmcore/commit/4ee410a62eea250f2db9ef26c8508eae43a27a83))
|
||||
|
||||
## [3.11.1](https://github.com/cnpm/npmcore/compare/v3.11.0...v3.11.1) (2023-03-28)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* update login assets cdn url ([#429](https://github.com/cnpm/npmcore/issues/429)) ([4ee410a](https://github.com/cnpm/npmcore/commit/4ee410a62eea250f2db9ef26c8508eae43a27a83))
|
||||
|
||||
## [3.11.0](https://github.com/cnpm/npmcore/compare/v3.10.0...v3.11.0) (2023-03-27)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* support webauthn ([#422](https://github.com/cnpm/npmcore/issues/422)) ([1b8512b](https://github.com/cnpm/npmcore/commit/1b8512b3218e05d440f7cff6b95e0a1f65c0557d))
|
||||
|
||||
## [3.10.0](https://github.com/cnpm/npmcore/compare/v3.9.0...v3.10.0) (2023-03-27)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* redirect not found can be false when syncMode='none' ([#428](https://github.com/cnpm/npmcore/issues/428)) ([91ebd19](https://github.com/cnpm/npmcore/commit/91ebd195ce34c895feff70a71abacba8df2a7538))
|
||||
|
||||
## [3.9.0](https://github.com/cnpm/npmcore/compare/v3.8.0...v3.9.0) (2023-03-20)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* redis weak ([#426](https://github.com/cnpm/npmcore/issues/426)) ([300f0e4](https://github.com/cnpm/npmcore/commit/300f0e4fd97e1e3f181991841442e771b1451185))
|
||||
|
||||
## [3.8.0](https://github.com/cnpm/npmcore/compare/v3.7.0...v3.8.0) (2023-03-08)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* Support for migrating packages into current registry ([#417](https://github.com/cnpm/npmcore/issues/417)) ([e5f905b](https://github.com/cnpm/npmcore/commit/e5f905bd4834ae31580ed0bc2d8e5b750800275f))
|
||||
|
||||
## [3.7.0](https://github.com/cnpm/npmcore/compare/v3.6.0...v3.7.0) (2023-03-01)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* retry changes task when current work error ([#414](https://github.com/cnpm/npmcore/issues/414)) ([d7ae7aa](https://github.com/cnpm/npmcore/commit/d7ae7aaaf2322985945967dc9e849c2fd798fc77))
|
||||
|
||||
## [3.6.0](https://github.com/cnpm/npmcore/compare/v3.5.0...v3.6.0) (2023-02-27)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add integrate doc ([#413](https://github.com/cnpm/npmcore/issues/413)) ([a02f8b4](https://github.com/cnpm/npmcore/commit/a02f8b45d3f1436f392330b85b68101b74c43332))
|
||||
|
||||
## [3.5.0](https://github.com/cnpm/npmcore/compare/v3.4.3...v3.5.0) (2023-02-21)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* support webauth infra ([#411](https://github.com/cnpm/npmcore/issues/411)) ([583437a](https://github.com/cnpm/npmcore/commit/583437a83ea8cb04667629b70d637891808ae3dc))
|
||||
|
||||
## [3.4.3](https://github.com/cnpm/npmcore/compare/v3.4.2...v3.4.3) (2023-02-15)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* changesStream suspend ([#408](https://github.com/cnpm/npmcore/issues/408)) ([2c821ea](https://github.com/cnpm/npmcore/commit/2c821eaa64a98b5515327ae5ffad0af2358a8554))
|
||||
|
||||
## [3.4.2](https://github.com/cnpm/npmcore/compare/v3.4.1...v3.4.2) (2023-02-14)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* distinct processing task ([#406](https://github.com/cnpm/npmcore/issues/406)) ([c43c067](https://github.com/cnpm/npmcore/commit/c43c067211e80f402aa645cd9da36ae1e8c42153))
|
||||
|
||||
## [3.4.1](https://github.com/cnpm/npmcore/compare/v3.4.0...v3.4.1) (2023-02-13)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* forbidden non-ascii binary subpath ([#405](https://github.com/cnpm/npmcore/issues/405)) ([7b52f6f](https://github.com/cnpm/npmcore/commit/7b52f6f30332a9d83be4f958bd3c9b0577021507))
|
||||
|
||||
## [3.4.0](https://github.com/cnpm/npmcore/compare/v3.3.2...v3.4.0) (2023-02-10)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* sync delete mode ([#398](https://github.com/cnpm/npmcore/issues/398)) ([27af0be](https://github.com/cnpm/npmcore/commit/27af0beaadba4e83177946100c3d47391c1c6b18))
|
||||
|
||||
## [3.3.2](https://github.com/cnpm/npmcore/compare/v3.3.1...v3.3.2) (2023-02-10)
|
||||
|
||||
|
||||
### Reverts
|
||||
|
||||
* Revert "fix: should sync package deps by default (#400)" (#401) ([b021e1e](https://github.com/cnpm/npmcore/commit/b021e1ebc31b2eea118694b0816eeb99e5112f7d)), closes [#400](https://github.com/cnpm/npmcore/issues/400) [#401](https://github.com/cnpm/npmcore/issues/401)
|
||||
|
||||
## [3.3.1](https://github.com/cnpm/npmcore/compare/v3.3.0...v3.3.1) (2023-02-10)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* should sync package deps by default ([#400](https://github.com/cnpm/npmcore/issues/400)) ([282abf6](https://github.com/cnpm/npmcore/commit/282abf692045f4660831ceacf7e1e7851ff58241))
|
||||
|
||||
## [3.3.0](https://github.com/cnpm/npmcore/compare/v3.2.6...v3.3.0) (2023-02-09)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* auto sync package's optionalDependencies ([#399](https://github.com/cnpm/npmcore/issues/399)) ([07a19cf](https://github.com/cnpm/npmcore/commit/07a19cfd1df84b4dce79e3fad666c91635d13d6e))
|
||||
|
||||
## [3.2.6](https://github.com/cnpm/npmcore/compare/v3.2.5...v3.2.6) (2023-02-05)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* should init binary adapter before reuse it ([#393](https://github.com/cnpm/npmcore/issues/393)) ([b9985ab](https://github.com/cnpm/npmcore/commit/b9985ab1660a4b5a7957988d33be273c07ac2f9d))
|
||||
|
||||
## [3.2.5](https://github.com/cnpm/npmcore/compare/v3.2.4...v3.2.5) (2023-02-03)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* allow publish 10mb tarball package by default ([#391](https://github.com/cnpm/npmcore/issues/391)) ([f873b8d](https://github.com/cnpm/npmcore/commit/f873b8d3e419fba22e9a3bbf906a7c2b5a3db14d))
|
||||
|
||||
## [3.2.4](https://github.com/cnpm/npmcore/compare/v3.2.3...v3.2.4) (2023-02-02)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* skip download exists binary file ([#389](https://github.com/cnpm/npmcore/issues/389)) ([f4f40ed](https://github.com/cnpm/npmcore/commit/f4f40edf43452e2ffdaa626d3dd4281cf5391d7d))
|
||||
|
||||
## [3.2.3](https://github.com/cnpm/npmcore/compare/v3.2.2...v3.2.3) (2023-01-30)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* config path ([#385](https://github.com/cnpm/npmcore/issues/385)) ([ab72a3b](https://github.com/cnpm/npmcore/commit/ab72a3bb8e0d429d1c96adbbc2a95ccf3ef11388))
|
||||
|
||||
## [3.2.2](https://github.com/cnpm/npmcore/compare/v3.2.1...v3.2.2) (2023-01-29)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* import path ([#384](https://github.com/cnpm/npmcore/issues/384)) ([750ef60](https://github.com/cnpm/npmcore/commit/750ef6092ef35c73056081c620ff83bdc200bd52))
|
||||
|
||||
## [3.2.1](https://github.com/cnpm/npmcore/compare/v3.2.0...v3.2.1) (2023-01-29)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* api binary host config ([#383](https://github.com/cnpm/npmcore/issues/383)) ([8a2415f](https://github.com/cnpm/npmcore/commit/8a2415f5a7e3e6ac5fb8df48ae2c2bd51ebf460e))
|
||||
|
||||
## [3.2.0](https://github.com/cnpm/npmcore/compare/v3.1.2...v3.2.0) (2023-01-28)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* update index json ([#379](https://github.com/cnpm/npmcore/issues/379)) ([bce6e79](https://github.com/cnpm/npmcore/commit/bce6e7971f21a9a3bad9a70d85214ce04462f0c4))
|
||||
|
||||
## [3.1.2](https://github.com/cnpm/npmcore/compare/v3.1.1...v3.1.2) (2023-01-28)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* binary path ([#381](https://github.com/cnpm/npmcore/issues/381)) ([790621b](https://github.com/cnpm/npmcore/commit/790621b4b941f06ac075423c139c365bd440fb9e))
|
||||
|
||||
## [3.1.1](https://github.com/cnpm/npmcore/compare/v3.1.0...v3.1.1) (2023-01-18)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* not exists binary should return 404 ([#377](https://github.com/cnpm/npmcore/issues/377)) ([0cc348d](https://github.com/cnpm/npmcore/commit/0cc348dd6e92ef8666d98991e8a6135a267ac2a6))
|
||||
|
||||
## [3.1.0](https://github.com/cnpm/npmcore/compare/v3.0.1...v3.1.0) (2023-01-18)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* support auto sync when package not found ([#337](https://github.com/cnpm/npmcore/issues/337)) ([8734413](https://github.com/cnpm/npmcore/commit/873441374fa67c2ec827ad7b9157d6f2f5dec217)), closes [#335](https://github.com/cnpm/npmcore/issues/335) [/github.com/cnpm/cnpmcore/pull/50/files#diff-97cbafa75ed0bae6a1f0a2df0676c00f56b9cf8944b04ddb82d6dd0ab141961](https://github.com/cnpm//github.com/cnpm/cnpmcore/pull/50/files/issues/diff-97cbafa75ed0bae6a1f0a2df0676c00f56b9cf8944b04ddb82d6dd0ab141961)
|
||||
|
||||
## [3.0.1](https://github.com/cnpm/npmcore/compare/v3.0.0...v3.0.1) (2023-01-18)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* try to show latest version on sync log ([#375](https://github.com/cnpm/npmcore/issues/375)) ([1c64a57](https://github.com/cnpm/npmcore/commit/1c64a57dbe65f062751b11df7e5aa698e8fb1c77))
|
||||
|
||||
## [3.0.0](https://github.com/cnpm/npmcore/compare/v2.10.1...v3.0.0) (2023-01-17)
|
||||
|
||||
|
||||
### ⚠ BREAKING CHANGES
|
||||
|
||||
* use SingletonProto instead of ContextProto
|
||||
|
||||
Co-authored-by: killagu <killa123@126.com>
|
||||
|
||||
### Code Refactoring
|
||||
|
||||
* use tegg v3 ([#370](https://github.com/cnpm/npmcore/issues/370)) ([8e3acae](https://github.com/cnpm/npmcore/commit/8e3acaead9d0b9d54f0d62444d51d8a34e0842ef))
|
||||
|
||||
## [2.10.1](https://github.com/cnpm/npmcore/compare/v2.10.0...v2.10.1) (2023-01-08)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* export _cnpmcore_publish_time on abbreviated manifests ([#374](https://github.com/cnpm/npmcore/issues/374)) ([4bceac5](https://github.com/cnpm/npmcore/commit/4bceac5a4c94f8e8624ae1113ad1c5e69a5a2ae1))
|
||||
|
||||
## [2.10.0](https://github.com/cnpm/npmcore/compare/v2.9.1...v2.10.0) (2023-01-05)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* unpublish pkg when upstream block ([#372](https://github.com/cnpm/npmcore/issues/372)) ([7e419c1](https://github.com/cnpm/npmcore/commit/7e419c1fb4fe297adea86cb5d9eae4c8e77e2aec))
|
||||
|
||||
## [2.9.1](https://github.com/cnpm/npmcore/compare/v2.9.0...v2.9.1) (2022-12-17)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* Auto enable npm publish on github action ([3d366dd](https://github.com/cnpm/npmcore/commit/3d366dd996161f8f08ae43bde29b7768f5a5241c))
|
||||
* fix tsc:prod ([ca78d00](https://github.com/cnpm/npmcore/commit/ca78d00f28930180a9374c01d2a9b3b47d6e9db3))
|
||||

`DEVELOPER.md` — 116 changed lines

````diff
@@ -8,31 +8,30 @@
 
 ```bash
 # Start the local dependency services
-$ docker-compose up -d
+docker-compose up -d
 
 # Stop the local dependency services
-$ docker-compose down
+docker-compose down
 ```
 
 > See the [documentation](./docs/setup.md) for initializing the dependency services manually
 
 
 ## Local development
 
 ### Install dependencies
 
 ```bash
-$ npm install
+npm install
 ```
 
 ### Run in development
 
 ```bash
 # Initialize the database
-$ MYSQL_DATABASE=cnpmcore npm run prepare-database
+MYSQL_DATABASE=cnpmcore bash ./prepare-database.sh
 
 # Start the web service
-$ DEBUG_LOCAL_SQL=true npm run dev
+npm run dev
 
 # Access it
 curl -v http://127.0.0.1:7001
````
````diff
@@ -41,25 +40,12 @@ curl -v http://127.0.0.1:7001
 
 ### Unit tests
 
 ```bash
-$ npm run test
+npm run test
 ```
 
-Unit test writing conventions:
-
-- the assert assertion library must be imported with require
-
-```ts
-import assert = require('assert');
-```
-
-> CAUTION: don't use `import assert from 'assert'`
-> Just use old style import assert = require('assert') for assert module. This is limitation.
-> See https://github.com/power-assert-js/espower-typescript#caution-dont-use-import-assert-from-assert
 
 
 ## Project structure
 
-```
+```txt
 app
 ├── common
 │ └── adapter
````
````diff
@@ -72,6 +58,8 @@ app
 │ └── controller
 ├── repository
 │ └── model
+├── infra
+│ └── NFSClientAdapter.ts
 └── test
 ├── control
 │ └── response_time.test.js
````
````diff
@@ -80,31 +68,69 @@ app
 ```
 
 common:
 
 - util: global utility helpers
 - adapter: calls out to external services
 
 core:
 
 - entity: core domain models that implement business behavior
 - event: async event definitions and their consumers, chaining business flows together
 - service: core business logic
 - util: helpers internal to core, not exposed externally
 
 repository:
 
 - model: ORM models, data definitions
 - XXXRepository: repository interfaces for storage and query flows
 
 port:
 
 - controller: HTTP controller
 
+infra:
+
+Real adapter implementations built on top of the PaaS infrastructure. cnpmcore ships with one built-in implementation; an enterprise-customized cnpmcore should implement its own infra module based on its own
+PaaS environment.
+
+- NFSClientAdapter.ts
+- QueueAdapter.ts
+- AuthAdapter.ts
+
+## Layered architecture dependency diagram
+
+```txt
++--------------------------------+ +--------+ +----------+
+| Controller | | | | |
++----^-------------^-------------+ | | | |
+| | | | | |
+| inject | inject | | | |
+| | | | | |
+| +----------+-------------+ | | | |
+| | Service | | Entity | | |
+| +-----------^------------+ | | | |
+| | | | | Common |
+| | inject | | | |
+| | | | | |
++----+--------------+------------+ | | | |
+| Repository | | | | |
++-------------------^------------+ +---^----| | |
+| | | |
+| inject ORM | | |
+| | | |
++-----------+------------+ | | |
+| Model +<-----+ | |
++------------------------+ +----------+
+```
+
 ## Controller development guide
 
 Only HTTP Controllers are supported for now; the code lives under the `app/port/controller` directory.
 They are implemented with class inheritance; the class relationships look roughly like this:
 
-```
+```txt
 +----------------------+ +----------------------+ +---------------+
-| PackageController.ts | | PackageTagController | | XxxController |
+| PackageController | | PackageTagController | | XxxController |
 +---------------+------+ +---+------------------+ +--+------------+
 | | |
 | extends | extends | extends
````
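To make the Controller → Service → Repository direction above concrete, here is a minimal, self-contained sketch of how the layering maps onto tegg-style injection. The `Widget*` classes are purely illustrative and are not part of cnpmcore; the `@eggjs/tegg` decorators are assumed from the framework rather than taken from this document.

```ts
import {
  AccessLevel, HTTPController, HTTPMethod, HTTPMethodEnum, HTTPParam, Inject, SingletonProto,
} from '@eggjs/tegg';

@SingletonProto({ accessLevel: AccessLevel.PUBLIC })
export class WidgetRepository {
  // Repository layer: would normally delegate to the Model/ORM layer.
  async findWidget(name: string): Promise<{ name: string } | null> {
    return { name };
  }
}

@SingletonProto({ accessLevel: AccessLevel.PUBLIC })
export class WidgetService {
  @Inject()
  private readonly widgetRepository: WidgetRepository;

  // Service layer: core business logic, depends on the Repository.
  async showWidget(name: string) {
    return await this.widgetRepository.findWidget(name);
  }
}

@HTTPController({ path: '/widgets' })
export class WidgetController {
  @Inject()
  private readonly widgetService: WidgetService;

  // Controller layer (port): depends on the Service, never on the Model directly.
  @HTTPMethod({ method: HTTPMethodEnum.GET, path: '/:name' })
  async show(@HTTPParam() name: string) {
    return await this.widgetService.showWidget(name);
  }
}
```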
````diff
@@ -130,15 +156,15 @@ port:
 For example, it wraps query helpers such as PackageEntity and PackageVersionEntity.
 
 ```ts
-// try to get package entity, throw NotFoundError when package not exists
-private async getPackageEntity(scope: string, name: string) {
-  const packageEntity = await this.packageRepository.findPackage(scope, name);
-  if (!packageEntity) {
-    const fullname = getFullname(scope, name);
-    throw new NotFoundError(`${fullname} not found`);
-  }
-  return packageEntity;
+// try to get package entity, throw NotFoundError when package not exists
+private async getPackageEntity(scope: string, name: string) {
+  const packageEntity = await this.packageRepository.findPackage(scope, name);
+  if (!packageEntity) {
+    const fullname = getFullname(scope, name);
+    throw new NotFoundError(`${fullname} not found`);
+  }
+  return packageEntity;
+}
 ```
 
 ### The three steps of request validation
````
````diff
@@ -192,13 +218,39 @@ await this.userRoleManager.requiredPackageMaintainer(pkg, authorizedUser);
 Of course, for most requests that write to a package, AbstractController extracts an even simpler helper that bundles data fetching and permission checking into a single call:
 
 ```ts
-const pkg = await this.getPackageEntityAndRequiredMaintainer(ctx, fullname);
+const { pkg } = await this.ensurePublishAccess(ctx, fullname);
 ```
 
````
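For context, a write-style controller method built on that helper might look roughly like the sketch below. The route, the controller name and the relative base-class import are assumptions for illustration only; the single piece taken from the guide is the `ensurePublishAccess` call.

```ts
import { Context, EggContext, HTTPController, HTTPMethod, HTTPMethodEnum, HTTPParam } from '@eggjs/tegg';
// Assumed relative import: inside cnpmcore the controllers sit next to AbstractController.
import { AbstractController } from './AbstractController';

// Hypothetical controller used only to show where the helper fits.
@HTTPController()
export class PackageDeprecateController extends AbstractController {
  @HTTPMethod({ method: HTTPMethodEnum.PUT, path: '/:fullname/deprecate' })
  async deprecate(@Context() ctx: EggContext, @HTTPParam() fullname: string) {
    // One call fetches the package and verifies that the current user may write to it,
    // throwing NotFoundError / ForbiddenError otherwise.
    const { pkg } = await this.ensurePublishAccess(ctx, fullname);
    return { ok: true, name: pkg.fullname };
  }
}
```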
## Service development guide

A Service depends on Repositories and is in turn depended on by Controllers.

```txt
+---------------------------+ +----------------------+ +-------------+
| PackageVersionFileService | | PackageSyncerService | | XxxService |
+---------------^-----------+ +---^------------------+ +--^----------+
| | |
| inject | inject | inject
| | |
+---+-------------------+-------------------------+--+
| PackageManagerService |
+-----------------------^----------------------------+
|
| inject
|
+---------+--------+
| XxxRepository |
+------------------+
```

### PackageManagerService manages all packages and version information

It is depended on by the other Services.

## Repository development guide

A Repository depends on Models and is depended on by Services and Controllers.

### Repository method naming rules

- `findSomething`: query a single model record
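As an illustration of the `findSomething` rule above — query methods return one record, or `null` when nothing matches — a hypothetical repository could look like this. The `Widget` model and repository are invented for the example; only the naming convention itself comes from the guide.

```ts
import { AccessLevel, SingletonProto } from '@eggjs/tegg';

// Invented row type, standing in for an ORM model.
interface WidgetRow {
  widgetId: string;
  name: string;
}

@SingletonProto({ accessLevel: AccessLevel.PUBLIC })
export class WidgetRepository {
  // In-memory stand-in for the Model/ORM layer, just to keep the sketch runnable.
  private readonly rows = new Map<string, WidgetRow>();

  // findSomething: query one record, return null when it does not exist.
  async findWidget(widgetId: string): Promise<WidgetRow | null> {
    return this.rows.get(widgetId) ?? null;
  }

  // A matching write method so that findWidget has something to find.
  async saveWidget(row: WidgetRow): Promise<void> {
    this.rows.set(row.widgetId, row);
  }
}
```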

`History.md` — 249 changed lines

@@ -1,4 +1,253 @@
|
||||
|
||||
2.9.0 / 2022-12-15
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`c562645`](http://github.com/cnpm/cnpmcore/commit/c562645db7c88f9c3c5787fd450b457574d1cce6)] - feat: suspend task before app close (#365) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
|
||||
2.8.1 / 2022-12-05
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`fad30ad`](http://github.com/cnpm/cnpmcore/commit/fad30adc564c931c0bf63828d83bab84105aaef0)] - feat: npm command support npm v6 (#356) (laibao101 <<369632567@qq.com>>)
|
||||
|
||||
**fixes**
|
||||
* [[`f961219`](http://github.com/cnpm/cnpmcore/commit/f961219dbe4676156e1766db82379ee40087bcd8)] - fix: Sync save ignore ER_DUP_ENTRY error (#364) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
|
||||
**others**
|
||||
* [[`7bc0fcc`](http://github.com/cnpm/cnpmcore/commit/7bc0fccaca880efe08228b4109953bd3974d2eb9)] - 🤖 TEST: Fix async function mock (fengmk2 <<fengmk2@gmail.com>>)
|
||||
* [[`84ae9bc`](http://github.com/cnpm/cnpmcore/commit/84ae9bcfa06124255703b926f83fb5e6a6bf9d6b)] - 📖 DOC: Update contributors (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
2.8.0 / 2022-11-29
|
||||
==================
|
||||
|
||||
**others**
|
||||
* [[`d55c680`](http://github.com/cnpm/cnpmcore/commit/d55c680ef906ecb27f7967782ad7d25987cef7d4)] - Event cork (#361) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
|
||||
2.7.1 / 2022-11-25
|
||||
==================
|
||||
|
||||
**fixes**
|
||||
* [[`c6b8aec`](http://github.com/cnpm/cnpmcore/commit/c6b8aecfd0c2b0d454389e931747c431dac5742b)] - fix: request binary error (#360) (Ke Wu <<gemwuu@163.com>>)
|
||||
|
||||
2.7.0 / 2022-11-25
|
||||
==================
|
||||
|
||||
**others**
|
||||
* [[`5738d56`](http://github.com/cnpm/cnpmcore/commit/5738d569ea691c05c3f3b0b74a454a33fefb8fc7)] - refactor: binary sync task use binaryName by default (#358) (Ke Wu <<gemwuu@163.com>>)
|
||||
|
||||
2.6.1 / 2022-11-23
|
||||
==================
|
||||
|
||||
**fixes**
|
||||
* [[`0b35ead`](http://github.com/cnpm/cnpmcore/commit/0b35ead2a0cd73b89d2d961bafec13d7250fe805)] - 🐛 FIX: typo for canvas (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
2.6.0 / 2022-11-23
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`be8387d`](http://github.com/cnpm/cnpmcore/commit/be8387dfa48b9487156542000a93081fa823694a)] - feat: Support canvas sync from different binary (#357) (Ke Wu <<gemwuu@163.com>>)
|
||||
|
||||
**fixes**
|
||||
* [[`d6c4cf5`](http://github.com/cnpm/cnpmcore/commit/d6c4cf5029ca6450064fc05696a8624b6c36f0b2)] - fix: duplicate binary task (#354) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
|
||||
2.5.2 / 2022-11-11
|
||||
==================
|
||||
|
||||
**fixes**
|
||||
* [[`7eb209d`](http://github.com/cnpm/cnpmcore/commit/7eb209de1332417db2070846891d78f5afa0cd10)] - fix: create task when waiting (#352) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
|
||||
2.5.1 / 2022-11-07
|
||||
==================
|
||||
|
||||
**others**
|
||||
* [[`e40c502`](http://github.com/cnpm/cnpmcore/commit/e40c5021bb2ba78f8879d19bc477883168560b85)] - 🐛 FIX: Mirror cypress arm64 binary (#351) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
2.5.0 / 2022-11-04
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`43d77ee`](http://github.com/cnpm/cnpmcore/commit/43d77ee91e52bd74594d9d569b839c1a4b7fbac6)] - feat: long description (#349) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
|
||||
2.4.1 / 2022-10-28
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`92350a8`](http://github.com/cnpm/cnpmcore/commit/92350a864313ee42a048d9e83886ef42db3419de)] - 👌 IMPROVE: Show changes stream create task log (#347) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
**fixes**
|
||||
* [[`28eeeaf`](http://github.com/cnpm/cnpmcore/commit/28eeeafd9870c6b1c5b4f4c23916f6ae73ddda12)] - fix: registry host config (#346) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
* [[`cd5bd92`](http://github.com/cnpm/cnpmcore/commit/cd5bd923b8d47bf90b5f077ce04777b38653b850)] - 🐛 FIX: Catch all error on changes stream handler (#344) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
2.4.0 / 2022-10-25
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`6aa302d`](http://github.com/cnpm/cnpmcore/commit/6aa302d074f2c84f39e2065fa20853b007f6fa3b)] - 📦 NEW: Use oss-cnpm v4 (#340) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
* [[`a217fd0`](http://github.com/cnpm/cnpmcore/commit/a217fd07ccad3fe5058881654a13e0c69c758717)] - 👌 IMPROVE: Reduce warning log (#326) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
**fixes**
|
||||
* [[`b19b0a0`](http://github.com/cnpm/cnpmcore/commit/b19b0a0496e35ac1c6b3de746b9221990ba9dc93)] - fix: Lazy set registryId when executeTask (#341) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
|
||||
**others**
|
||||
* [[`305175a`](http://github.com/cnpm/cnpmcore/commit/305175ab5fcdc3ad3b60055d45cfcacb23065a80)] - 🤖 TEST: Use enum define on unittest (#333) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
* [[`07f2eba`](http://github.com/cnpm/cnpmcore/commit/07f2eba137ba625b2d422677a465920617141b87)] - 🤖 TEST: Mock all binary http requests (#328) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
* [[`4b0c7dc`](http://github.com/cnpm/cnpmcore/commit/4b0c7dc6196960d34b2529bfde724e97f1af8444)] - 🤖 TEST: Mock all httpclient request (#327) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
2.3.1 / 2022-10-06
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`bbc08fd`](http://github.com/cnpm/cnpmcore/commit/bbc08fd26887d55b98b70d1ed210caf81f9d5c22)] - 👌 IMPROVE: syncPackageWorkerMaxConcurrentTasks up to 20 (#322) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
* [[`5852f22`](http://github.com/cnpm/cnpmcore/commit/5852f22023525d857ff1ceea205e4315c8079877)] - feat: support sync exist mode (#275) (zhangyuantao <<zhangyuantao@163.com>>)
|
||||
|
||||
**fixes**
|
||||
* [[`d79634e`](http://github.com/cnpm/cnpmcore/commit/d79634eea749fef1a420988a8599f156f28ee85a)] - 🐛 FIX: Should sync package when registry id is null (#324) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
* [[`24f920d`](http://github.com/cnpm/cnpmcore/commit/24f920d65b31f9eb83c1ecda36adf7f9e2c379c3)] - 🐛 FIX: Should run sync package on all worker (#323) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
2.3.0 / 2022-09-24
|
||||
==================
|
||||
|
||||
**others**
|
||||
* [[`bd83a19`](http://github.com/cnpm/cnpmcore/commit/bd83a19eca761c96bcee04e6ae91e68eac3cb6bf)] - 👌 IMPROVE: use urllib3 instead (#302) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
* [[`35e7d3a`](http://github.com/cnpm/cnpmcore/commit/35e7d3ad3c78712b507d522a0b72b5a6a5a4ec1c)] - 👌 IMPROVE: Enable phpmyadmin and DEBUG_LOCAL_SQL by default (#320) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
2.2.0 / 2022-09-22
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`bca0fb3`](http://github.com/cnpm/cnpmcore/commit/bca0fb3c37b9f74f3c41ab181dd3113d9dab4c05)] - feat: only allow pkg sync from registry it belong (#317) (killa <<killa123@126.com>>)
|
||||
|
||||
**fixes**
|
||||
* [[`7e9beea`](http://github.com/cnpm/cnpmcore/commit/7e9beead576a41de3aa042b92b788bde5d55f44a)] - fix: only append / if path is not empty and not ends with / (#316) (killa <<killa123@126.com>>)
|
||||
* [[`4fe68cb`](http://github.com/cnpm/cnpmcore/commit/4fe68cbf38f303e797b80b88407f714ec76bfae0)] - fix: fix directory path (#313) (killa <<killa123@126.com>>)
|
||||
|
||||
**others**
|
||||
* [[`e72ce35`](http://github.com/cnpm/cnpmcore/commit/e72ce3576f9a3cda095e3feac59eeb1d8c1e8033)] - 🤖 TEST: Skip unstable tests (#318) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
* [[`171b11f`](http://github.com/cnpm/cnpmcore/commit/171b11f7bba534c993af4088b00f8545216734a9)] - Revert "fix: fix directory path (#313)" (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
2.1.1 / 2022-09-08
|
||||
==================
|
||||
|
||||
**fixes**
|
||||
* [[`8fb9dd8`](http://github.com/cnpm/cnpmcore/commit/8fb9dd8cf4800afe3f54aba9ee4c0ae05efb4f1d)] - fix: findExecuteTask only return waiting task (#312) (killa <<killa123@126.com>>)
|
||||
|
||||
2.1.0 / 2022-09-05
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`c5d2b49`](http://github.com/cnpm/cnpmcore/commit/c5d2b49ab3a0ce0d67f6e7cc19e0be867c92d04c)] - feat: auto get next valid task (#311) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
|
||||
2.0.0 / 2022-09-05
|
||||
==================
|
||||
|
||||
**others**
|
||||
* [[`fc4baff`](http://github.com/cnpm/cnpmcore/commit/fc4baff226540e7cfee9adc069e17a59f4050a43)] - chore: refactor schedule with @Schedule (#309) (killa <<killa123@126.com>>)
|
||||
|
||||
1.11.6 / 2022-09-04
|
||||
==================
|
||||
|
||||
**fixes**
|
||||
* [[`768f951`](http://github.com/cnpm/cnpmcore/commit/768f951b6f2509f14c30a70d86a6719107d963a4)] - fix: cnpmjsorg changesstream limit (#310) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
|
||||
1.11.5 / 2022-09-02
|
||||
==================
|
||||
|
||||
**fixes**
|
||||
* [[`f673ab8`](http://github.com/cnpm/cnpmcore/commit/f673ab8ba1545909ff6b8e445364646511930891)] - fix: execute state check (#308) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
|
||||
**others**
|
||||
* [[`091420a`](http://github.com/cnpm/cnpmcore/commit/091420ae2677ecedd1a26a238921321c2a191675)] - 🤖 TEST: Add SQL Review Action (#307) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
1.11.4 / 2022-08-30
|
||||
==================
|
||||
|
||||
**fixes**
|
||||
* [[`f9210ca`](http://github.com/cnpm/cnpmcore/commit/f9210ca7e180e19bce08da9ef33e46e990b86ef1)] - fix: changes stream empty (#306) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
|
||||
1.11.3 / 2022-08-29
|
||||
==================
|
||||
|
||||
**fixes**
|
||||
* [[`48f228d`](http://github.com/cnpm/cnpmcore/commit/48f228da447d8cde62849fa52cf43bae7754e2e3)] - fix: changes stream updatedAt (#304) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
* [[`87045ba`](http://github.com/cnpm/cnpmcore/commit/87045ba8b0e14547c93689600eb7e2c1de2a611b)] - fix: task updatedAt save (#305) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
|
||||
1.11.2 / 2022-08-28
|
||||
==================
|
||||
|
||||
**fixes**
|
||||
* [[`4e8700c`](http://github.com/cnpm/cnpmcore/commit/4e8700c4f7c6fb5c4f4d4a2b9a9546096c5d10e2)] - fix: only create createHookTask if hook enable (#299) (killa <<killa123@126.com>>)
|
||||
|
||||
**others**
|
||||
* [[`e06c841`](http://github.com/cnpm/cnpmcore/commit/e06c841537113fdb0c00beb22b0a55378c61ce80)] - 🐛 FIX: Should sync public package when registryName not exists (#303) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
* [[`f139444`](http://github.com/cnpm/cnpmcore/commit/f139444213403494ebe9bf073df62125413892d9)] - 📖 DOC: Update contributors (fengmk2 <<fengmk2@gmail.com>>)
|
||||
* [[`c4a9de5`](http://github.com/cnpm/cnpmcore/commit/c4a9de598dce9a1b82bbcdd91968a15bbc5a4b6b)] - Create SECURITY.md (fengmk2 <<fengmk2@gmail.com>>)
|
||||
* [[`709d65b`](http://github.com/cnpm/cnpmcore/commit/709d65bd0473856c9bfc4416ea2ca375136e354f)] - 🤖 TEST: Use diff bucket on OSS test (#301) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
* [[`9576699`](http://github.com/cnpm/cnpmcore/commit/95766990fa9c4c2c43d462f6b151557425b0c741)] - chore: use AsyncGenerator insteadof Transform stream (#300) (killa <<killa123@126.com>>)
|
||||
* [[`3ed5269`](http://github.com/cnpm/cnpmcore/commit/3ed5269f1d22ca3aaca89a90a4fff90f293e2464)] - 📦 NEW: Mirror better-sqlite3 binary (#296) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
1.11.1 / 2022-08-24
|
||||
==================
|
||||
|
||||
**fixes**
|
||||
* [[`359a150`](http://github.com/cnpm/cnpmcore/commit/359a150eb450d69e6523b20efcc5c7cfe3efab4d)] - fix: changes stream (#297) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
|
||||
1.11.0 / 2022-08-23
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`a91c8ac`](http://github.com/cnpm/cnpmcore/commit/a91c8ac4d05dc903780fda516b09364a05a2b1e6)] - feat: sync package from spec regsitry (#293) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
* [[`de37008`](http://github.com/cnpm/cnpmcore/commit/de37008261b05845f392d66764cdfe14ae324756)] - feat: changesStream adapter & needSync() method (#292) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
* [[`4b506c8`](http://github.com/cnpm/cnpmcore/commit/4b506c8371697ddacdbe99a8ecb330bfc1911ec6)] - feat: init registry & scope (#286) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
* [[`41c6e24`](http://github.com/cnpm/cnpmcore/commit/41c6e24c84d546eb9d5515cc0940cc3e4274687b)] - feat: impl trigger Hooks (#289) (killa <<killa123@126.com>>)
|
||||
* [[`79cb826`](http://github.com/cnpm/cnpmcore/commit/79cb82615f04bdb3da3ccbe09bb6a861608b69c5)] - feat: impl migration sql (#290) (killa <<killa123@126.com>>)
|
||||
* [[`4cfa8ed`](http://github.com/cnpm/cnpmcore/commit/4cfa8ed9d687ce7d950d7d20c0ea28221763ba5f)] - feat: impl hooks api (#287) (killa <<killa123@126.com>>)
|
||||
* [[`47d53d2`](http://github.com/cnpm/cnpmcore/commit/47d53d22ad03c02ee9cb9035a38ae205a6d38381)] - feat: add bizId for task (#285) (killa <<killa123@126.com>>)
|
||||
* [[`3b1536b`](http://github.com/cnpm/cnpmcore/commit/3b1536b070b2f9062bc2cc377db96d2f4a160efc)] - feat: add node-webrtc mirror (#274) (Opportunity <<opportunity@live.in>>)
|
||||
|
||||
**others**
|
||||
* [[`7106807`](http://github.com/cnpm/cnpmcore/commit/710680742a078b2faf4cb18c3a39c0397308712e)] - 🐛 FIX: Should show queue size on logging (#280) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
* [[`3a41b21`](http://github.com/cnpm/cnpmcore/commit/3a41b2161cc99bb2f6f6dd7cbaa7abef25ff4393)] - 🐛 FIX: Handle binary configuration value (#278) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
1.10.0 / 2022-08-04
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`c2b7d5a`](http://github.com/cnpm/cnpmcore/commit/c2b7d5aa98b5ba8649ec246c616574a22e9a74b8)] - feat: use sort set to impl queue (#277) (killa <<killa123@126.com>>)
|
||||
|
||||
1.9.1 / 2022-07-29
|
||||
==================
|
||||
|
||||
**fixes**
|
||||
* [[`c54aa21`](http://github.com/cnpm/cnpmcore/commit/c54aa2165c3938dcbb5a2b3b54e66a0d961cc813)] - fix: check executingCount after task is done (#276) (killa <<killa123@126.com>>)
|
||||
|
||||
**others**
|
||||
* [[`3268d03`](http://github.com/cnpm/cnpmcore/commit/3268d030b620825c8c2e6331e1745c1788066c61)] - 🤖 TEST: show package not use cache if isSync (#273) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
1.9.0 / 2022-07-25
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`af6a75a`](http://github.com/cnpm/cnpmcore/commit/af6a75af32ea04c90fda82be3a56c99ec77e5807)] - feat: add forceSyncHistory options (#271) (killa <<killa123@126.com>>)
|
||||
|
||||
1.8.0 / 2022-07-21
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`b49a38c`](http://github.com/cnpm/cnpmcore/commit/b49a38c77e044c978e6de32a9d3e257cc90ea7c1)] - feat: use Model with inject (#269) (killa <<killa123@126.com>>)
|
||||
|
||||
1.7.1 / 2022-07-20
|
||||
==================
|
||||
|
||||
**fixes**
|
||||
* [[`52fca55`](http://github.com/cnpm/cnpmcore/commit/52fca55aa883865f0ae70bfc1ff274c313b8f76a)] - fix: show package not use cache if isSync (#268) (killa <<killa123@126.com>>)
|
||||
|
||||
1.7.0 / 2022-07-12
|
||||
==================
|
||||
|
||||
**others**
|
||||
* [[`4f7ce8b`](http://github.com/cnpm/cnpmcore/commit/4f7ce8b4b2a5806a225ce67228388e14388b7059)] - deps: upgrade leoric to 2.x (#262) (killa <<killa123@126.com>>)
|
||||
|
||||
1.6.0 / 2022-07-11
|
||||
==================
|
||||
|
||||
|
||||

`INTEGRATE.md` (new file, 230 lines)

@@ -0,0 +1,230 @@

# 🥚 How to integrate cnpmcore into a [tegg](https://github.com/eggjs/tegg) application
> The example project for this document can be found [here](https://github.com/eggjs/examples/commit/bed580fe053ae573f8b63f6788002ff9c6e7a142). Before starting, make sure you have read the relevant parts of [DEVELOPER.md](DEVELOPER.md) and set up the local development environment.

In production you can also deploy the cnpmcore system directly and get the full registry mirror feature set.
Usually, however, an enterprise has internal middleware services or constraints, such as file storage, cache services, and login/authentication flows.

Besides deploying from source and doing secondary development, we also provide cnpmcore as an npm package, which makes it easy to integrate into a [tegg](https://github.com/eggjs/tegg) application.
This way you get rich customization and extension capabilities while still benefiting from cnpmcore's continuously evolving features.

Below we take an application initialized with [tegg](https://github.com/eggjs/tegg) as an example, integrate cnpmcore as an npm package, and extend the login feature to support in-house [SSO](https://en.wikipedia.org/wiki/Single_sign-on) login.

## 🚀 Quick start

### 🆕 Create a new tegg application
> We use https://github.com/eggjs/examples/tree/master/hello-tegg as the example

```shell
.
├── app
│ ├── biz
│ ├── controller
│ └── middleware
├── config
│ ├── config.default.ts
│ └── plugin.ts
├── package.json
├── test
│ ├── biz
│ └── controller
└── tsconfig.json
```

### 📦︎ Install cnpmcore and adjust the configuration

```shell
npm i cnpmcore -S
```

1. Update the `tsconfig.json` configuration, because cnpmcore uses [subPath](https://nodejs.org/api/packages.html#subpath-exports) exports

```json
{
  "extends": "@eggjs/tsconfig",
  "compilerOptions": {
    "baseUrl": "./",
    "moduleResolution": "NodeNext",
    "target": "ES2020",
    "module": "Node16"
  }
}
```

2. Update `config/plugin.ts` and enable the plugins that cnpmcore depends on

```typescript
// enable the following plugins
{
  redis: {
    enable: true,
    package: 'egg-redis',
  },
  teggOrm: {
    enable: true,
    package: '@eggjs/tegg-orm-plugin',
  },
  eventbusModule: {
    enable: true,
    package: '@eggjs/tegg-eventbus-plugin',
  },
  tracer: {
    enable: true,
    package: 'egg-tracer',
  },
  typeboxValidate: {
    enable: true,
    package: 'egg-typebox-validate',
  },
}
```

3. Update `config.default.ts`; you can override the default configuration directly

```typescript
import { SyncMode } from 'cnpmcore/common/constants';
import { cnpmcoreConfig } from 'cnpmcore/common/config';

export default () => {
  const config = {};
  config.cnpmcore = {
    ...cnpmcoreConfig,
    enableChangesStream: false,
    syncMode: SyncMode.all,
  };
  return config;
}
```

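If you want the override above to pass a strict TypeScript check (a bare `const config = {}` has no `cnpmcore` property), one possible variant is sketched below. The intersection type is only one way to keep `tsc` happy and is not prescribed by cnpmcore; `PowerPartial` and `EggAppConfig` are assumed from egg's typings.

```typescript
import { EggAppConfig, PowerPartial } from 'egg';
import { SyncMode } from 'cnpmcore/common/constants';
import { cnpmcoreConfig } from 'cnpmcore/common/config';

export default () => {
  // Type the config object so the compiler knows about the cnpmcore key.
  const config = {} as PowerPartial<EggAppConfig> & { cnpmcore: typeof cnpmcoreConfig };
  config.cnpmcore = {
    ...cnpmcoreConfig,
    enableChangesStream: false,
    syncMode: SyncMode.all,
  };
  return config;
};
```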
### 🧑🤝🧑 Integrate cnpmcore

1. Create a directory for your custom infra module; here we use app/infra as an example

```shell
├── infra
│   ├── AuthAdapter.ts
│   ├── NFSAdapter.ts
│   ├── QueueAdapter.ts
│   └── package.json
```

* Add a `package.json` that declares infra as an eggModule unit

```JSON
{
  "name": "infra",
  "eggModule": {
    "name": "infra"
  }
}
```

* Add the `XXXAdapter.ts` files; each adapter extends the corresponding default cnpmcore adapter. Taking AuthAdapter as an example:

```typescript
import { AccessLevel, SingletonProto } from '@eggjs/tegg';
import { AuthAdapter } from 'cnpmcore/infra/AuthAdapter';

@SingletonProto({
  name: 'authAdapter',
  accessLevel: AccessLevel.PUBLIC,
})
export class MyAuthAdapter extends AuthAdapter {
}
```

2. Add `config/module.json` to register cnpmcore as a module of the new tegg application

```json
[
  {
    "path": "../app/biz"
  },
  {
    "path": "../app/infra"
  },
  {
    "package": "cnpmcore/common"
  },
  {
    "package": "cnpmcore/core"
  },
  {
    "package": "cnpmcore/port"
  },
  {
    "package": "cnpmcore/repository"
  }
]
```

### ✍🏻 Override the AuthAdapter implementation

Taking AuthAdapter as the example, let's implement SSO login for the npm CLI.

We need to implement two methods, getAuthUrl and ensureCurrentUser:
1. getAuthUrl directs the user to the actual in-house login center.
2. ensureCurrentUser is used once the user has finished logging in; the login center calls back into the application to complete the authentication flow.
By convention, the `POST /-/v1/login/sso/:sessionId` route performs the login verification.
You are free to change the URL and the login callback, as long as the token state in redis gets updated.

Update the AuthAdapter.ts file

```typescript
import { AccessLevel, EggContext, SingletonProto } from '@eggjs/tegg';
import { AuthAdapter } from 'cnpmcore/infra/AuthAdapter';
import { randomUUID } from 'crypto';
import { AuthUrlResult, userResult } from 'node_modules/cnpmcore/dist/app/common/typing';

const ONE_DAY = 3600 * 24;

@SingletonProto({
  name: 'authAdapter',
  accessLevel: AccessLevel.PUBLIC,
})
export class MyAuthAdapter extends AuthAdapter {
  async getAuthUrl(ctx: EggContext): Promise<AuthUrlResult> {
    const sessionId = randomUUID();
    await this.redis.setex(sessionId, ONE_DAY, '');
    return {
      // replace with the actual in-house login center URL; here we use the default hello route as an example
      loginUrl: `${ctx.origin}/hello?name=${sessionId}`,
      doneUrl: `${ctx.href}/done/session/${sessionId}`,
    };
  }

  async ensureCurrentUser(): Promise<userResult | null> {
    return {
      name: 'hello',
      email: 'hello@cnpmjs.org',
    };
  }
}
```

Update the HelloController implementation; in practice this could also be driven by a login-center callback, a confirmation page, and so on. A fuller controller sketch follows the snippet below.

```typescript
// hitting the callback endpoint creates the user automatically
await this.httpclient.request(`${ctx.origin}/-/v1/login/sso/${name}`, { method: 'POST' });
```
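For completeness, here is a sketch of what that HelloController change could look like in the hello-tegg example. The controller path, decorators, and query-parameter name are assumptions based on the one-line snippet above, not code taken from this repository.

```typescript
import {
  HTTPController,
  HTTPMethod,
  HTTPMethodEnum,
  HTTPQuery,
  Context,
  EggContext,
  Inject,
} from '@eggjs/tegg';
import { EggHttpClient } from 'egg';

@HTTPController()
export class HelloController {
  @Inject()
  private readonly httpclient: EggHttpClient;

  // GET /hello?name=<sessionId> is the loginUrl returned by getAuthUrl above
  @HTTPMethod({ method: HTTPMethodEnum.GET, path: '/hello' })
  async hello(@Context() ctx: EggContext, @HTTPQuery() name: string) {
    // hitting the SSO callback route marks the session as logged in and creates the user
    await this.httpclient.request(`${ctx.origin}/-/v1/login/sso/${name}`, { method: 'POST' });
    return 'login success';
  }
}
```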
## 🎉 Verify the feature

1. Run `npm login --registry=http://127.0.0.1:7001`

```shell
$ npm login --registry=http://127.0.0.1:7001
$ npm notice Log in on http://127.0.0.1:7001/
$ Login at:
$ http://127.0.0.1:7001/hello?name=e44e8c43-211a-4bcd-ae78-c4cbb1a78ae7
$ Press ENTER to open in the browser...
```

2. Press Enter as prompted to open the browser and visit the login center, which is the loginUrl we returned from getAuthUrl

3. Because the implementation is mocked, the page immediately reports a successful login

```shell
Logged in on http://127.0.0.1:7001/.
```

4. Run `npm whoami --registry=http://127.0.0.1:7001` to verify

```shell
$ npm whoami --registry=http://127.0.0.1:7001
$ hello
```

13 README.md
@@ -16,6 +16,10 @@ See https://github.com/cnpm/cnpmjs.org/blob/master/docs/registry-api.md#npm-regi
|
||||
|
||||
See [DEVELOPER.md](DEVELOPER.md)
|
||||
|
||||
## How to integrate
|
||||
|
||||
See [INTEGRATE.md](INTEGRATE.md)
|
||||
|
||||
## License
|
||||
|
||||
[MIT](LICENSE)
|
||||
@@ -24,12 +28,13 @@ See [DEVELOPER.md](DEVELOPER.md)
|
||||
|
||||
## Contributors
|
||||
|
||||
|[<img src="https://avatars.githubusercontent.com/u/156269?v=4" width="100px;"/><br/><sub><b>fengmk2</b></sub>](https://github.com/fengmk2)<br/>|[<img src="https://avatars.githubusercontent.com/u/6897780?v=4" width="100px;"/><br/><sub><b>killagu</b></sub>](https://github.com/killagu)<br/>|[<img src="https://avatars.githubusercontent.com/u/26033663?v=4" width="100px;"/><br/><sub><b>Zian502</b></sub>](https://github.com/Zian502)<br/>|[<img src="https://avatars.githubusercontent.com/u/13284978?v=4" width="100px;"/><br/><sub><b>Beace</b></sub>](https://github.com/Beace)<br/>|[<img src="https://avatars.githubusercontent.com/u/227713?v=4" width="100px;"/><br/><sub><b>atian25</b></sub>](https://github.com/atian25)<br/>|[<img src="https://avatars.githubusercontent.com/u/17879221?v=4" width="100px;"/><br/><sub><b>laibao101</b></sub>](https://github.com/laibao101)<br/>|
|
||||
|[<img src="https://avatars.githubusercontent.com/u/156269?v=4" width="100px;"/><br/><sub><b>fengmk2</b></sub>](https://github.com/fengmk2)<br/>|[<img src="https://avatars.githubusercontent.com/u/6897780?v=4" width="100px;"/><br/><sub><b>killagu</b></sub>](https://github.com/killagu)<br/>|[<img src="https://avatars.githubusercontent.com/u/32174276?v=4" width="100px;"/><br/><sub><b>semantic-release-bot</b></sub>](https://github.com/semantic-release-bot)<br/>|[<img src="https://avatars.githubusercontent.com/u/5574625?v=4" width="100px;"/><br/><sub><b>elrrrrrrr</b></sub>](https://github.com/elrrrrrrr)<br/>|[<img src="https://avatars.githubusercontent.com/u/35598090?v=4" width="100px;"/><br/><sub><b>hezhengxu2018</b></sub>](https://github.com/hezhengxu2018)<br/>|[<img src="https://avatars.githubusercontent.com/u/26033663?v=4" width="100px;"/><br/><sub><b>Zian502</b></sub>](https://github.com/Zian502)<br/>|
|
||||
| :---: | :---: | :---: | :---: | :---: | :---: |
|
||||
|[<img src="https://avatars.githubusercontent.com/u/8198408?v=4" width="100px;"/><br/><sub><b>BlackHole1</b></sub>](https://github.com/BlackHole1)<br/>|[<img src="https://avatars.githubusercontent.com/u/1814071?v=4" width="100px;"/><br/><sub><b>xiekw2010</b></sub>](https://github.com/xiekw2010)<br/>|[<img src="https://avatars.githubusercontent.com/u/958063?v=4" width="100px;"/><br/><sub><b>thonatos</b></sub>](https://github.com/thonatos)<br/>|[<img src="https://avatars.githubusercontent.com/u/11039003?v=4" width="100px;"/><br/><sub><b>chenpx976</b></sub>](https://github.com/chenpx976)<br/>|[<img src="https://avatars.githubusercontent.com/u/29791463?v=4" width="100px;"/><br/><sub><b>fossabot</b></sub>](https://github.com/fossabot)<br/>|[<img src="https://avatars.githubusercontent.com/u/1119126?v=4" width="100px;"/><br/><sub><b>looksgood</b></sub>](https://github.com/looksgood)<br/>|
|
||||
[<img src="https://avatars.githubusercontent.com/u/3478550?v=4" width="100px;"/><br/><sub><b>coolyuantao</b></sub>](https://github.com/coolyuantao)<br/>
|
||||
|[<img src="https://avatars.githubusercontent.com/u/4635838?v=4" width="100px;"/><br/><sub><b>gemwuu</b></sub>](https://github.com/gemwuu)<br/>|[<img src="https://avatars.githubusercontent.com/u/17879221?v=4" width="100px;"/><br/><sub><b>laibao101</b></sub>](https://github.com/laibao101)<br/>|[<img src="https://avatars.githubusercontent.com/u/3478550?v=4" width="100px;"/><br/><sub><b>coolyuantao</b></sub>](https://github.com/coolyuantao)<br/>|[<img src="https://avatars.githubusercontent.com/u/13284978?v=4" width="100px;"/><br/><sub><b>Beace</b></sub>](https://github.com/Beace)<br/>|[<img src="https://avatars.githubusercontent.com/u/10163680?v=4" width="100px;"/><br/><sub><b>Wellaiyo</b></sub>](https://github.com/Wellaiyo)<br/>|[<img src="https://avatars.githubusercontent.com/u/227713?v=4" width="100px;"/><br/><sub><b>atian25</b></sub>](https://github.com/atian25)<br/>|
|
||||
|[<img src="https://avatars.githubusercontent.com/u/8198408?v=4" width="100px;"/><br/><sub><b>BlackHole1</b></sub>](https://github.com/BlackHole1)<br/>|[<img src="https://avatars.githubusercontent.com/u/1814071?v=4" width="100px;"/><br/><sub><b>xiekw2010</b></sub>](https://github.com/xiekw2010)<br/>|[<img src="https://avatars.githubusercontent.com/u/13471233?v=4" width="100px;"/><br/><sub><b>OpportunityLiu</b></sub>](https://github.com/OpportunityLiu)<br/>|[<img src="https://avatars.githubusercontent.com/u/958063?v=4" width="100px;"/><br/><sub><b>thonatos</b></sub>](https://github.com/thonatos)<br/>|[<img src="https://avatars.githubusercontent.com/u/11039003?v=4" width="100px;"/><br/><sub><b>chenpx976</b></sub>](https://github.com/chenpx976)<br/>|[<img src="https://avatars.githubusercontent.com/u/29791463?v=4" width="100px;"/><br/><sub><b>fossabot</b></sub>](https://github.com/fossabot)<br/>|
|
||||
[<img src="https://avatars.githubusercontent.com/u/1119126?v=4" width="100px;"/><br/><sub><b>looksgood</b></sub>](https://github.com/looksgood)<br/>|[<img src="https://avatars.githubusercontent.com/u/23701019?v=4" width="100px;"/><br/><sub><b>laoboxie</b></sub>](https://github.com/laoboxie)<br/>|[<img src="https://avatars.githubusercontent.com/u/5550931?v=4" width="100px;"/><br/><sub><b>shinima</b></sub>](https://github.com/shinima)<br/>
|
||||
|
||||
This project follows the git-contributor [spec](https://github.com/xudafeng/git-contributor), auto updated at `Sat Jul 09 2022 08:59:28 GMT+0800`.
|
||||
This project follows the git-contributor [spec](https://github.com/xudafeng/git-contributor), auto updated at `Sat May 06 2023 12:40:20 GMT+0800`.
|
||||
|
||||
<!-- GITCONTRIBUTOR_END -->
|
||||
|
||||
|
||||
41 SECURITY.md (new file)
@@ -0,0 +1,41 @@

# Security Policy

## Supported Versions

Currently being supported with security updates.

| Version  | Supported          |
| -------- | ------------------ |
| >= 1.0.0 | :white_check_mark: |

## Reporting a Vulnerability

The cnpmcore OSS team and community take all security vulnerabilities seriously.
Thank you for improving the security of our open source software.
We appreciate your efforts and responsible disclosure and will make every effort to acknowledge your contributions.

Report security vulnerabilities by emailing the cnpmcore security team at:

```
fengmk2+cnpmcoresecurity@gmail.com
killa123@126.com
```

The lead maintainer will acknowledge your email within 48 hours,
and will send a more detailed response within 72 hours indicating the next steps in handling your report.
After the initial reply to your report,
the security team will endeavor to keep you informed of the progress towards a fix and full announcement,
and may ask for additional information or guidance.

Report security vulnerabilities in third-party modules to the person or team maintaining the module.

## Disclosure Policy

When the security team receives a security bug report, they will assign it
to a primary handler. This person will coordinate the fix and release
process, involving the following steps:

* Confirm the problem and determine the affected versions.
* Audit code to find any potential similar problems.
* Prepare fixes for all releases still under maintenance. These fixes
  will be released as fast as possible to NPM.
21 app.ts
@@ -1,7 +1,7 @@
|
||||
import path from 'path';
|
||||
import { readFile } from 'fs/promises';
|
||||
import { Application } from 'egg';
|
||||
|
||||
import { ChangesStreamService } from './app/core/service/ChangesStreamService';
|
||||
declare module 'egg' {
|
||||
interface Application {
|
||||
binaryHTML: string;
|
||||
@@ -16,6 +16,18 @@ export default class CnpmcoreAppHook {
|
||||
this.app.binaryHTML = '';
|
||||
}
|
||||
|
||||
async configWillLoad() {
|
||||
const app = this.app;
|
||||
// https://github.com/eggjs/tegg/blob/master/plugin/orm/app.ts#L37
|
||||
// store query sql to log
|
||||
app.config.orm.logger = {
|
||||
...app.config.orm.logger,
|
||||
logQuery(sql: string, duration: number) {
|
||||
app.getLogger('sqlLogger').info('[%s] %s', duration, sql);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
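The `sqlLogger` used by the hook above has to exist as a custom logger. A minimal sketch of the matching configuration; the log file location is an assumption, not taken from this repository:

```typescript
// config/config.default.ts (sketch)
import path from 'node:path';
import { EggAppInfo } from 'egg';

export default (appInfo: EggAppInfo) => {
  return {
    customLogger: {
      // app.getLogger('sqlLogger') in configWillLoad() resolves to this logger
      sqlLogger: {
        file: path.join(appInfo.root, 'logs', appInfo.name, 'sql.log'),
      },
    },
  };
};
```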
// https://eggjs.org/zh-cn/basics/app-start.html
|
||||
async didReady() {
|
||||
// ready binary.html and replace registry
|
||||
@@ -23,4 +35,11 @@ export default class CnpmcoreAppHook {
|
||||
const text = await readFile(filepath, 'utf-8');
|
||||
this.app.binaryHTML = text.replace('{{registry}}', this.app.config.cnpmcore.registry);
|
||||
}
|
||||
|
||||
// executed when the application is shutting down
// the currently running changesStream task needs to be suspended
|
||||
async beforeClose() {
|
||||
const changesStreamService = await this.app.getEggObject(ChangesStreamService);
|
||||
await changesStreamService.suspendSync(true);
|
||||
}
|
||||
}
|
||||
|
||||
33 app/common/CryptoUtil.ts (new file)
@@ -0,0 +1,33 @@
|
||||
import { generateKeyPairSync, publicEncrypt, privateDecrypt, constants } from 'crypto';
|
||||
|
||||
// generate rsa key pair
|
||||
export function genRSAKeys(): { publicKey: string, privateKey: string } {
|
||||
const key = generateKeyPairSync('rsa', {
|
||||
modulusLength: 512,
|
||||
});
|
||||
const publicKey = key.publicKey.export({
|
||||
type: 'pkcs1',
|
||||
format: 'pem',
|
||||
}).toString('base64');
|
||||
const privateKey = key.privateKey.export({
|
||||
type: 'pkcs1',
|
||||
format: 'pem',
|
||||
}).toString('base64');
|
||||
return { publicKey, privateKey };
|
||||
}
|
||||
|
||||
// encrypt rsa private key
|
||||
export function encryptRSA(publicKey: string, data: string): string {
|
||||
return publicEncrypt({
|
||||
key: publicKey,
|
||||
padding: constants.RSA_PKCS1_PADDING,
|
||||
}, Buffer.from(data, 'utf8')).toString('base64');
|
||||
}
|
||||
|
||||
// decrypt rsa private key
|
||||
export function decryptRSA(privateKey: string, data: string) {
|
||||
return privateDecrypt({
|
||||
key: privateKey,
|
||||
padding: constants.RSA_PKCS1_PADDING,
|
||||
}, Buffer.from(data, 'base64')).toString('utf8');
|
||||
}
|
||||
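The three helpers above are designed to round-trip; a minimal usage sketch (the import path assumes a script at the repository root, and the password string is illustrative):

```typescript
import { genRSAKeys, encryptRSA, decryptRSA } from './app/common/CryptoUtil';

// generate a PEM key pair, encrypt with the public key, decrypt with the private key
const { publicKey, privateKey } = genRSAKeys();
const ciphertext = encryptRSA(publicKey, 'my-npm-password');
const plaintext = decryptRSA(privateKey, ciphertext);
// plaintext === 'my-npm-password'
```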
@@ -1,17 +1,31 @@
|
||||
import { mkdir, rm } from 'fs/promises';
|
||||
import { createWriteStream } from 'fs';
|
||||
import { setTimeout } from 'timers/promises';
|
||||
import path from 'path';
|
||||
import url from 'url';
|
||||
import { randomBytes } from 'crypto';
|
||||
import { EggContextHttpClient } from 'egg';
|
||||
import { mkdir, rm } from 'node:fs/promises';
|
||||
import { createWriteStream } from 'node:fs';
|
||||
import { setTimeout } from 'node:timers/promises';
|
||||
import path from 'node:path';
|
||||
import url from 'node:url';
|
||||
import { randomBytes } from 'node:crypto';
|
||||
import { EggContextHttpClient, HttpClientResponse } from 'egg';
|
||||
import mime from 'mime-types';
|
||||
import dayjs from './dayjs';
|
||||
|
||||
export async function createTempfile(dataDir: string, filename: string) {
|
||||
// will auto clean on CleanTempDir Schedule
|
||||
const tmpdir = path.join(dataDir, 'downloads', dayjs().format('YYYY/MM/DD'));
|
||||
await mkdir(tmpdir, { recursive: true });
|
||||
interface DownloadToTempfileOptionalConfig {
|
||||
retries?: number,
|
||||
ignoreDownloadStatuses?: number[],
|
||||
remoteAuthToken?: string
|
||||
}
|
||||
|
||||
export async function createTempDir(dataDir: string, dirname?: string) {
|
||||
// will auto clean on CleanTempDir Schedule
|
||||
let tmpdir = path.join(dataDir, 'downloads', dayjs().format('YYYY/MM/DD'));
|
||||
if (dirname) {
|
||||
tmpdir = path.join(tmpdir, dirname);
|
||||
}
|
||||
await mkdir(tmpdir, { recursive: true });
|
||||
return tmpdir;
|
||||
}
|
||||
|
||||
export async function createTempfile(dataDir: string, filename: string) {
|
||||
const tmpdir = await createTempDir(dataDir);
|
||||
// The filename is a URL (from dist.tarball), which needs to be truncated, (`getconf NAME_MAX /` # max filename length: 255 bytes)
|
||||
// https://github.com/cnpm/cnpmjs.org/pull/1345
|
||||
const tmpfile = path.join(tmpdir, `${randomBytes(10).toString('hex')}-${path.basename(url.parse(filename).pathname!)}`);
|
||||
@@ -19,11 +33,12 @@ export async function createTempfile(dataDir: string, filename: string) {
|
||||
}
|
||||
|
||||
export async function downloadToTempfile(httpclient: EggContextHttpClient,
|
||||
dataDir: string, url: string, ignoreDownloadStatuses?: number[], retries = 3) {
|
||||
dataDir: string, url: string, optionalConfig?: DownloadToTempfileOptionalConfig) {
|
||||
let retries = optionalConfig?.retries || 3;
|
||||
let lastError: any;
|
||||
while (retries > 0) {
|
||||
try {
|
||||
return await _downloadToTempfile(httpclient, dataDir, url, ignoreDownloadStatuses);
|
||||
return await _downloadToTempfile(httpclient, dataDir, url, optionalConfig);
|
||||
} catch (err: any) {
|
||||
if (err.name === 'DownloadNotFoundError') throw err;
|
||||
lastError = err;
|
||||
@@ -31,26 +46,33 @@ export async function downloadToTempfile(httpclient: EggContextHttpClient,
|
||||
retries--;
|
||||
if (retries > 0) {
|
||||
// sleep 1s ~ 4s in random
|
||||
await setTimeout(1000 + Math.random() * 4000);
|
||||
const delay = process.env.NODE_ENV === 'test' ? 1 : 1000 + Math.random() * 4000;
|
||||
await setTimeout(delay);
|
||||
}
|
||||
}
|
||||
throw lastError;
|
||||
}
|
||||
|
||||
export interface Tempfile {
|
||||
tmpfile: string;
|
||||
headers: HttpClientResponse['res']['headers'];
|
||||
timing: HttpClientResponse['res']['timing'];
|
||||
}
|
||||
async function _downloadToTempfile(httpclient: EggContextHttpClient,
|
||||
dataDir: string, url: string, ignoreDownloadStatuses?: number[]) {
|
||||
dataDir: string, url: string, optionalConfig?: DownloadToTempfileOptionalConfig): Promise<Tempfile> {
|
||||
const tmpfile = await createTempfile(dataDir, url);
|
||||
const writeStream = createWriteStream(tmpfile);
|
||||
try {
|
||||
// max 10 mins to download
|
||||
// FIXME: should show download progress
|
||||
const authorization = optionalConfig?.remoteAuthToken ? `Bearer ${optionalConfig?.remoteAuthToken}` : '';
|
||||
const { status, headers, res } = await httpclient.request(url, {
|
||||
timeout: 60000 * 10,
|
||||
headers: { authorization },
|
||||
writeStream,
|
||||
timing: true,
|
||||
followRedirect: true,
|
||||
});
|
||||
if (status === 404 || (ignoreDownloadStatuses && ignoreDownloadStatuses.includes(status))) {
|
||||
}) as HttpClientResponse;
|
||||
if (status === 404 || (optionalConfig?.ignoreDownloadStatuses && optionalConfig.ignoreDownloadStatuses.includes(status))) {
|
||||
const err = new Error(`Not found, status(${status})`);
|
||||
err.name = 'DownloadNotFoundError';
|
||||
throw err;
|
||||
@@ -70,3 +92,26 @@ async function _downloadToTempfile(httpclient: EggContextHttpClient,
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
const DEFAULT_CONTENT_TYPE = 'application/octet-stream';
|
||||
const PLAIN_TEXT = 'text/plain';
|
||||
const WHITE_FILENAME_CONTENT_TYPES = {
|
||||
license: PLAIN_TEXT,
|
||||
readme: PLAIN_TEXT,
|
||||
history: PLAIN_TEXT,
|
||||
changelog: PLAIN_TEXT,
|
||||
'.npmignore': PLAIN_TEXT,
|
||||
'.jshintignore': PLAIN_TEXT,
|
||||
'.eslintignore': PLAIN_TEXT,
|
||||
'.jshintrc': 'application/json',
|
||||
'.eslintrc': 'application/json',
|
||||
};
|
||||
|
||||
export function mimeLookup(filepath: string) {
|
||||
const filename = path.basename(filepath).toLowerCase();
|
||||
if (filename.endsWith('.ts')) return PLAIN_TEXT;
|
||||
if (filename.endsWith('.lock')) return PLAIN_TEXT;
|
||||
return mime.lookup(filename) ||
|
||||
WHITE_FILENAME_CONTENT_TYPES[filename] ||
|
||||
DEFAULT_CONTENT_TYPE;
|
||||
}
|
||||
|
||||
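The refactor above replaces the positional `ignoreDownloadStatuses`/`retries` arguments with a single options object. A minimal call-site sketch, assuming these helpers live in `app/common/FileUtil.ts` (the hunk does not show the file name) and that the token environment variable is illustrative:

```typescript
import { EggContextHttpClient } from 'egg';
import { downloadToTempfile, mimeLookup } from './app/common/FileUtil';

async function fetchTarball(httpclient: EggContextHttpClient, dataDir: string, tarballUrl: string) {
  const { tmpfile, headers, timing } = await downloadToTempfile(httpclient, dataDir, tarballUrl, {
    retries: 2,
    ignoreDownloadStatuses: [ 403, 451 ],
    // optional Bearer token forwarded to the upstream
    remoteAuthToken: process.env.UPSTREAM_REGISTRY_TOKEN,
  });
  // content type falls back to the whitelist / octet-stream rules above
  return { tmpfile, contentType: headers['content-type'] ?? mimeLookup(tmpfile), timing };
}
```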
3 app/common/LogUtil.ts (new file)
@@ -0,0 +1,3 @@
|
||||
export function isoNow() {
|
||||
return new Date().toISOString();
|
||||
}
|
||||
@@ -1,5 +1,8 @@
|
||||
import { createReadStream } from 'fs';
|
||||
import { createReadStream } from 'node:fs';
|
||||
import { Readable } from 'node:stream';
|
||||
import { pipeline } from 'node:stream/promises';
|
||||
import * as ssri from 'ssri';
|
||||
import tar from 'tar';
|
||||
|
||||
// /@cnpm%2ffoo
|
||||
// /@cnpm%2Ffoo
|
||||
@@ -20,6 +23,10 @@ export function getFullname(scope: string, name: string): string {
|
||||
return scope ? `${scope}/${name}` : name;
|
||||
}
|
||||
|
||||
export function cleanUserPrefix(username: string): string {
|
||||
return username.replace(/^.*:/, '');
|
||||
}
|
||||
|
||||
export async function calculateIntegrity(contentOrFile: Uint8Array | string) {
|
||||
let integrityObj;
|
||||
if (typeof contentOrFile === 'string') {
|
||||
@@ -53,3 +60,41 @@ export function detectInstallScript(manifest: any) {
|
||||
}
|
||||
return hasInstallScript;
|
||||
}
|
||||
|
||||
/** Check whether a version tarball (tgz) contains npm-shrinkwrap.json */
|
||||
export async function hasShrinkWrapInTgz(contentOrFile: Uint8Array | string): Promise<boolean> {
|
||||
let readable: Readable;
|
||||
if (typeof contentOrFile === 'string') {
|
||||
readable = createReadStream(contentOrFile);
|
||||
} else {
|
||||
readable = new Readable({
|
||||
read() {
|
||||
this.push(contentOrFile);
|
||||
this.push(null);
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
let hasShrinkWrap = false;
|
||||
const abortController = new AbortController();
|
||||
const parser = tar.t({
|
||||
// options.strict defaults to false, so recoverable errors (e.g. tar parse failures) are ignored,
// see https://github.com/isaacs/node-tar#warnings-and-errors
|
||||
onentry(entry) {
|
||||
if (entry.path === 'package/npm-shrinkwrap.json') {
|
||||
hasShrinkWrap = true;
|
||||
abortController.abort();
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
try {
|
||||
await pipeline(readable, parser, { signal: abortController.signal });
|
||||
return hasShrinkWrap;
|
||||
} catch (e) {
|
||||
if (e.code === 'ABORT_ERR') {
|
||||
return hasShrinkWrap;
|
||||
}
|
||||
throw Object.assign(new Error('[hasShrinkWrapInTgz] Fail to parse input file'), { cause: e });
|
||||
}
|
||||
}
|
||||
|
||||
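A short usage sketch for `hasShrinkWrapInTgz`, which accepts either a file path or the raw tarball bytes (the module path is an assumption):

```typescript
import { readFile } from 'node:fs/promises';
import { hasShrinkWrapInTgz } from './app/common/PackageUtil';

async function checkTarball(tgzPath: string) {
  // from a file on disk
  const fromPath = await hasShrinkWrapInTgz(tgzPath);
  // from an in-memory buffer, e.g. a freshly published tarball
  const fromBuffer = await hasShrinkWrapInTgz(await readFile(tgzPath));
  return fromPath && fromBuffer;
}
```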
@@ -6,7 +6,7 @@ export function isSyncWorkerRequest(ctx: EggContext) {
|
||||
if (!isSyncWorkerRequest) {
|
||||
const ua = ctx.headers['user-agent'] || '';
|
||||
// old sync client will request with these user-agent
|
||||
if (ua.indexOf('npm_service.cnpmjs.org/') !== -1) {
|
||||
if (ua.includes('npm_service.cnpmjs.org/')) {
|
||||
isSyncWorkerRequest = true;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ import crypto from 'crypto';
|
||||
import base from 'base-x';
|
||||
import { crc32 } from '@node-rs/crc32';
|
||||
import * as ssri from 'ssri';
|
||||
import UAParser from 'ua-parser-js';
|
||||
|
||||
const base62 = base('0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ');
|
||||
|
||||
@@ -39,3 +40,17 @@ export function checkIntegrity(plain: string, expectedIntegrity: string): boolea
|
||||
export function sha512(plain: string): string {
|
||||
return crypto.createHash('sha512').update(plain).digest('hex');
|
||||
}
|
||||
|
||||
export function getUAInfo(userAgent?: string) {
|
||||
if (!userAgent) return null;
|
||||
return new UAParser(userAgent);
|
||||
}
|
||||
|
||||
export function getBrowserTypeForWebauthn(userAgent?: string) {
|
||||
const ua = getUAInfo(userAgent);
|
||||
if (!ua) return null;
|
||||
const os = ua.getOS();
|
||||
if (os.name === 'iOS' || os.name === 'Android') return 'mobile';
|
||||
if (os.name === 'Mac OS') return ua.getBrowser().name;
|
||||
return null;
|
||||
}
|
||||
|
||||
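How the new user-agent helpers behave, as a rough sketch; the module path and the exact browser names returned by ua-parser-js are assumptions:

```typescript
import { getBrowserTypeForWebauthn } from './app/common/UserUtil';

// iOS and Android user agents are grouped as 'mobile'
const onIPhone = getBrowserTypeForWebauthn('Mozilla/5.0 (iPhone; CPU iPhone OS 16_0 like Mac OS X) AppleWebKit/605.1.15');
// onIPhone === 'mobile'

// on macOS the concrete browser name is returned, e.g. 'Safari' or 'Chrome'
const onMac = getBrowserTypeForWebauthn('Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/16.0 Safari/605.1.15');

// anything else (including a missing header) falls back to null
const unknown = getBrowserTypeForWebauthn(undefined);
```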
@@ -1,18 +1,20 @@
|
||||
import {
|
||||
ContextProto,
|
||||
SingletonProto,
|
||||
AccessLevel,
|
||||
Inject,
|
||||
} from '@eggjs/tegg';
|
||||
import { Redis } from 'ioredis';
|
||||
// FIXME: egg-redis should use ioredis v5
|
||||
// https://github.com/eggjs/egg-redis/issues/35
|
||||
import type { Redis } from 'ioredis';
|
||||
|
||||
const ONE_DAY = 3600 * 24;
|
||||
|
||||
@ContextProto({
|
||||
@SingletonProto({
|
||||
accessLevel: AccessLevel.PUBLIC,
|
||||
})
|
||||
export class CacheAdapter {
|
||||
@Inject()
|
||||
private readonly redis: Redis;
|
||||
private readonly redis: Redis; // injected by the egg-redis plugin
|
||||
|
||||
async setBytes(key: string, bytes: Buffer) {
|
||||
await this.redis.setex(key, ONE_DAY, bytes);
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { Readable } from 'stream';
|
||||
import {
|
||||
ContextProto,
|
||||
SingletonProto,
|
||||
AccessLevel,
|
||||
Inject,
|
||||
} from '@eggjs/tegg';
|
||||
@@ -12,7 +12,7 @@ import { IncomingHttpHeaders } from 'http';
|
||||
|
||||
const INSTANCE_NAME = 'nfsAdapter';
|
||||
|
||||
@ContextProto({
|
||||
@SingletonProto({
|
||||
name: INSTANCE_NAME,
|
||||
accessLevel: AccessLevel.PUBLIC,
|
||||
})
|
||||
@@ -49,9 +49,16 @@ export class NFSAdapter {
|
||||
await this.nfsClient.upload(file, { key: storeKey });
|
||||
}
|
||||
|
||||
@Pointcut(AsyncTimer)
|
||||
async downloadFile(storeKey: string, file: string, timeout: number) {
|
||||
this.logger.info('[%s:downloadFile] key: %s, file: %s, timeout: %s',
|
||||
INSTANCE_NAME, storeKey, file, timeout);
|
||||
await this.nfsClient.download(storeKey, file, { timeout });
|
||||
}
|
||||
|
||||
@Pointcut(AsyncTimer)
|
||||
async remove(storeKey: string) {
|
||||
this.logger.info('[%s:remove] key: %s, file: %s', INSTANCE_NAME, storeKey);
|
||||
this.logger.info('[%s:remove] key: %s', INSTANCE_NAME, storeKey);
|
||||
await this.nfsClient.remove(storeKey);
|
||||
}
|
||||
|
||||
|
||||
@@ -8,11 +8,16 @@ import {
|
||||
EggLogger,
|
||||
EggContextHttpClient,
|
||||
EggAppConfig,
|
||||
HttpClientRequestOptions,
|
||||
HttpClientResponse,
|
||||
} from 'egg';
|
||||
import { HttpMethod } from 'urllib';
|
||||
|
||||
type HttpMethod = HttpClientRequestOptions['method'];
|
||||
|
||||
const INSTANCE_NAME = 'npmRegistry';
|
||||
|
||||
export type RegistryResponse = { method: HttpMethod } & HttpClientResponse;
|
||||
|
||||
@ContextProto({
|
||||
name: INSTANCE_NAME,
|
||||
accessLevel: AccessLevel.PUBLIC,
|
||||
@@ -25,12 +30,18 @@ export class NPMRegistry {
|
||||
@Inject()
|
||||
private config: EggAppConfig;
|
||||
private timeout = 10000;
|
||||
public registryHost: string;
|
||||
|
||||
get registry(): string {
|
||||
return this.config.cnpmcore.sourceRegistry;
|
||||
return this.registryHost || this.config.cnpmcore.sourceRegistry;
|
||||
}
|
||||
|
||||
public async getFullManifests(fullname: string, retries = 3) {
|
||||
public setRegistryHost(registryHost = '') {
|
||||
this.registryHost = registryHost;
|
||||
}
|
||||
|
||||
public async getFullManifests(fullname: string, optionalConfig?: {retries?:number, remoteAuthToken?:string}): Promise<RegistryResponse> {
|
||||
let retries = optionalConfig?.retries || 3;
|
||||
// set query t=timestamp, make sure CDN cache disable
|
||||
// cache=0 is sync worker request flag
|
||||
const url = `${this.registry}/${encodeURIComponent(fullname)}?t=${Date.now()}&cache=0`;
|
||||
@@ -39,7 +50,8 @@ export class NPMRegistry {
|
||||
try {
|
||||
// large package: https://r.cnpmjs.org/%40procore%2Fcore-icons
|
||||
// https://r.cnpmjs.org/intraactive-sdk-ui 44s
|
||||
return await this.request('GET', url, undefined, { timeout: 120000 });
|
||||
const authorization = this.genAuthorizationHeader(optionalConfig?.remoteAuthToken);
|
||||
return await this.request('GET', url, undefined, { timeout: 120000, headers: { authorization } });
|
||||
} catch (err: any) {
|
||||
if (err.name === 'ResponseTimeoutError') throw err;
|
||||
lastError = err;
|
||||
@@ -47,35 +59,39 @@ export class NPMRegistry {
|
||||
retries--;
|
||||
if (retries > 0) {
|
||||
// sleep 1s ~ 4s in random
|
||||
await setTimeout(1000 + Math.random() * 4000);
|
||||
const delay = process.env.NODE_ENV === 'test' ? 1 : 1000 + Math.random() * 4000;
|
||||
await setTimeout(delay);
|
||||
}
|
||||
}
|
||||
throw lastError;
|
||||
}
|
||||
|
||||
// app.put('/:name/sync', sync.sync);
|
||||
public async createSyncTask(fullname: string) {
|
||||
public async createSyncTask(fullname: string, optionalConfig?: { remoteAuthToken?:string}): Promise<RegistryResponse> {
|
||||
const authorization = this.genAuthorizationHeader(optionalConfig?.remoteAuthToken);
|
||||
const url = `${this.registry}/${encodeURIComponent(fullname)}/sync?sync_upstream=true&nodeps=true`;
|
||||
// {
|
||||
// ok: true,
|
||||
// logId: logId
|
||||
// };
|
||||
return await this.request('PUT', url);
|
||||
return await this.request('PUT', url, undefined, { authorization });
|
||||
}
|
||||
|
||||
// app.get('/:name/sync/log/:id', sync.getSyncLog);
|
||||
public async getSyncTask(fullname: string, id: string, offset: number) {
|
||||
public async getSyncTask(fullname: string, id: string, offset: number, optionalConfig?:{ remoteAuthToken?:string }): Promise<RegistryResponse> {
|
||||
const authorization = this.genAuthorizationHeader(optionalConfig?.remoteAuthToken);
|
||||
const url = `${this.registry}/${encodeURIComponent(fullname)}/sync/log/${id}?offset=${offset}`;
|
||||
// { ok: true, syncDone: syncDone, log: log }
|
||||
return await this.request('GET', url);
|
||||
return await this.request('GET', url, undefined, { authorization });
|
||||
}
|
||||
|
||||
public async getDownloadRanges(registry: string, fullname: string, start: string, end: string) {
|
||||
public async getDownloadRanges(registry: string, fullname: string, start: string, end: string, optionalConfig?:{ remoteAuthToken?:string }): Promise<RegistryResponse> {
|
||||
const authorization = this.genAuthorizationHeader(optionalConfig?.remoteAuthToken);
|
||||
const url = `${registry}/downloads/range/${start}:${end}/${encodeURIComponent(fullname)}`;
|
||||
return await this.request('GET', url);
|
||||
return await this.request('GET', url, undefined, { authorization });
|
||||
}
|
||||
|
||||
private async request(method: HttpMethod, url: string, params?: object, options?: object) {
|
||||
private async request(method: HttpMethod, url: string, params?: object, options?: object): Promise<RegistryResponse> {
|
||||
const res = await this.httpclient.request(url, {
|
||||
method,
|
||||
data: params,
|
||||
@@ -85,12 +101,15 @@ export class NPMRegistry {
|
||||
followRedirect: true,
|
||||
gzip: true,
|
||||
...options,
|
||||
});
|
||||
}) as HttpClientResponse;
|
||||
this.logger.info('[NPMRegistry:request] %s %s, status: %s', method, url, res.status);
|
||||
return {
|
||||
method,
|
||||
url,
|
||||
...res,
|
||||
};
|
||||
}
|
||||
|
||||
private genAuthorizationHeader(remoteAuthToken?:string) {
|
||||
return remoteAuthToken ? `Bearer ${remoteAuthToken}` : '';
|
||||
}
|
||||
}
|
||||
|
||||
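A call-site sketch for the extended NPMRegistry client; the service class, upstream URL, and environment variable are illustrative, and the module path is an assumption:

```typescript
import { ContextProto, Inject } from '@eggjs/tegg';
import { NPMRegistry } from './app/common/adapter/NPMRegistry';

@ContextProto()
export class UpstreamSyncExample {
  @Inject()
  private readonly npmRegistry: NPMRegistry;

  async fetchManifests(fullname: string) {
    // optionally point the client at an upstream other than config.cnpmcore.sourceRegistry
    this.npmRegistry.setRegistryHost('https://registry.example.com');
    // retries and remoteAuthToken are both optional; the token is sent as `Bearer <token>`
    const res = await this.npmRegistry.getFullManifests(fullname, {
      retries: 2,
      remoteAuthToken: process.env.UPSTREAM_REGISTRY_TOKEN,
    });
    return res.data;
  }
}
```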
@@ -1,32 +0,0 @@
|
||||
import {
|
||||
AccessLevel,
|
||||
Inject,
|
||||
ContextProto,
|
||||
} from '@eggjs/tegg';
|
||||
import { Redis } from 'ioredis';
|
||||
|
||||
@ContextProto({
|
||||
accessLevel: AccessLevel.PUBLIC,
|
||||
})
|
||||
export class QueueAdapter {
|
||||
@Inject()
|
||||
private readonly redis: Redis;
|
||||
|
||||
private getQueueName(key: string) {
|
||||
return `CNPMCORE_Q_${key}`;
|
||||
}
|
||||
|
||||
async push<T>(key: string, item: T) {
|
||||
return await this.redis.lpush(this.getQueueName(key), JSON.stringify(item));
|
||||
}
|
||||
|
||||
async pop<T>(key: string) {
|
||||
const json = await this.redis.rpop(this.getQueueName(key));
|
||||
if (!json) return null;
|
||||
return JSON.parse(json) as T;
|
||||
}
|
||||
|
||||
async length(key: string) {
|
||||
return await this.redis.llen(this.getQueueName(key));
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,7 @@
|
||||
import { EggContextHttpClient, EggLogger } from 'egg';
|
||||
import { BinaryTaskConfig } from '../../../../config/binaries';
|
||||
import { ImplDecorator, Inject, QualifierImplDecoratorUtil } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import { EggHttpClient, EggLogger } from 'egg';
|
||||
import { BinaryName, BinaryTaskConfig } from '../../../../config/binaries';
|
||||
|
||||
export type BinaryItem = {
|
||||
name: string;
|
||||
@@ -15,22 +17,21 @@ export type FetchResult = {
|
||||
nextParams?: any;
|
||||
};
|
||||
|
||||
export const BINARY_ADAPTER_ATTRIBUTE = Symbol('BINARY_ADAPTER_ATTRIBUTE');
|
||||
|
||||
export abstract class AbstractBinary {
|
||||
protected httpclient: EggContextHttpClient;
|
||||
@Inject()
|
||||
protected logger: EggLogger;
|
||||
protected binaryConfig: BinaryTaskConfig;
|
||||
|
||||
constructor(httpclient: EggContextHttpClient, logger: EggLogger, binaryConfig: BinaryTaskConfig) {
|
||||
this.httpclient = httpclient;
|
||||
this.logger = logger;
|
||||
this.binaryConfig = binaryConfig;
|
||||
}
|
||||
@Inject()
|
||||
protected httpclient: EggHttpClient;
|
||||
|
||||
abstract fetch(dir: string, params?: any): Promise<FetchResult | undefined>;
|
||||
abstract initFetch(binaryName: BinaryName): Promise<void>;
|
||||
abstract fetch(dir: string, binaryName: BinaryName): Promise<FetchResult | undefined>;
|
||||
|
||||
protected async requestXml(url: string) {
|
||||
const { status, data, headers } = await this.httpclient.request(url, {
|
||||
timeout: 20000,
|
||||
timeout: 30000,
|
||||
followRedirect: true,
|
||||
gzip: true,
|
||||
});
|
||||
@@ -44,7 +45,7 @@ export abstract class AbstractBinary {
|
||||
|
||||
protected async requestJSON(url: string) {
|
||||
const { status, data, headers } = await this.httpclient.request(url, {
|
||||
timeout: 20000,
|
||||
timeout: 30000,
|
||||
dataType: 'json',
|
||||
followRedirect: true,
|
||||
gzip: true,
|
||||
@@ -76,8 +77,8 @@ export abstract class AbstractBinary {
|
||||
return [ 'darwin', 'linux', 'win32' ];
|
||||
}
|
||||
|
||||
protected listNodeArchs() {
|
||||
if (this.binaryConfig.options?.nodeArchs) return this.binaryConfig.options.nodeArchs;
|
||||
protected listNodeArchs(binaryConfig?: BinaryTaskConfig) {
|
||||
if (binaryConfig?.options?.nodeArchs) return binaryConfig.options.nodeArchs;
|
||||
// https://nodejs.org/api/os.html#osarch
|
||||
return {
|
||||
linux: [ 'arm', 'arm64', 's390x', 'ia32', 'x64' ],
|
||||
@@ -95,3 +96,6 @@ export abstract class AbstractBinary {
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export const BinaryAdapter: ImplDecorator<AbstractBinary, typeof BinaryType> =
|
||||
QualifierImplDecoratorUtil.generatorDecorator(AbstractBinary, BINARY_ADAPTER_ATTRIBUTE);
|
||||
|
||||
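With the refactor above, a binary adapter no longer receives `httpclient`/`logger`/`binaryConfig` through its constructor; it uses the `@Inject()` fields declared on the abstract base, resets any cached state in `initFetch()`, and receives the binary name in `fetch()`. A minimal custom adapter sketch follows; `BinaryType.Example` and the release endpoint are hypothetical and only illustrate the registration pattern:

```typescript
import { SingletonProto } from '@eggjs/tegg';
import { BinaryType } from './app/common/enum/Binary';
import { BinaryName } from './config/binaries';
import { AbstractBinary, BinaryAdapter, FetchResult } from './app/common/adapter/binary/AbstractBinary';

@SingletonProto()
@BinaryAdapter(BinaryType.Example) // hypothetical enum member, added only for this sketch
export class ExampleBinary extends AbstractBinary {
  async initFetch(_binaryName: BinaryName) {
    // nothing cached between syncs for this adapter
  }

  async fetch(dir: string, binaryName: BinaryName): Promise<FetchResult | undefined> {
    // hypothetical JSON listing endpoint for the binary being synced
    const data = await this.requestJSON(`https://example.com/releases/${binaryName}${dir}`);
    if (!Array.isArray(data)) {
      this.logger.warn('[ExampleBinary.fetch] unexpected response: %j', data);
      return;
    }
    return {
      items: data.map(item => ({
        name: item.isDir ? `${item.name}/` : item.name,
        isDir: item.isDir,
        url: item.url ?? '',
        size: item.size ?? '-',
        date: item.date,
      })),
      nextParams: null,
    };
  }
}
```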
@@ -1,16 +1,22 @@
|
||||
import { EggContextHttpClient, EggLogger } from 'egg';
|
||||
import { AbstractBinary, FetchResult, BinaryItem } from './AbstractBinary';
|
||||
import { BinaryTaskConfig } from '../../../../config/binaries';
|
||||
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
|
||||
import { Inject, SingletonProto } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import { EggAppConfig } from 'egg';
|
||||
|
||||
@SingletonProto()
|
||||
@BinaryAdapter(BinaryType.Api)
|
||||
export class ApiBinary extends AbstractBinary {
|
||||
private apiUrl: string;
|
||||
constructor(httpclient: EggContextHttpClient, logger: EggLogger, binaryConfig: BinaryTaskConfig, apiUrl: string) {
|
||||
super(httpclient, logger, binaryConfig);
|
||||
this.apiUrl = apiUrl;
|
||||
@Inject()
|
||||
private readonly config: EggAppConfig;
|
||||
|
||||
async initFetch() {
|
||||
// do nothing
|
||||
return;
|
||||
}
|
||||
|
||||
async fetch(dir: string): Promise<FetchResult | undefined> {
|
||||
const url = `${this.apiUrl}/${this.binaryConfig.category}${dir}`;
|
||||
async fetch(dir: string, binaryName: string): Promise<FetchResult | undefined> {
|
||||
const apiUrl = this.config.cnpmcore.syncBinaryFromAPISource || `${this.config.cnpmcore.sourceRegistry}/-/binary`;
|
||||
const url = `${apiUrl}/${binaryName}${dir}`;
|
||||
const data = await this.requestJSON(url);
|
||||
if (!Array.isArray(data)) {
|
||||
this.logger.warn('[ApiBinary.fetch:response-data-not-array] data: %j', data);
|
||||
|
||||
@@ -1,16 +1,27 @@
|
||||
import { SingletonProto } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import binaries, { BinaryName, BinaryTaskConfig } from '../../../../config/binaries';
|
||||
import path from 'path';
|
||||
import { AbstractBinary, FetchResult, BinaryItem } from './AbstractBinary';
|
||||
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
|
||||
|
||||
@SingletonProto()
|
||||
@BinaryAdapter(BinaryType.Bucket)
|
||||
export class BucketBinary extends AbstractBinary {
|
||||
async fetch(dir: string): Promise<FetchResult | undefined> {
|
||||
// /foo/ => foo/
|
||||
const subDir = dir.substring(1);
|
||||
const url = `${this.binaryConfig.distUrl}?delimiter=/&prefix=${encodeURIComponent(subDir)}`;
|
||||
const xml = await this.requestXml(url);
|
||||
return { items: this.parseItems(xml, dir), nextParams: null };
|
||||
async initFetch() {
|
||||
// do nothing
|
||||
return;
|
||||
}
|
||||
|
||||
protected parseItems(xml: string, dir: string) {
|
||||
async fetch(dir: string, binaryName: BinaryName): Promise<FetchResult | undefined> {
|
||||
// /foo/ => foo/
|
||||
const binaryConfig = binaries[binaryName];
|
||||
const subDir = dir.substring(1);
|
||||
const url = `${binaryConfig.distUrl}?delimiter=/&prefix=${encodeURIComponent(subDir)}`;
|
||||
const xml = await this.requestXml(url);
|
||||
return { items: this.parseItems(xml, dir, binaryConfig), nextParams: null };
|
||||
}
|
||||
|
||||
protected parseItems(xml: string, dir: string, binaryConfig: BinaryTaskConfig): BinaryItem[] {
|
||||
const items: BinaryItem[] = [];
|
||||
// https://nwjs2.s3.amazonaws.com/?prefix=v0.59.0%2Fx64%2F
|
||||
// https://chromedriver.storage.googleapis.com/?delimiter=/&prefix=
|
||||
@@ -35,7 +46,7 @@ export class BucketBinary extends AbstractBinary {
|
||||
items.push({
|
||||
name,
|
||||
isDir: false,
|
||||
url: `${this.binaryConfig.distUrl}${fullname}`,
|
||||
url: `${binaryConfig.distUrl}${fullname}`,
|
||||
size,
|
||||
date,
|
||||
});
|
||||
@@ -50,7 +61,7 @@ export class BucketBinary extends AbstractBinary {
|
||||
const fullname = m[1].trim();
|
||||
const name = `${path.basename(fullname)}/`;
|
||||
const fullpath = `${dir}${name}`;
|
||||
if (this.binaryConfig.ignoreDirs?.includes(fullpath)) continue;
|
||||
if (binaryConfig.ignoreDirs?.includes(fullpath)) continue;
|
||||
let date = '-';
|
||||
// root dir children, should set date to '2022-04-19T01:00:00Z', sync per hour
|
||||
if (dir === '/') {
|
||||
|
||||
69 app/common/adapter/binary/ChromeForTestingBinary.ts (new file)
@@ -0,0 +1,69 @@
|
||||
import { SingletonProto } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
|
||||
|
||||
@SingletonProto()
|
||||
@BinaryAdapter(BinaryType.ChromeForTesting)
|
||||
export class ChromeForTestingBinary extends AbstractBinary {
|
||||
private dirItems?: {
|
||||
[key: string]: BinaryItem[];
|
||||
};
|
||||
|
||||
async initFetch() {
|
||||
this.dirItems = undefined;
|
||||
}
|
||||
|
||||
async fetch(dir: string): Promise<FetchResult | undefined> {
|
||||
if (!this.dirItems) {
|
||||
this.dirItems = {};
|
||||
this.dirItems['/'] = [];
|
||||
let chromeVersion = '';
|
||||
|
||||
// exports.PUPPETEER_REVISIONS = Object.freeze({
|
||||
// chrome: '113.0.5672.63',
|
||||
// firefox: 'latest',
|
||||
// });
|
||||
const unpkgURL = 'https://unpkg.com/puppeteer-core@latest/lib/cjs/puppeteer/revisions.js';
|
||||
const text = await this.requestXml(unpkgURL);
|
||||
const m = /chrome:\s+\'([\d\.]+)\'\,/.exec(text);
|
||||
if (m) {
|
||||
chromeVersion = m[1];
|
||||
}
|
||||
|
||||
const platforms = [ 'linux64', 'mac-arm64', 'mac-x64', 'win32', 'win64' ];
|
||||
const date = new Date().toISOString();
|
||||
this.dirItems['/'].push({
|
||||
name: `${chromeVersion}/`,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: true,
|
||||
url: '',
|
||||
});
|
||||
this.dirItems[`/${chromeVersion}/`] = [];
|
||||
|
||||
for (const platform of platforms) {
|
||||
this.dirItems[`/${chromeVersion}/`].push({
|
||||
name: `${platform}/`,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: true,
|
||||
url: '',
|
||||
});
|
||||
|
||||
// https://edgedl.me.gvt1.com/edgedl/chrome/chrome-for-testing/113.0.5672.63/mac-arm64/chrome-mac-arm64.zip
|
||||
const name = `chrome-${platform}.zip`;
|
||||
this.dirItems[`/${chromeVersion}/${platform}/`] = [
|
||||
{
|
||||
name,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: false,
|
||||
url: `https://edgedl.me.gvt1.com/edgedl/chrome/chrome-for-testing/${chromeVersion}/${platform}/${name}`,
|
||||
},
|
||||
];
|
||||
}
|
||||
}
|
||||
|
||||
return { items: this.dirItems[dir], nextParams: null };
|
||||
}
|
||||
}
|
||||
@@ -1,9 +1,17 @@
|
||||
import { AbstractBinary, FetchResult, BinaryItem } from './AbstractBinary';
|
||||
import { SingletonProto } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
|
||||
|
||||
@SingletonProto()
|
||||
@BinaryAdapter(BinaryType.Cypress)
|
||||
export class CypressBinary extends AbstractBinary {
|
||||
private dirItems: {
|
||||
private dirItems?: {
|
||||
[key: string]: BinaryItem[];
|
||||
};
|
||||
} | null;
|
||||
|
||||
async initFetch() {
|
||||
this.dirItems = undefined;
|
||||
}
|
||||
|
||||
async fetch(dir: string): Promise<FetchResult | undefined> {
|
||||
if (!this.dirItems) {
|
||||
@@ -31,10 +39,11 @@ export class CypressBinary extends AbstractBinary {
|
||||
// "https://cdn.cypress.io/desktop/4.0.0/darwin-x64/cypress.zip"
|
||||
// "https://cdn.cypress.io/desktop/4.0.0/linux-x64/cypress.zip"
|
||||
// "https://cdn.cypress.io/desktop/4.0.0/win32-x64/cypress.zip"
|
||||
// "https://cdn.cypress.io/desktop/9.2.0/darwin-arm64/cypress.zip"
|
||||
// "https://cdn.cypress.io/desktop/9.2.0/darwin-x64/cypress.zip"
|
||||
// "https://cdn.cypress.io/desktop/9.2.0/linux-x64/cypress.zip"
|
||||
// "https://cdn.cypress.io/desktop/9.2.0/win32-x64/cypress.zip"
|
||||
const platforms = [ 'darwin-x64', 'linux-x64', 'win32-x64' ];
|
||||
const platforms = [ 'darwin-x64', 'darwin-arm64', 'linux-x64', 'win32-x64' ];
|
||||
for (const platform of platforms) {
|
||||
this.dirItems[subDir].push({
|
||||
name: `${platform}/`,
|
||||
|
||||
@@ -1,9 +1,14 @@
|
||||
import { BinaryItem, FetchResult } from './AbstractBinary';
|
||||
import { SingletonProto } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import binaries, { BinaryName } from '../../../../config/binaries';
|
||||
import { BinaryAdapter, BinaryItem, FetchResult } from './AbstractBinary';
|
||||
import { GithubBinary } from './GithubBinary';
|
||||
|
||||
@SingletonProto()
|
||||
@BinaryAdapter(BinaryType.Electron)
|
||||
export class ElectronBinary extends GithubBinary {
|
||||
async fetch(dir: string): Promise<FetchResult | undefined> {
|
||||
const releases = await this.initReleases();
|
||||
async fetch(dir: string, binaryName: BinaryName = 'electron'): Promise<FetchResult | undefined> {
|
||||
const releases = await this.initReleases(binaryName, binaries.electron);
|
||||
if (!releases) return;
|
||||
|
||||
let items: BinaryItem[] = [];
|
||||
@@ -30,7 +35,7 @@ export class ElectronBinary extends GithubBinary {
|
||||
} else {
|
||||
for (const item of releases) {
|
||||
if (dir === `/${item.tag_name}/` || dir === `/${item.tag_name.substring(1)}/`) {
|
||||
items = this.formatItems(item);
|
||||
items = this.formatItems(item, binaries.electron);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,30 +1,44 @@
|
||||
import { AbstractBinary, FetchResult, BinaryItem } from './AbstractBinary';
|
||||
import { SingletonProto } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import binaries, { BinaryName, BinaryTaskConfig } from '../../../../config/binaries';
|
||||
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
|
||||
|
||||
@SingletonProto()
|
||||
@BinaryAdapter(BinaryType.GitHub)
|
||||
export class GithubBinary extends AbstractBinary {
|
||||
private releases?: any[];
|
||||
private releases: Record<string, any[]> = {};
|
||||
|
||||
protected async initReleases() {
|
||||
if (!this.releases) {
|
||||
async initFetch(binaryName: BinaryName) {
|
||||
delete this.releases[binaryName];
|
||||
}
|
||||
|
||||
protected async initReleases(binaryName: BinaryName, binaryConfig: BinaryTaskConfig) {
|
||||
if (!this.releases[binaryName]) {
|
||||
// https://docs.github.com/en/rest/reference/releases get three pages
|
||||
// https://api.github.com/repos/electron/electron/releases
|
||||
// https://api.github.com/repos/electron/electron/releases?per_page=100&page=3
|
||||
let releases: any[] = [];
|
||||
const maxPage = this.binaryConfig.options?.maxPage || 1;
|
||||
const maxPage = binaryConfig.options?.maxPage || 1;
|
||||
for (let i = 0; i < maxPage; i++) {
|
||||
const url = `https://api.github.com/repos/${this.binaryConfig.repo}/releases?per_page=100&page=${i + 1}`;
|
||||
const url = `https://api.github.com/repos/${binaryConfig.repo}/releases?per_page=100&page=${i + 1}`;
|
||||
const data = await this.requestJSON(url);
|
||||
if (!Array.isArray(data)) {
|
||||
// {"message":"API rate limit exceeded for 47.57.239.54. (But here's the good news: Authenticated requests get a higher rate limit. Check out the documentation for more details.)","documentation_url":"https://docs.github.com/rest/overview/resources-in-the-rest-api#rate-limiting"}
|
||||
if (typeof data?.message === 'string' && data.message.includes('rate limit')) {
|
||||
this.logger.info('[GithubBinary.fetch:hit-rate-limit] skip sync this time, data: %j, url: %s', data, url);
|
||||
return;
|
||||
}
|
||||
this.logger.warn('[GithubBinary.fetch:response-data-not-array] data: %j, url: %s', data, url);
|
||||
return;
|
||||
}
|
||||
releases = releases.concat(data);
|
||||
}
|
||||
this.releases = releases;
|
||||
this.releases[binaryName] = releases;
|
||||
}
|
||||
return this.releases;
|
||||
return this.releases[binaryName];
|
||||
}
|
||||
|
||||
protected formatItems(releaseItem: any) {
|
||||
protected formatItems(releaseItem: any, binaryConfig: BinaryTaskConfig) {
|
||||
const items: BinaryItem[] = [];
|
||||
// 200MB
|
||||
const maxFileSize = 1024 * 1024 * 200;
|
||||
@@ -45,7 +59,7 @@ export class GithubBinary extends AbstractBinary {
|
||||
items.push({
|
||||
name: `${releaseItem.tag_name}.tar.gz`,
|
||||
isDir: false,
|
||||
url: `https://github.com/${this.binaryConfig.repo}/archive/${releaseItem.tag_name}.tar.gz`,
|
||||
url: `https://github.com/${binaryConfig.repo}/archive/${releaseItem.tag_name}.tar.gz`,
|
||||
size: '-',
|
||||
date: releaseItem.published_at,
|
||||
});
|
||||
@@ -54,7 +68,7 @@ export class GithubBinary extends AbstractBinary {
|
||||
items.push({
|
||||
name: `${releaseItem.tag_name}.zip`,
|
||||
isDir: false,
|
||||
url: `https://github.com/${this.binaryConfig.repo}/archive/${releaseItem.tag_name}.zip`,
|
||||
url: `https://github.com/${binaryConfig.repo}/archive/${releaseItem.tag_name}.zip`,
|
||||
size: '-',
|
||||
date: releaseItem.published_at,
|
||||
});
|
||||
@@ -62,8 +76,9 @@ export class GithubBinary extends AbstractBinary {
|
||||
return items;
|
||||
}
|
||||
|
||||
async fetch(dir: string): Promise<FetchResult | undefined> {
|
||||
const releases = await this.initReleases();
|
||||
async fetch(dir: string, binaryName: BinaryName): Promise<FetchResult | undefined> {
|
||||
const binaryConfig = binaries[binaryName];
|
||||
const releases = await this.initReleases(binaryName, binaryConfig);
|
||||
if (!releases) return;
|
||||
|
||||
let items: BinaryItem[] = [];
|
||||
@@ -80,7 +95,7 @@ export class GithubBinary extends AbstractBinary {
|
||||
} else {
|
||||
for (const item of releases) {
|
||||
if (dir === `/${item.tag_name}/`) {
|
||||
items = this.formatItems(item);
|
||||
items = this.formatItems(item, binaryConfig);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,94 +1,100 @@
|
||||
import { AbstractBinary, FetchResult, BinaryItem } from './AbstractBinary';
|
||||
import { SingletonProto } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import binaries, { BinaryName } from '../../../../config/binaries';
|
||||
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
|
||||
|
||||
@SingletonProto()
|
||||
@BinaryAdapter(BinaryType.Imagemin)
|
||||
export class ImageminBinary extends AbstractBinary {
|
||||
private dirItems: {
|
||||
[key: string]: BinaryItem[];
|
||||
};
|
||||
async initFetch() {
|
||||
// do nothing
|
||||
return;
|
||||
}
|
||||
|
||||
async fetch(dir: string): Promise<FetchResult | undefined> {
|
||||
if (!this.dirItems) {
|
||||
this.dirItems = {};
|
||||
const npmPackageName = this.binaryConfig.options?.npmPackageName ?? this.binaryConfig.category;
|
||||
const pkgUrl = `https://registry.npmjs.com/${npmPackageName}`;
|
||||
const data = await this.requestJSON(pkgUrl);
|
||||
this.dirItems = {};
|
||||
this.dirItems['/'] = [];
|
||||
// mini version 4.0.0
|
||||
// https://github.com/imagemin/jpegtran-bin/blob/v4.0.0/lib/index.js
|
||||
// https://github.com/imagemin/pngquant-bin/blob/v4.0.0/lib/index.js
|
||||
for (const version in data.versions) {
|
||||
const major = parseInt(version.split('.', 1)[0]);
|
||||
if (major < 4) continue;
|
||||
// >= 4.0.0
|
||||
const date = data.time[version];
|
||||
// https://raw.githubusercontent.com/imagemin/jpegtran-bin/v${pkg.version}/vendor/`
|
||||
this.dirItems['/'].push({
|
||||
name: `v${version}/`,
|
||||
async fetch(dir: string, binaryName: BinaryName): Promise<FetchResult | undefined> {
|
||||
const binaryConfig = binaries[binaryName];
|
||||
const dirItems: {
|
||||
[key: string]: BinaryItem[];
|
||||
} = {};
|
||||
const npmPackageName = binaryConfig.options?.npmPackageName ?? binaryName;
|
||||
const pkgUrl = `https://registry.npmjs.com/${npmPackageName}`;
|
||||
const data = await this.requestJSON(pkgUrl);
|
||||
dirItems['/'] = [];
|
||||
// mini version 4.0.0
|
||||
// https://github.com/imagemin/jpegtran-bin/blob/v4.0.0/lib/index.js
|
||||
// https://github.com/imagemin/pngquant-bin/blob/v4.0.0/lib/index.js
|
||||
for (const version in data.versions) {
|
||||
const major = parseInt(version.split('.', 1)[0]);
|
||||
if (major < 4) continue;
|
||||
// >= 4.0.0
|
||||
const date = data.time[version];
|
||||
// https://raw.githubusercontent.com/imagemin/jpegtran-bin/v${pkg.version}/vendor/`
|
||||
dirItems['/'].push({
|
||||
name: `v${version}/`,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: true,
|
||||
url: '',
|
||||
});
|
||||
const versionDir = `/v${version}/`;
|
||||
dirItems[versionDir] = [];
|
||||
dirItems[versionDir].push({
|
||||
name: 'vendor/',
|
||||
date,
|
||||
size: '-',
|
||||
isDir: true,
|
||||
url: '',
|
||||
});
|
||||
const versionVendorDir = `/v${version}/vendor/`;
|
||||
dirItems[versionVendorDir] = [];
|
||||
for (const platform of binaryConfig.options!.nodePlatforms!) {
|
||||
dirItems[versionVendorDir].push({
|
||||
name: `${platform}/`,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: true,
|
||||
url: '',
|
||||
});
|
||||
const versionDir = `/v${version}/`;
|
||||
this.dirItems[versionDir] = [];
|
||||
this.dirItems[versionDir].push({
|
||||
name: 'vendor/',
|
||||
date,
|
||||
size: '-',
|
||||
isDir: true,
|
||||
url: '',
|
||||
});
|
||||
const versionVendorDir = `/v${version}/vendor/`;
|
||||
this.dirItems[versionVendorDir] = [];
|
||||
for (const platform of this.binaryConfig.options!.nodePlatforms!) {
|
||||
this.dirItems[versionVendorDir].push({
|
||||
name: `${platform}/`,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: true,
|
||||
url: '',
|
||||
});
|
||||
const platformDir = `/v${version}/vendor/${platform}/`;
|
||||
this.dirItems[platformDir] = [];
|
||||
const archs = this.binaryConfig.options!.nodeArchs![platform];
|
||||
if (archs.length === 0) {
|
||||
for (const name of this.binaryConfig.options!.binFiles![platform]) {
|
||||
this.dirItems[platformDir].push({
|
||||
const platformDir = `/v${version}/vendor/${platform}/`;
|
||||
dirItems[platformDir] = [];
|
||||
const archs = binaryConfig.options!.nodeArchs![platform];
|
||||
if (archs.length === 0) {
|
||||
for (const name of binaryConfig.options!.binFiles![platform]) {
|
||||
dirItems[platformDir].push({
|
||||
name,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: false,
|
||||
url: `${binaryConfig.distUrl}/${binaryConfig.repo}${platformDir}${name}`,
|
||||
ignoreDownloadStatuses: [ 404 ],
|
||||
});
|
||||
}
|
||||
} else {
|
||||
for (const arch of archs) {
|
||||
dirItems[platformDir].push({
|
||||
name: `${arch}/`,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: true,
|
||||
url: '',
|
||||
});
|
||||
const platformArchDir = `/v${version}/vendor/${platform}/${arch}/`;
|
||||
dirItems[platformArchDir] = [];
|
||||
|
||||
for (const name of binaryConfig.options!.binFiles![platform]) {
|
||||
dirItems[platformArchDir].push({
|
||||
name,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: false,
|
||||
url: `${this.binaryConfig.distUrl}/${this.binaryConfig.repo}${platformDir}${name}`,
|
||||
url: `${binaryConfig.distUrl}/${binaryConfig.repo}${platformArchDir}${name}`,
|
||||
ignoreDownloadStatuses: [ 404 ],
|
||||
});
|
||||
}
|
||||
} else {
|
||||
for (const arch of archs) {
|
||||
this.dirItems[platformDir].push({
|
||||
name: `${arch}/`,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: true,
|
||||
url: '',
|
||||
});
|
||||
const platformArchDir = `/v${version}/vendor/${platform}/${arch}/`;
|
||||
this.dirItems[platformArchDir] = [];
|
||||
|
||||
for (const name of this.binaryConfig.options!.binFiles![platform]) {
|
||||
this.dirItems[platformArchDir].push({
|
||||
name,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: false,
|
||||
url: `${this.binaryConfig.distUrl}/${this.binaryConfig.repo}${platformArchDir}${name}`,
|
||||
ignoreDownloadStatuses: [ 404 ],
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return { items: this.dirItems[dir] };
|
||||
return { items: dirItems[dir] };
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,19 @@
|
||||
import { AbstractBinary, FetchResult, BinaryItem } from './AbstractBinary';
|
||||
import { SingletonProto } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import binaries, { BinaryName } from '../../../../config/binaries';
|
||||
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
|
||||
|
||||
@SingletonProto()
|
||||
@BinaryAdapter(BinaryType.Node)
|
||||
export class NodeBinary extends AbstractBinary {
|
||||
async fetch(dir: string): Promise<FetchResult | undefined> {
|
||||
const url = `${this.binaryConfig.distUrl}${dir}`;
|
||||
async initFetch() {
|
||||
// do nothing
|
||||
return;
|
||||
}
|
||||
|
||||
async fetch(dir: string, binaryName: BinaryName): Promise<FetchResult | undefined> {
|
||||
const binaryConfig = binaries[binaryName];
|
||||
const url = `${binaryConfig.distUrl}${dir}`;
|
||||
const html = await this.requestXml(url);
|
||||
// <a href="v9.8.0/">v9.8.0/</a> 08-Mar-2018 01:55 -
|
||||
// <a href="v9.9.0/">v9.9.0/</a> 21-Mar-2018 15:47 -
|
||||
@@ -20,7 +31,7 @@ export class NodeBinary extends AbstractBinary {
|
||||
const date = m[2];
|
||||
const size = m[3];
|
||||
if (size === '0') continue;
|
||||
if (this.binaryConfig.ignoreFiles?.includes(`${dir}${name}`)) continue;
|
||||
if (binaryConfig.ignoreFiles?.includes(`${dir}${name}`)) continue;
|
||||
|
||||
items.push({
|
||||
name,
|
||||
@@ -28,6 +39,7 @@ export class NodeBinary extends AbstractBinary {
|
||||
url: fileUrl,
|
||||
size,
|
||||
date,
|
||||
ignoreDownloadStatuses: binaryConfig.options?.ignoreDownloadStatuses,
|
||||
});
|
||||
}
|
||||
return { items, nextParams: null };
|
||||
|
||||
@@ -1,171 +1,229 @@
|
||||
import { AbstractBinary, FetchResult, BinaryItem } from './AbstractBinary';
|
||||
import { SingletonProto } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import binaries, { BinaryName } from '../../../../config/binaries';
|
||||
import { join } from 'path';
|
||||
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
|
||||
|
||||
@SingletonProto()
|
||||
@BinaryAdapter(BinaryType.NodePreGyp)
|
||||
export class NodePreGypBinary extends AbstractBinary {
|
||||
private dirItems: {
|
||||
[key: string]: BinaryItem[];
|
||||
};
|
||||
async initFetch() {
|
||||
// do nothing
|
||||
return;
|
||||
}
|
||||
|
||||
// https://github.com/mapbox/node-pre-gyp
|
||||
async fetch(dir: string): Promise<FetchResult | undefined> {
|
||||
if (!this.dirItems) {
|
||||
this.dirItems = {};
|
||||
const pkgUrl = `https://registry.npmjs.com/${this.binaryConfig.category}`;
|
||||
const data = await this.requestJSON(pkgUrl);
|
||||
this.dirItems = {};
|
||||
this.dirItems['/'] = [];
|
||||
const nodeABIVersions = await this.listNodeABIVersions();
|
||||
const nodePlatforms = this.listNodePlatforms();
|
||||
const nodeArchs = this.listNodeArchs();
|
||||
const nodeLibcs = this.listNodeLibcs();
|
||||
for (const version in data.versions) {
|
||||
const date = data.time[version];
|
||||
const pkgVersion = data.versions[version];
|
||||
if (!pkgVersion.binary) continue;
|
||||
// https://github.com/mapbox/node-pre-gyp#package_name
|
||||
// defaults to {module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz
|
||||
let binaryFile = pkgVersion.binary.package_name
|
||||
|| '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz';
|
||||
if (!binaryFile) continue;
|
||||
const moduleName = pkgVersion.binary.module_name || pkgVersion.name;
|
||||
binaryFile = binaryFile.replace('{version}', version)
|
||||
.replace('{module_name}', moduleName);
|
||||
async fetch(dir: string, binaryName: BinaryName): Promise<FetchResult | undefined> {
|
||||
const binaryConfig = binaries[binaryName];
|
||||
const pkgUrl = `https://registry.npmjs.com/${binaryName}`;
|
||||
const data = await this.requestJSON(pkgUrl);
|
||||
const dirItems: {
|
||||
[key: string]: BinaryItem[];
|
||||
} = {
|
||||
'/': [],
|
||||
};
|
||||
const nodeABIVersions = await this.listNodeABIVersions();
|
||||
const nodePlatforms = this.listNodePlatforms();
|
||||
const nodeArchs = this.listNodeArchs(binaryConfig);
|
||||
const nodeLibcs = this.listNodeLibcs();
|
||||
for (const version in data.versions) {
|
||||
const date = data.time[version];
|
||||
const pkgVersion = data.versions[version];
|
||||
if (!pkgVersion.binary) continue;
|
||||
// https://github.com/mapbox/node-pre-gyp#package_name
|
||||
// defaults to {module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz
|
||||
let binaryFile = pkgVersion.binary.package_name
|
||||
|| '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz';
|
||||
if (!binaryFile) continue;
|
||||
const moduleName = pkgVersion.binary.module_name || pkgVersion.name;
|
||||
binaryFile = binaryFile.replace('{version}', version)
|
||||
.replace('{module_name}', moduleName);
|
||||
|
||||
let currentDir = this.dirItems['/'];
|
||||
let versionPrefix = '';
|
||||
const remotePath = pkgVersion.binary.remote_path;
|
||||
const napiVersions = pkgVersion.binary.napi_versions ?? [];
|
||||
if (this.binaryConfig.options?.requiredNapiVersions && napiVersions.length === 0) continue;
|
||||
if (remotePath?.includes('{version}')) {
|
||||
const dirName = remotePath.includes('v{version}') ? `v${version}` : version;
|
||||
versionPrefix = `/${dirName}`;
|
||||
this.dirItems['/'].push({
|
||||
name: `${dirName}/`,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: true,
|
||||
url: '',
|
||||
});
|
||||
currentDir = this.dirItems[`/${dirName}/`] = [];
|
||||
}
|
||||
let currentDir = dirItems['/'];
|
||||
let versionPrefix = '';
|
||||
let remotePath = pkgVersion.binary.remote_path;
|
||||
const napiVersions = pkgVersion.binary.napi_versions ?? [];
|
||||
if (binaryConfig.options?.requiredNapiVersions && napiVersions.length === 0) continue;
|
||||
if (remotePath?.includes('{version}')) {
|
||||
const dirName = remotePath.includes('v{version}') ? `v${version}` : version;
|
||||
versionPrefix = `/${dirName}`;
|
||||
dirItems['/'].push({
|
||||
name: `${dirName}/`,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: true,
|
||||
url: '',
|
||||
});
|
||||
currentDir = dirItems[`/${dirName}/`] = [];
|
||||
}
|
||||
|
||||
// https://node-precompiled-binaries.grpc.io/?delimiter=/&prefix=grpc/v1.24.11/
|
||||
// https://github.com/grpc/grpc-node/blob/grpc%401.24.x/packages/grpc-native-core/package.json#L50
|
||||
// "binary": {
|
||||
// "module_name": "grpc_node",
|
||||
// "module_path": "src/node/extension_binary/{node_abi}-{platform}-{arch}-{libc}",
|
||||
// "host": "https://node-precompiled-binaries.grpc.io/",
|
||||
// "remote_path": "{name}/v{version}",
|
||||
// "package_name": "{node_abi}-{platform}-{arch}-{libc}.tar.gz"
|
||||
// },
|
||||
if (binaryFile.includes('{node_abi}')
|
||||
&& binaryFile.includes('{platform}')
|
||||
&& binaryFile.includes('{arch}')
|
||||
&& binaryFile.includes('{libc}')) {
|
||||
for (const nodeAbi of nodeABIVersions) {
|
||||
for (const platform of nodePlatforms) {
|
||||
const archs = nodeArchs[platform];
|
||||
const libcs = nodeLibcs[platform];
|
||||
for (const arch of archs) {
|
||||
for (const libc of libcs) {
|
||||
const name = binaryFile.replace('{node_abi}', `node-v${nodeAbi}`)
|
||||
.replace('{platform}', platform)
|
||||
.replace('{arch}', arch)
|
||||
.replace('{libc}', libc);
|
||||
currentDir.push({
|
||||
name,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: false,
|
||||
url: `${this.binaryConfig.distUrl}/${this.binaryConfig.category}${versionPrefix}/${name}`,
|
||||
ignoreDownloadStatuses: [ 404 ],
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (binaryFile.includes('{node_abi}')
|
||||
&& binaryFile.includes('{platform}')
|
||||
&& binaryFile.includes('{arch}')) {
|
||||
for (const nodeAbi of nodeABIVersions) {
|
||||
for (const platform of nodePlatforms) {
|
||||
const archs = nodeArchs[platform];
|
||||
for (const arch of archs) {
|
||||
const name = binaryFile.replace('{node_abi}', `node-v${nodeAbi}`)
|
||||
.replace('{platform}', platform)
|
||||
.replace('{arch}', arch);
|
||||
currentDir.push({
|
||||
name,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: false,
|
||||
url: `${this.binaryConfig.distUrl}/${this.binaryConfig.category}${versionPrefix}/${name}`,
|
||||
ignoreDownloadStatuses: [ 404 ],
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (binaryFile.includes('{platform}-{arch}-{node_napi_label}-{libc}') && napiVersions.length > 0) {
|
||||
// https://skia-canvas.s3.us-east-1.amazonaws.com/v0.9.30/darwin-arm64-napi-v6-unknown.tar.gz
|
||||
// https://github.com/samizdatco/skia-canvas/blob/2a75801d7cce3b4e4e6ad015a173daefaa8465e6/package.json#L48
|
||||
// "binary": {
|
||||
// "module_name": "index",
|
||||
// "module_path": "./lib/v{napi_build_version}",
|
||||
// "remote_path": "./v{version}",
|
||||
// "package_name": "{platform}-{arch}-{node_napi_label}-{libc}.tar.gz",
|
||||
// "host": "https://skia-canvas.s3.us-east-1.amazonaws.com",
|
||||
// "napi_versions": [
|
||||
// 6
|
||||
// ]
|
||||
// },
|
||||
// https://node-precompiled-binaries.grpc.io/?delimiter=/&prefix=grpc/v1.24.11/
|
||||
// https://github.com/grpc/grpc-node/blob/grpc%401.24.x/packages/grpc-native-core/package.json#L50
|
||||
// "binary": {
|
||||
// "module_name": "grpc_node",
|
||||
// "module_path": "src/node/extension_binary/{node_abi}-{platform}-{arch}-{libc}",
|
||||
// "host": "https://node-precompiled-binaries.grpc.io/",
|
||||
// "remote_path": "{name}/v{version}",
|
||||
// "package_name": "{node_abi}-{platform}-{arch}-{libc}.tar.gz"
|
||||
// },
|
||||
if (binaryFile.includes('{node_abi}')
|
||||
&& binaryFile.includes('{platform}')
|
||||
&& binaryFile.includes('{arch}')
|
||||
&& binaryFile.includes('{libc}')) {
|
||||
for (const nodeAbi of nodeABIVersions) {
|
||||
for (const platform of nodePlatforms) {
|
||||
const archs = nodeArchs[platform];
|
||||
const libcs = nodeLibcs[platform];
|
||||
for (const arch of archs) {
|
||||
for (const libc of libcs) {
|
||||
for (const napiVersion of napiVersions) {
|
||||
const name = binaryFile.replace('{platform}', platform)
|
||||
.replace('{arch}', arch)
|
||||
.replace('{node_napi_label}', `napi-v${napiVersion}`)
|
||||
.replace('{libc}', libc);
|
||||
currentDir.push({
|
||||
name,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: false,
|
||||
url: `${this.binaryConfig.distUrl}${versionPrefix}/${name}`,
|
||||
ignoreDownloadStatuses: [ 404, 403 ],
|
||||
});
|
||||
}
|
||||
const name = binaryFile.replace('{node_abi}', `node-v${nodeAbi}`)
|
||||
.replace('{platform}', platform)
|
||||
.replace('{arch}', arch)
|
||||
.replace('{libc}', libc);
|
||||
currentDir.push({
|
||||
name,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: false,
|
||||
url: `${binaryConfig.distUrl}/${binaryName}${versionPrefix}/${name}`,
|
||||
ignoreDownloadStatuses: [ 404 ],
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (binaryFile.includes('{platform}') && binaryFile.includes('{arch}')) {
|
||||
// https://github.com/grpc/grpc-node/blob/master/packages/grpc-tools/package.json#L29
|
||||
// "binary": {
|
||||
// "module_name": "grpc_tools",
|
||||
// "host": "https://node-precompiled-binaries.grpc.io/",
|
||||
// "remote_path": "{name}/v{version}",
|
||||
// "package_name": "{platform}-{arch}.tar.gz",
|
||||
// "module_path": "bin"
|
||||
// },
|
||||
}
|
||||
} else if (binaryFile.includes('{node_abi}')
|
||||
&& binaryFile.includes('{platform}')
|
||||
&& binaryFile.includes('{arch}')) {
|
||||
for (const nodeAbi of nodeABIVersions) {
|
||||
for (const platform of nodePlatforms) {
|
||||
const archs = nodeArchs[platform];
|
||||
for (const arch of archs) {
|
||||
const name = binaryFile.replace('{platform}', platform)
|
||||
const name = binaryFile.replace('{node_abi}', `node-v${nodeAbi}`)
|
||||
.replace('{platform}', platform)
|
||||
.replace('{arch}', arch);
|
||||
currentDir.push({
|
||||
name,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: false,
|
||||
url: `${this.binaryConfig.distUrl}/${this.binaryConfig.category}${versionPrefix}/${name}`,
|
||||
url: `${binaryConfig.distUrl}/${binaryName}${versionPrefix}/${name}`,
|
||||
ignoreDownloadStatuses: [ 404 ],
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (binaryFile.includes('{platform}-{arch}-{node_napi_label}-{libc}') && napiVersions.length > 0) {
|
||||
// https://skia-canvas.s3.us-east-1.amazonaws.com/v0.9.30/darwin-arm64-napi-v6-unknown.tar.gz
|
||||
// https://github.com/samizdatco/skia-canvas/blob/2a75801d7cce3b4e4e6ad015a173daefaa8465e6/package.json#L48
|
||||
// "binary": {
|
||||
// "module_name": "index",
|
||||
// "module_path": "./lib/v{napi_build_version}",
|
||||
// "remote_path": "./v{version}",
|
||||
// "package_name": "{platform}-{arch}-{node_napi_label}-{libc}.tar.gz",
|
||||
// "host": "https://skia-canvas.s3.us-east-1.amazonaws.com",
|
||||
// "napi_versions": [
|
||||
// 6
|
||||
// ]
|
||||
// },
|
||||
for (const platform of nodePlatforms) {
|
||||
const archs = nodeArchs[platform];
|
||||
const libcs = nodeLibcs[platform];
|
||||
for (const arch of archs) {
|
||||
for (const libc of libcs) {
|
||||
for (const napiVersion of napiVersions) {
|
||||
const name = binaryFile.replace('{platform}', platform)
|
||||
.replace('{arch}', arch)
|
||||
.replace('{node_napi_label}', `napi-v${napiVersion}`)
|
||||
.replace('{libc}', libc);
|
||||
currentDir.push({
|
||||
name,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: false,
|
||||
url: `${binaryConfig.distUrl}${versionPrefix}/${name}`,
|
||||
ignoreDownloadStatuses: [ 404, 403 ],
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (binaryFile.includes('{platform}-{arch}-{node_napi_label}')) {
|
||||
// "_id": "skia-canvas@0.9.22",
|
||||
// "binary": {
|
||||
// "module_name": "index",
|
||||
// "module_path": "./lib/v{napi_build_version}",
|
||||
// "remote_path": "./v{version}",
|
||||
// "package_name": "{platform}-{arch}-{node_napi_label}.tar.gz",
|
||||
// "host": "https://skia-canvas.s3.us-east-1.amazonaws.com",
|
||||
// "napi_versions": [
|
||||
// 6
|
||||
// ]
|
||||
// },
|
||||
for (const platform of nodePlatforms) {
|
||||
const archs = nodeArchs[platform];
|
||||
for (const arch of archs) {
|
||||
for (const napiVersion of napiVersions) {
|
||||
const binaryFileName = binaryFile.replace('{platform}', platform)
|
||||
.replace('{arch}', arch)
|
||||
.replace('{node_napi_label}', napiVersion);
|
||||
remotePath = remotePath.replace('{module_name}', moduleName)
|
||||
.replace('{name}', binaryName)
|
||||
.replace('{version}', version)
|
||||
.replace('{configuration}', 'Release');
|
||||
const binaryFilePath = join('/', remotePath, binaryFileName);
|
||||
const remoteUrl = `${binaryConfig.distUrl}${binaryFilePath}`;
|
||||
currentDir.push({
|
||||
name: binaryFileName,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: false,
|
||||
url: remoteUrl,
|
||||
ignoreDownloadStatuses: [ 404 ],
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (binaryFile.includes('{platform}') && binaryFile.includes('{arch}')) {
|
||||
// https://github.com/grpc/grpc-node/blob/master/packages/grpc-tools/package.json#L29
|
||||
// "binary": {
|
||||
// "module_name": "grpc_tools",
|
||||
// "host": "https://node-precompiled-binaries.grpc.io/",
|
||||
// "remote_path": "{name}/v{version}",
|
||||
// "package_name": "{platform}-{arch}.tar.gz",
|
||||
// "module_path": "bin"
|
||||
// },
|
||||
// handle {configuration}
|
||||
// "binary": {
|
||||
// "module_name": "wrtc",
|
||||
// "module_path": "./build/{configuration}/",
|
||||
// "remote_path": "./{module_name}/v{version}/{configuration}/",
|
||||
// "package_name": "{platform}-{arch}.tar.gz",
|
||||
// "host": "https://node-webrtc.s3.amazonaws.com"
|
||||
// },
|
||||
for (const platform of nodePlatforms) {
|
||||
const archs = nodeArchs[platform];
|
||||
for (const arch of archs) {
|
||||
const binaryFileName = binaryFile.replace('{platform}', platform)
|
||||
.replace('{arch}', arch);
|
||||
remotePath = remotePath.replace('{module_name}', moduleName)
|
||||
.replace('{name}', binaryName)
|
||||
.replace('{version}', version)
|
||||
.replace('{configuration}', 'Release');
|
||||
const binaryFilePath = join('/', remotePath, binaryFileName);
|
||||
const remoteUrl = `${binaryConfig.distUrl}${binaryFilePath}`;
|
||||
currentDir.push({
|
||||
name: binaryFileName,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: false,
|
||||
url: remoteUrl,
|
||||
ignoreDownloadStatuses: [ 404 ],
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return { items: this.dirItems[dir] };
|
||||
return { items: dirItems[dir] };
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,14 +1,20 @@
import { FetchResult, BinaryItem } from './AbstractBinary';
import { SingletonProto } from '@eggjs/tegg';
import { BinaryType } from '../../enum/Binary';
import binaries from '../../../../config/binaries';
import { FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
import { BucketBinary } from './BucketBinary';

@SingletonProto()
@BinaryAdapter(BinaryType.Nwjs)
export class NwjsBinary extends BucketBinary {
  private s3Url = 'https://nwjs2.s3.amazonaws.com/?delimiter=/&prefix=';

  async fetch(dir: string): Promise<FetchResult | undefined> {
    const binaryConfig = binaries.nwjs;
    const isRootDir = dir === '/';
    // /foo/ => foo/
    const subDir = dir.substring(1);
    const url = isRootDir ? this.binaryConfig.distUrl : `${this.s3Url}${encodeURIComponent(subDir)}`;
    const url = isRootDir ? binaryConfig.distUrl : `${this.s3Url}${encodeURIComponent(subDir)}`;
    const xml = await this.requestXml(url);
    if (!xml) return;

@@ -37,6 +43,6 @@ export class NwjsBinary extends BucketBinary {
      return { items, nextParams: null };
    }

    return { items: this.parseItems(xml, dir), nextParams: null };
    return { items: this.parseItems(xml, dir, binaryConfig), nextParams: null };
  }
}

@@ -1,7 +1,9 @@

import { AbstractBinary, BinaryItem, FetchResult } from './AbstractBinary';
import { AbstractBinary, BinaryAdapter, BinaryItem, FetchResult } from './AbstractBinary';
import util from 'util';
import path from 'path';
import { SingletonProto } from '@eggjs/tegg';
import { BinaryType } from '../../enum/Binary';

const PACKAGE_URL = 'https://registry.npmjs.com/playwright-core';
const DOWNLOAD_HOST = 'https://playwright.azureedge.net/';
@@ -144,8 +146,14 @@ const DOWNLOAD_PATHS = {
  },
};

@SingletonProto()
@BinaryAdapter(BinaryType.Playwright)
export class PlaywrightBinary extends AbstractBinary {
  private dirItems?: Record<string, BinaryItem[]>;
  async initFetch() {
    this.dirItems = undefined;
  }

  async fetch(dir: string): Promise<FetchResult | undefined> {
    if (!this.dirItems) {
      const packageData = await this.requestJSON(PACKAGE_URL);
@@ -181,7 +189,9 @@ export class PlaywrightBinary extends AbstractBinary {
            browsers.push(...data.browsers);
          })
          .catch(err => {
            this.logger.warn('[PlaywrightBinary.fetch:error] Playwright version %s browser data request failed: %s', version, err);
            /* c8 ignore next 2 */
            this.logger.warn('[PlaywrightBinary.fetch:error] Playwright version %s browser data request failed: %s',
              version, err);
          }),
        ),
      );
@@ -216,4 +226,3 @@ export class PlaywrightBinary extends AbstractBinary {
    return { items: this.dirItems[dir] ?? [], nextParams: null };
  }
}

app/common/adapter/binary/PrismaBinary.ts (new file, 129 lines)
@@ -0,0 +1,129 @@
|
||||
import path from 'node:path';
|
||||
import { SingletonProto } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import binaries, { BinaryName } from '../../../../config/binaries';
|
||||
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
|
||||
|
||||
@SingletonProto()
|
||||
@BinaryAdapter(BinaryType.Prisma)
|
||||
export class PrismaBinary extends AbstractBinary {
|
||||
private dirItems: {
|
||||
[key: string]: BinaryItem[];
|
||||
} = {};
|
||||
|
||||
async initFetch() {
|
||||
// https://github.com/cnpm/cnpmcore/issues/473#issuecomment-1562115738
|
||||
const pkgUrl = 'https://registry.npmjs.com/@prisma/engines';
|
||||
const data = await this.requestJSON(pkgUrl);
|
||||
const modified = data.time.modified;
|
||||
this.dirItems = {};
|
||||
this.dirItems['/'] = [
|
||||
{
|
||||
name: 'all_commits/',
|
||||
date: modified,
|
||||
size: '-',
|
||||
isDir: true,
|
||||
url: '',
|
||||
},
|
||||
];
|
||||
this.dirItems['/all_commits/'] = [];
|
||||
const commitIdMap: Record<string, boolean> = {};
|
||||
// https://list-binaries.prisma-orm.workers.dev/?delimiter=/&prefix=all_commits/61023c35d2c8762f66f09bc4183d2f630b541d08/
|
||||
for (const version in data.versions) {
|
||||
const major = parseInt(version.split('.', 1)[0]);
|
||||
// need >= 3.0.0
|
||||
if (major < 3) continue;
|
||||
const date = data.time[version];
|
||||
const pkg = data.versions[version];
|
||||
// https://registry.npmjs.com/@prisma/engines/4.14.1
|
||||
const enginesVersion = pkg.devDependencies['@prisma/engines-version'] || '';
|
||||
// "@prisma/engines-version": "4.14.0-67.d9a4c5988f480fa576d43970d5a23641aa77bc9c"
|
||||
const matched = /\.(\w{30,})$/.exec(enginesVersion);
|
||||
if (!matched) continue;
|
||||
const commitId = matched[1];
|
||||
if (commitIdMap[commitId]) continue;
|
||||
commitIdMap[commitId] = true;
|
||||
this.dirItems['/all_commits/'].push({
|
||||
name: `${commitId}/`,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: true,
|
||||
url: '',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async fetch(dir: string, binaryName: BinaryName): Promise<FetchResult | undefined> {
|
||||
const existsItems = this.dirItems[dir];
|
||||
if (existsItems) {
|
||||
return { items: existsItems, nextParams: null };
|
||||
}
|
||||
// /foo/ => foo/
|
||||
const binaryConfig = binaries[binaryName];
|
||||
const subDir = dir.substring(1);
|
||||
const url = `${binaryConfig.distUrl}?delimiter=/&prefix=${encodeURIComponent(subDir)}`;
|
||||
const result = await this.requestJSON(url);
|
||||
return { items: this.#parseItems(result), nextParams: null };
|
||||
}
|
||||
|
||||
#parseItems(result: any): BinaryItem[] {
|
||||
const items: BinaryItem[] = [];
|
||||
// objects": [
|
||||
// {
|
||||
// "uploaded": "2023-05-23T15:43:05.772Z",
|
||||
// "checksums": {
|
||||
// "md5": "d41d8cd98f00b204e9800998ecf8427e"
|
||||
// },
|
||||
// "httpEtag": "\"d41d8cd98f00b204e9800998ecf8427e\"",
|
||||
// "etag": "d41d8cd98f00b204e9800998ecf8427e",
|
||||
// "size": 0,
|
||||
// "version": "7e77b6b8c1d214f2c6be3c959749b5a6",
|
||||
// "key": "all_commits/61023c35d2c8762f66f09bc4183d2f630b541d08/darwin-arm64/.finished"
|
||||
// },
|
||||
// {
|
||||
// "uploaded": "2023-05-23T15:41:33.861Z",
|
||||
// "checksums": {
|
||||
// "md5": "4822215a13ae372ae82afd12689fce37"
|
||||
// },
|
||||
// "httpEtag": "\"4822215a13ae372ae82afd12689fce37\"",
|
||||
// "etag": "4822215a13ae372ae82afd12689fce37",
|
||||
// "size": 96,
|
||||
// "version": "7e77b6ba29d4e776023e4fa62825c13a",
|
||||
// "key": "all_commits/61023c35d2c8762f66f09bc4183d2f630b541d08/darwin-arm64/libquery_engine.dylib.node.gz.sha256"
|
||||
// },
|
||||
// https://list-binaries.prisma-orm.workers.dev/?delimiter=/&prefix=all_commits/61023c35d2c8762f66f09bc4183d2f630b541d08/darwin-arm64/
|
||||
const objects: {
|
||||
uploaded: string;
|
||||
size: number;
|
||||
key: string;
|
||||
}[] = result.objects || [];
|
||||
for (const o of objects) {
|
||||
const fullname = o.key;
|
||||
// ignore size = 0
|
||||
if (o.size === 0) continue;
|
||||
const name = path.basename(fullname);
|
||||
items.push({
|
||||
name,
|
||||
isDir: false,
|
||||
// https://binaries.prisma.sh/all_commits/2452cc6313d52b8b9a96999ac0e974d0aedf88db/darwin-arm64/prisma-fmt.gz
|
||||
url: `https://binaries.prisma.sh/${fullname}`,
|
||||
size: o.size,
|
||||
date: o.uploaded,
|
||||
});
|
||||
}
|
||||
// delimitedPrefixes: [ 'all_commits/61023c35d2c8762f66f09bc4183d2f630b541d08/darwin-arm64/' ]
|
||||
// https://list-binaries.prisma-orm.workers.dev/?delimiter=/&prefix=all_commits/61023c35d2c8762f66f09bc4183d2f630b541d08/
|
||||
const delimitedPrefixes: string[] = result.delimitedPrefixes || [];
|
||||
for (const fullname of delimitedPrefixes) {
|
||||
const name = `${path.basename(fullname)}/`;
|
||||
items.push({
|
||||
name,
|
||||
isDir: true,
|
||||
url: '',
|
||||
size: '-',
|
||||
date: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
return items;
|
||||
}
|
||||
}
|
||||
@@ -1,10 +1,18 @@
import { AbstractBinary, FetchResult, BinaryItem } from './AbstractBinary';
import { SingletonProto } from '@eggjs/tegg';
import { BinaryType } from '../../enum/Binary';
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';

@SingletonProto()
@BinaryAdapter(BinaryType.Puppeteer)
export class PuppeteerBinary extends AbstractBinary {
  private dirItems: {
  private dirItems?: {
    [key: string]: BinaryItem[];
  };

  async initFetch() {
    this.dirItems = undefined;
  }

  async fetch(dir: string): Promise<FetchResult | undefined> {
    if (!this.dirItems) {
      const pkgUrl = 'https://registry.npmjs.com/puppeteer';
@@ -27,7 +35,7 @@ export class PuppeteerBinary extends AbstractBinary {
      //   chromium: '768783',
      //   firefox: 'latest',
      // };
      const unpkgURL = 'https://unpkg.com/puppeteer@latest/lib/cjs/puppeteer/revisions.js';
      const unpkgURL = 'https://unpkg.com/puppeteer-core@latest/lib/cjs/puppeteer/revisions.js';
      const text = await this.requestXml(unpkgURL);
      const m = /chromium:\s+\'(\d+)\'\,/.exec(text);
      if (m && !chromiumRevisions.has(m[1])) {

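// NOTE (illustrative sketch, not part of the diff): the regex above extracts the pinned
// chromium revision from the fetched revisions.js source text. The sample text is made up.
const sampleRevisionsJs = "export const PUPPETEER_REVISIONS = { chromium: '768783', firefox: 'latest' };";
const revisionMatch = /chromium:\s+\'(\d+)\'\,/.exec(sampleRevisionsJs);
console.log(revisionMatch?.[1]); // => '768783' when the pattern matches, otherwise undefined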
@@ -1,79 +1,84 @@
|
||||
import { AbstractBinary, FetchResult, BinaryItem } from './AbstractBinary';
|
||||
import { SingletonProto } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
|
||||
|
||||
@SingletonProto()
|
||||
@BinaryAdapter(BinaryType.Sqlcipher)
|
||||
export class SqlcipherBinary extends AbstractBinary {
|
||||
private dirItems: {
|
||||
[key: string]: BinaryItem[];
|
||||
};
|
||||
async initFetch() {
|
||||
// do nothing
|
||||
return;
|
||||
}
|
||||
|
||||
async fetch(dir: string): Promise<FetchResult | undefined> {
|
||||
if (!this.dirItems) {
|
||||
this.dirItems = {};
|
||||
const s3Url = 'https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher';
|
||||
const pkgUrl = 'https://registry.npmjs.com/@journeyapps/sqlcipher';
|
||||
const data = await this.requestJSON(pkgUrl);
|
||||
this.dirItems = {};
|
||||
this.dirItems['/'] = [];
|
||||
// https://github.com/journeyapps/node-sqlcipher/blob/master/.circleci/config.yml#L407
|
||||
// https://github.com/journeyapps/node-sqlcipher/issues/35#issuecomment-698924173
|
||||
// https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher/v5.3.0/napi-v3-darwin-arm64.tar.gz
|
||||
// https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher/v5.3.0/napi-v6-darwin-arm64.tar.gz
|
||||
// https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher/v5.3.0/napi-v3-darwin-x64.tar.gz
|
||||
// https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher/v5.3.0/napi-v6-darwin-x64.tar.gz
|
||||
const dirItems: {
|
||||
[key: string]: BinaryItem[];
|
||||
} = {
|
||||
'/': [],
|
||||
};
|
||||
const s3Url = 'https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher';
|
||||
const pkgUrl = 'https://registry.npmjs.com/@journeyapps/sqlcipher';
|
||||
const data = await this.requestJSON(pkgUrl);
|
||||
// https://github.com/journeyapps/node-sqlcipher/blob/master/.circleci/config.yml#L407
|
||||
// https://github.com/journeyapps/node-sqlcipher/issues/35#issuecomment-698924173
|
||||
// https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher/v5.3.0/napi-v3-darwin-arm64.tar.gz
|
||||
// https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher/v5.3.0/napi-v6-darwin-arm64.tar.gz
|
||||
// https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher/v5.3.0/napi-v3-darwin-x64.tar.gz
|
||||
// https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher/v5.3.0/napi-v6-darwin-x64.tar.gz
|
||||
|
||||
// https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher/v5.3.0/napi-v6-linux-x64.tar.gz
|
||||
// https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher/v5.3.0/napi-v3-linux-x64.tar.gz
|
||||
// https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher/v5.3.0/napi-v6-linux-x64.tar.gz
|
||||
// https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher/v5.3.0/napi-v3-linux-x64.tar.gz
|
||||
|
||||
// https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher/v5.3.0/napi-v6-win32-arm64.tar.gz
|
||||
// https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher/v5.3.0/napi-v3-win32-arm64.tar.gz
|
||||
// https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher/v5.3.0/napi-v3-win32-ia32.tar.gz
|
||||
// https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher/v5.3.0/napi-v6-win32-ia32.tar.gz
|
||||
// https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher/v5.3.0/napi-v3-win32-x64.tar.gz
|
||||
// https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher/v5.3.0/napi-v6-win32-x64.tar.gz
|
||||
const nodePlatformAndArchs = [
|
||||
'linux-x64',
|
||||
'darwin-x64',
|
||||
'darwin-arm64',
|
||||
'win32-x64',
|
||||
'win32-arm64',
|
||||
'win32-ia32',
|
||||
];
|
||||
for (const version in data.versions) {
|
||||
const major = parseInt(version.split('.', 1)[0]);
|
||||
if (major < 5) continue;
|
||||
// >= 5.0.0
|
||||
const pkgVersion = data.versions[version];
|
||||
const napiVersions = pkgVersion.binary && pkgVersion.binary.napi_versions || [];
|
||||
const date = data.time[version];
|
||||
this.dirItems['/'].push({
|
||||
name: `v${version}/`,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: true,
|
||||
url: '',
|
||||
});
|
||||
const versionDir = `/v${version}/`;
|
||||
this.dirItems[versionDir] = [];
|
||||
for (const nodePlatformAndArch of nodePlatformAndArchs) {
|
||||
// napi
|
||||
for (const napiVersion of napiVersions) {
|
||||
// >= 5.0.0
|
||||
// "package_name": "napi-v{napi_build_version}-{platform}-{arch}.tar.gz",
|
||||
// "napi_versions": [
|
||||
// 3, 6
|
||||
// ]
|
||||
const name = `napi-v${napiVersion}-${nodePlatformAndArch}.tar.gz`;
|
||||
this.dirItems[versionDir].push({
|
||||
name,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: false,
|
||||
url: `${s3Url}/v${version}/${name}`,
|
||||
ignoreDownloadStatuses: [ 404, 403 ],
|
||||
});
|
||||
}
|
||||
// https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher/v5.3.0/napi-v6-win32-arm64.tar.gz
|
||||
// https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher/v5.3.0/napi-v3-win32-arm64.tar.gz
|
||||
// https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher/v5.3.0/napi-v3-win32-ia32.tar.gz
|
||||
// https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher/v5.3.0/napi-v6-win32-ia32.tar.gz
|
||||
// https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher/v5.3.0/napi-v3-win32-x64.tar.gz
|
||||
// https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher/v5.3.0/napi-v6-win32-x64.tar.gz
|
||||
const nodePlatformAndArchs = [
|
||||
'linux-x64',
|
||||
'darwin-x64',
|
||||
'darwin-arm64',
|
||||
'win32-x64',
|
||||
'win32-arm64',
|
||||
'win32-ia32',
|
||||
];
|
||||
for (const version in data.versions) {
|
||||
const major = parseInt(version.split('.', 1)[0]);
|
||||
if (major < 5) continue;
|
||||
// >= 5.0.0
|
||||
const pkgVersion = data.versions[version];
|
||||
const napiVersions = pkgVersion.binary && pkgVersion.binary.napi_versions || [];
|
||||
const date = data.time[version];
|
||||
dirItems['/'].push({
|
||||
name: `v${version}/`,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: true,
|
||||
url: '',
|
||||
});
|
||||
const versionDir = `/v${version}/`;
|
||||
dirItems[versionDir] = [];
|
||||
for (const nodePlatformAndArch of nodePlatformAndArchs) {
|
||||
// napi
|
||||
for (const napiVersion of napiVersions) {
|
||||
// >= 5.0.0
|
||||
// "package_name": "napi-v{napi_build_version}-{platform}-{arch}.tar.gz",
|
||||
// "napi_versions": [
|
||||
// 3, 6
|
||||
// ]
|
||||
const name = `napi-v${napiVersion}-${nodePlatformAndArch}.tar.gz`;
|
||||
dirItems[versionDir].push({
|
||||
name,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: false,
|
||||
url: `${s3Url}/v${version}/${name}`,
|
||||
ignoreDownloadStatuses: [ 404, 403 ],
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
return { items: this.dirItems[dir] };
|
||||
return { items: dirItems[dir] };
|
||||
}
|
||||
}
|
||||
|
||||
app/common/adapter/changesStream/AbstractChangesStream.ts (new file, 40 lines)
@@ -0,0 +1,40 @@
import {
  ImplDecorator,
  Inject,
  QualifierImplDecoratorUtil,
} from '@eggjs/tegg';
import { RegistryType } from '../../../common/enum/Registry';
import { Registry } from '../../../core/entity/Registry';
import {
  EggHttpClient,
  EggLogger,
} from 'egg';

export const CHANGE_STREAM_ATTRIBUTE = 'CHANGE_STREAM_ATTRIBUTE';
export type ChangesStreamChange = {
  seq: string;
  fullname: string;
};

export abstract class AbstractChangeStream {
  @Inject()
  protected logger: EggLogger;

  @Inject()
  protected httpclient: EggHttpClient;

  abstract getInitialSince(registry: Registry): Promise<string>;
  abstract fetchChanges(registry: Registry, since: string): AsyncGenerator<ChangesStreamChange>;

  getChangesStreamUrl(registry: Registry, since: string, limit?: number): string {
    const url = new URL(registry.changeStream);
    url.searchParams.set('since', since);
    if (limit) {
      url.searchParams.set('limit', String(limit));
    }
    return url.toString();
  }
}

export const RegistryChangesStream: ImplDecorator<AbstractChangeStream, typeof RegistryType> =
  QualifierImplDecoratorUtil.generatorDecorator(AbstractChangeStream, CHANGE_STREAM_ATTRIBUTE);
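// NOTE (illustrative sketch, not part of the diff): getChangesStreamUrl only appends
// `since` (and optionally `limit`) to the registry's changeStream URL; the URL below is hypothetical.
const changeStream = 'https://replicate.example.com/registry/_changes';
const url = new URL(changeStream);
url.searchParams.set('since', '9527');
url.searchParams.set('limit', String(1000));
console.log(url.toString()); // => https://replicate.example.com/registry/_changes?since=9527&limit=1000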
app/common/adapter/changesStream/CnpmcoreChangesStream.ts (new file, 52 lines)
@@ -0,0 +1,52 @@
import { SingletonProto } from '@eggjs/tegg';
import { RegistryType } from '../../../common/enum/Registry';
import { Registry } from '../../../core/entity/Registry';
import { E500 } from 'egg-errors';
import { AbstractChangeStream, RegistryChangesStream } from './AbstractChangesStream';

@SingletonProto()
@RegistryChangesStream(RegistryType.Cnpmcore)
export class CnpmcoreChangesStream extends AbstractChangeStream {

  async getInitialSince(registry: Registry): Promise<string> {
    const db = (new URL(registry.changeStream)).origin;
    const { status, data } = await this.httpclient.request(db, {
      followRedirect: true,
      timeout: 10000,
      dataType: 'json',
    });
    if (!data.update_seq) {
      throw new E500(`get getInitialSince failed: ${data.update_seq}`);
    }
    const since = String(data.update_seq - 10);
    this.logger.warn('[NpmChangesStream.getInitialSince:firstSeq] GET %s status: %s, data: %j, since: %s',
      registry.name, status, data, since);
    return since;
  }

  async* fetchChanges(registry: Registry, since: string) {
    const db = this.getChangesStreamUrl(registry, since);
    // json mode
    const { data } = await this.httpclient.request(db, {
      followRedirect: true,
      timeout: 30000,
      dataType: 'json',
      gzip: true,
    });

    if (data.results?.length > 0) {
      for (const change of data.results) {
        const seq = String(change.seq);
        const fullname = change.id;
        // cnpmcore returns changes with seq >= since by default, so skip the entry equal to `since`
        if (seq && fullname && seq !== since) {
          const change = {
            fullname,
            seq,
          };
          yield change;
        }
      }
    }
  }
}
app/common/adapter/changesStream/CnpmjsorgChangesStream.ts (new file, 65 lines)
@@ -0,0 +1,65 @@
import { SingletonProto } from '@eggjs/tegg';
import { RegistryType } from '../../../common/enum/Registry';
import { Registry } from '../../../core/entity/Registry';
import { E500 } from 'egg-errors';
import { AbstractChangeStream, RegistryChangesStream } from './AbstractChangesStream';

const MAX_LIMIT = 10000;

@SingletonProto()
@RegistryChangesStream(RegistryType.Cnpmjsorg)
export class CnpmjsorgChangesStream extends AbstractChangeStream {

  // cnpmjs.org does not implement the update_seq field,
  // so fall back to the current timestamp string
  async getInitialSince(registry: Registry): Promise<string> {
    const since = String((new Date()).getTime());
    this.logger.warn(`[CnpmjsorgChangesStream.getInitialSince] since: ${since}, skip query ${registry.changeStream}`);
    return since;
  }

  private async tryFetch(registry: Registry, since: string, limit = 1000) {
    if (limit > MAX_LIMIT) {
      throw new E500(`limit too large, current since: ${since}, limit: ${limit}`);
    }
    const db = this.getChangesStreamUrl(registry, since, limit);
    // json mode
    const res = await this.httpclient.request(db, {
      followRedirect: true,
      timeout: 30000,
      dataType: 'json',
      gzip: true,
    });
    const { results = [] } = res.data;
    if (results?.length >= limit) {
      const [ first ] = results;
      const last = results[results.length - 1];
      if (first.gmt_modified === last.gmt_modified) {
        return await this.tryFetch(registry, since, limit + 1000);
      }
    }

    return res;
  }

  async* fetchChanges(registry: Registry, since: string) {
    // ref: https://github.com/cnpm/cnpmjs.org/pull/1734
    // cnpmjs.org cannot compute an exact seq: since is a timestamp, so make sure
    // the first and last gmtModified in a single response are not equal
    const { data } = await this.tryFetch(registry, since);

    if (data.results?.length > 0) {
      for (const change of data.results) {
        const seq = new Date(change.gmt_modified).getTime() + '';
        const fullname = change.id;
        if (seq && fullname && seq !== since) {
          const change = {
            fullname,
            seq,
          };
          yield change;
        }
      }
    }
  }
}
app/common/adapter/changesStream/NpmChangesStream.ts (new file, 55 lines)
@@ -0,0 +1,55 @@
import { SingletonProto } from '@eggjs/tegg';
import { E500 } from 'egg-errors';
import { RegistryType } from '../../../common/enum/Registry';
import { Registry } from '../../../core/entity/Registry';
import { AbstractChangeStream, ChangesStreamChange, RegistryChangesStream } from './AbstractChangesStream';

@SingletonProto()
@RegistryChangesStream(RegistryType.Npm)
export class NpmChangesStream extends AbstractChangeStream {

  async getInitialSince(registry: Registry): Promise<string> {
    const db = (new URL(registry.changeStream)).origin;
    const { status, data } = await this.httpclient.request(db, {
      followRedirect: true,
      timeout: 10000,
      dataType: 'json',
    });
    const since = String(data.update_seq - 10);
    if (!data.update_seq) {
      throw new E500(`get getInitialSince failed: ${data.update_seq}`);
    }
    this.logger.warn('[NpmChangesStream.getInitialSince] GET %s status: %s, data: %j, since: %s',
      registry.name, registry.changeStream, status, data, since);
    return since;
  }

  async* fetchChanges(registry: Registry, since: string) {
    const db = this.getChangesStreamUrl(registry, since);
    const { res } = await this.httpclient.request(db, {
      streaming: true,
      timeout: 10000,
    });

    let buf = '';
    for await (const chunk of res) {
      const text = chunk.toString();
      const lines = text.split('\n');

      for (const line of lines) {
        const content = buf + line;
        const match = /"seq":(\d+),"id":"([^"]+)"/g.exec(content);
        const seq = match?.[1];
        const fullname = match?.[2];
        if (seq && fullname) {
          buf = '';
          const change: ChangesStreamChange = { fullname, seq };
          yield change;
        } else {
          buf += line;
        }
      }
    }
  }

}
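// NOTE (illustrative sketch, not part of the diff): fetchChanges is an async generator,
// so a caller drains it with `for await`. The registry record and since value are assumptions.
async function followNpmChanges(stream: NpmChangesStream, registry: Registry, since: string) {
  for await (const change of stream.fetchChanges(registry, since)) {
    // change.seq is the replication sequence, change.fullname the package name
    console.log('seq=%s package=%s', change.seq, change.fullname);
  }
}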
@@ -1,2 +1,28 @@
export const BUG_VERSIONS = 'bug-versions';
export const LATEST_TAG = 'latest';
export const GLOBAL_WORKER = 'GLOBAL_WORKER';
export enum SyncMode {
  none = 'none',
  admin = 'admin',
  exist = 'exist',
  all = 'all',
}
export enum ChangesStreamMode {
  json = 'json',
  streaming = 'streaming',
}
export enum SyncDeleteMode {
  ignore = 'ignore',
  block = 'block',
  delete = 'delete',
}

export enum PresetRegistryName {
  default = 'default',
  self = 'self',
}

export enum PackageAccessLevel {
  write = 'write',
  read = 'read',
}

app/common/enum/Binary.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
export enum BinaryType {
  Api = 'api',
  Bucket = 'bucket',
  Cypress = 'cypress',
  Electron = 'electron',
  GitHub = 'github',
  Imagemin = 'imagemin',
  Node = 'node',
  NodePreGyp = 'nodePreGyp',
  Nwjs = 'nwjs',
  Playwright = 'playwright',
  Puppeteer = 'puppeteer',
  Prisma = 'prisma',
  Sqlcipher = 'sqlcipher',
  ChromeForTesting = 'chromeForTesting',
}
app/common/enum/Hook.ts (new file, 19 lines)
@@ -0,0 +1,19 @@
export enum HookType {
  Package = 'package',
  Scope = 'scope',
  Owner = 'owner',
}

export enum HookEventType {
  Star = 'package:star',
  Unstar = 'package:unstar',
  Publish = 'package:publish',
  Unpublish = 'package:unpublish',
  Owner = 'package:owner',
  OwnerRm = 'package:owner-rm',
  DistTag = 'package:dist-tag',
  DistTagRm = 'package:dist-tag-rm',
  Deprecated = 'package:deprecated',
  Undeprecated = 'package:undeprecated',
  Change = 'package:change',
}
app/common/enum/Registry.ts (new file, 5 lines)
@@ -0,0 +1,5 @@
export enum RegistryType {
  Npm = 'npm',
  Cnpmcore = 'cnpmcore',
  Cnpmjsorg = 'cnpmjsorg',
}
@@ -2,6 +2,8 @@ export enum TaskType {
  SyncPackage = 'sync_package',
  ChangesStream = 'changes_stream',
  SyncBinary = 'sync_binary',
  CreateHook = 'create_hook',
  TriggerHook = 'trigger_hook',
}

export enum TaskState {

@@ -3,3 +3,9 @@ export enum LoginResultCode {
  Success,
  Fail,
}

export enum WanStatusCode {
  UserNotFound,
  Unbound,
  Bound,
}

@@ -1,5 +1,7 @@
import { CnpmcoreConfig } from '../port/config';
import { Readable } from 'stream';
import { IncomingHttpHeaders } from 'http';
import { EggContext } from '@eggjs/tegg';

export interface UploadResult {
  key: string;
@@ -20,6 +22,10 @@ export interface AppendOptions {
  headers?: IncomingHttpHeaders,
}

export interface DownloadOptions {
  timeout: number;
}

export interface NFSClient {
  uploadBytes(bytes: Uint8Array, options: UploadOptions): Promise<UploadResult>;

@@ -33,5 +39,36 @@ export interface NFSClient {

  createDownloadStream(key: string): Promise<Readable | undefined>;

  download(key: string, filepath: string, options: DownloadOptions): Promise<void>;

  url?(key: string): string;
}

export interface QueueAdapter {
  push<T>(key: string, item: T): Promise<boolean>;
  pop<T>(key: string): Promise<T | null>;
  length(key: string): Promise<number>;
}

export interface AuthUrlResult {
  loginUrl: string;
  doneUrl: string;
}

export interface userResult {
  name: string;
  email: string;
}
export interface AuthClient {
  getAuthUrl(ctx: EggContext): Promise<AuthUrlResult>;
  ensureCurrentUser(): Promise<userResult | null>;
}

declare module 'egg' {
  // eslint-disable-next-line
  // @ts-ignore
  // avoid TS2310 Type 'EggAppConfig' recursively references itself as a base type.
  interface EggAppConfig {
    cnpmcore: CnpmcoreConfig;
  }
}

app/core/entity/Hook.ts (new file, 61 lines)
@@ -0,0 +1,61 @@
|
||||
import { Entity, EntityData } from './Entity';
|
||||
import { EasyData, EntityUtil } from '../util/EntityUtil';
|
||||
import { HookType } from '../../common/enum/Hook';
|
||||
import crypto from 'crypto';
|
||||
|
||||
export type CreateHookData = Omit<EasyData<HookData, 'hookId'>, 'enable' | 'latestTaskId'>;
|
||||
|
||||
export interface HookData extends EntityData {
|
||||
hookId: string;
|
||||
type: HookType;
|
||||
ownerId: string;
|
||||
name: string;
|
||||
endpoint: string;
|
||||
secret: string;
|
||||
latestTaskId?: string;
|
||||
enable: boolean;
|
||||
}
|
||||
|
||||
export class Hook extends Entity {
|
||||
readonly hookId: string;
|
||||
readonly type: HookType;
|
||||
readonly ownerId: string;
|
||||
readonly name: string;
|
||||
endpoint: string;
|
||||
secret: string;
|
||||
enable: boolean;
|
||||
latestTaskId?: string;
|
||||
|
||||
constructor(data: HookData) {
|
||||
super(data);
|
||||
this.hookId = data.hookId;
|
||||
this.type = data.type;
|
||||
this.ownerId = data.ownerId;
|
||||
this.name = data.name;
|
||||
this.endpoint = data.endpoint;
|
||||
this.secret = data.secret;
|
||||
this.latestTaskId = data.latestTaskId;
|
||||
this.enable = data.enable;
|
||||
}
|
||||
|
||||
static create(data: CreateHookData): Hook {
|
||||
const hookData: EasyData<HookData, 'hookId'> = Object.assign({}, data, {
|
||||
enable: true,
|
||||
latestTaskId: undefined,
|
||||
});
|
||||
const newData = EntityUtil.defaultData(hookData, 'hookId');
|
||||
return new Hook(newData);
|
||||
}
|
||||
|
||||
// payload 可能会特别大,如果做多次 stringify 浪费太多 cpu
|
||||
signPayload(payload: object): { digest, payloadStr } {
|
||||
const payloadStr = JSON.stringify(payload);
|
||||
const digest = crypto.createHmac('sha256', this.secret)
|
||||
.update(JSON.stringify(payload))
|
||||
.digest('hex');
|
||||
return {
|
||||
digest,
|
||||
payloadStr,
|
||||
};
|
||||
}
|
||||
}
|
||||
app/core/entity/HookEvent.ts (new file, 93 lines)
@@ -0,0 +1,93 @@
|
||||
import { HookEventType } from '../../common/enum/Hook';
|
||||
|
||||
export interface PublishChangePayload {
|
||||
'dist-tag'?: string;
|
||||
version: string;
|
||||
}
|
||||
|
||||
export interface UnpublishChangePayload {
|
||||
'dist-tag'?: string;
|
||||
version?: string;
|
||||
}
|
||||
|
||||
export interface DistTagChangePayload {
|
||||
'dist-tag': string;
|
||||
}
|
||||
|
||||
export interface PackageOwnerPayload {
|
||||
maintainer: string;
|
||||
}
|
||||
|
||||
export interface DeprecatedChangePayload {
|
||||
deprecated: string;
|
||||
}
|
||||
|
||||
export class HookEvent<T = object> {
|
||||
readonly changeId: string;
|
||||
readonly event: HookEventType;
|
||||
readonly fullname: string;
|
||||
readonly type: 'package';
|
||||
readonly version: '1.0.0';
|
||||
readonly change: T;
|
||||
readonly time: number;
|
||||
|
||||
constructor(event: HookEventType, changeId: string, fullname: string, change: T) {
|
||||
this.changeId = changeId;
|
||||
this.event = event;
|
||||
this.fullname = fullname;
|
||||
this.type = 'package';
|
||||
this.version = '1.0.0';
|
||||
this.change = change;
|
||||
this.time = Date.now();
|
||||
}
|
||||
|
||||
static createPublishEvent(fullname: string, changeId: string, version: string, distTag?: string): HookEvent<PublishChangePayload> {
|
||||
return new HookEvent(HookEventType.Publish, changeId, fullname, {
|
||||
'dist-tag': distTag,
|
||||
version,
|
||||
});
|
||||
}
|
||||
|
||||
static createUnpublishEvent(fullname: string, changeId: string, version?: string, distTag?: string): HookEvent<UnpublishChangePayload> {
|
||||
return new HookEvent(HookEventType.Unpublish, changeId, fullname, {
|
||||
'dist-tag': distTag,
|
||||
version,
|
||||
});
|
||||
}
|
||||
|
||||
static createOwnerEvent(fullname: string, changeId: string, maintainer: string): HookEvent<PackageOwnerPayload> {
|
||||
return new HookEvent(HookEventType.Owner, changeId, fullname, {
|
||||
maintainer,
|
||||
});
|
||||
}
|
||||
|
||||
static createOwnerRmEvent(fullname: string, changeId: string, maintainer: string): HookEvent<PackageOwnerPayload> {
|
||||
return new HookEvent(HookEventType.OwnerRm, changeId, fullname, {
|
||||
maintainer,
|
||||
});
|
||||
}
|
||||
|
||||
static createDistTagEvent(fullname: string, changeId: string, distTag: string): HookEvent<DistTagChangePayload> {
|
||||
return new HookEvent(HookEventType.DistTag, changeId, fullname, {
|
||||
'dist-tag': distTag,
|
||||
});
|
||||
}
|
||||
|
||||
static createDistTagRmEvent(fullname: string, changeId: string, distTag: string): HookEvent<DistTagChangePayload> {
|
||||
return new HookEvent(HookEventType.DistTagRm, changeId, fullname, {
|
||||
'dist-tag': distTag,
|
||||
});
|
||||
}
|
||||
|
||||
static createDeprecatedEvent(fullname: string, changeId: string, deprecated: string): HookEvent<DeprecatedChangePayload> {
|
||||
return new HookEvent(HookEventType.Deprecated, changeId, fullname, {
|
||||
deprecated,
|
||||
});
|
||||
}
|
||||
|
||||
static createUndeprecatedEvent(fullname: string, changeId: string, deprecated: string): HookEvent<DeprecatedChangePayload> {
|
||||
return new HookEvent(HookEventType.Undeprecated, changeId, fullname, {
|
||||
deprecated,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -11,6 +11,7 @@ interface PackageData extends EntityData {
  description: string;
  abbreviatedsDist?: Dist;
  manifestsDist?: Dist;
  registryId?: string;
}

export enum DIST_NAMES {
@@ -36,6 +37,7 @@ export class Package extends Entity {
  description: string;
  abbreviatedsDist?: Dist;
  manifestsDist?: Dist;
  registryId?: string;

  constructor(data: PackageData) {
    super(data);
@@ -46,6 +48,7 @@ export class Package extends Entity {
    this.description = data.description;
    this.abbreviatedsDist = data.abbreviatedsDist;
    this.manifestsDist = data.manifestsDist;
    this.registryId = data.registryId;
  }

  static create(data: EasyData<PackageData, 'packageId'>): Package {
@@ -81,6 +84,11 @@ export class Package extends Entity {
    return this.createDist(DIST_NAMES.ABBREVIATED_MANIFESTS, info);
  }

  createPackageVersionFile(path: string, version: string, info: FileInfo) {
    // path should start with `/`, e.g.: '/foo/bar/index.js'
    return this.createDist(`files${path}`, info, version);
  }

  private distDir(filename: string, version?: string) {
    if (version) {
      return `/packages/${this.fullname}/${version}/${filename}`;

app/core/entity/PackageVersionFile.ts (new file, 43 lines)
@@ -0,0 +1,43 @@
import { Dist } from './Dist';
import { Entity, EntityData } from './Entity';
import { EasyData, EntityUtil } from '../util/EntityUtil';

interface PackageVersionFileData extends EntityData {
  packageVersionFileId: string;
  packageVersionId: string;
  dist: Dist;
  directory: string;
  name: string;
  contentType: string;
  mtime: Date;
}

export class PackageVersionFile extends Entity {
  packageVersionFileId: string;
  packageVersionId: string;
  dist: Dist;
  directory: string;
  name: string;
  contentType: string;
  mtime: Date;

  constructor(data: PackageVersionFileData) {
    super(data);
    this.packageVersionFileId = data.packageVersionFileId;
    this.packageVersionId = data.packageVersionId;
    this.dist = data.dist;
    this.directory = data.directory;
    this.name = data.name;
    this.contentType = data.contentType;
    this.mtime = data.mtime;
  }

  get path() {
    return this.directory === '/' ? `/${this.name}` : `${this.directory}/${this.name}`;
  }

  static create(data: EasyData<PackageVersionFileData, 'packageVersionFileId'>): PackageVersionFile {
    const newData = EntityUtil.defaultData(data, 'packageVersionFileId');
    return new PackageVersionFile(newData);
  }
}
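// NOTE (illustrative sketch, not part of the diff): the `path` getter joins directory and
// name, special-casing the root directory to avoid a double slash. Inputs are hypothetical.
const joinPath = (directory: string, name: string) =>
  directory === '/' ? `/${name}` : `${directory}/${name}`;
console.log(joinPath('/', 'package.json')); // => '/package.json'
console.log(joinPath('/lib', 'index.js')); // => '/lib/index.js'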
app/core/entity/Registry.ts (new file, 38 lines)
@@ -0,0 +1,38 @@
import { Entity, EntityData } from './Entity';
import { EasyData, EntityUtil } from '../util/EntityUtil';
import type { RegistryType } from '../../common/enum/Registry';

interface RegistryData extends EntityData {
  name: string;
  registryId: string;
  host: string;
  changeStream: string;
  userPrefix: string;
  type: RegistryType;
}

export type CreateRegistryData = Omit<EasyData<RegistryData, 'registryId'>, 'id'>;

export class Registry extends Entity {
  name: string;
  registryId: string;
  host: string;
  changeStream: string;
  userPrefix: string;
  type: RegistryType;

  constructor(data: RegistryData) {
    super(data);
    this.name = data.name;
    this.registryId = data.registryId;
    this.host = data.host;
    this.changeStream = data.changeStream;
    this.userPrefix = data.userPrefix;
    this.type = data.type;
  }

  public static create(data: CreateRegistryData): Registry {
    const newData = EntityUtil.defaultData(data, 'registryId');
    return new Registry(newData);
  }
}
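// NOTE (illustrative sketch, not part of the diff): Registry.create fills registryId via
// EntityUtil.defaultData; the field values below are hypothetical.
const registry = Registry.create({
  name: 'custom',
  host: 'https://registry.example.com',
  changeStream: 'https://replicate.example.com/registry/_changes',
  userPrefix: 'cnpm:',
  type: RegistryType.Cnpmcore,
});
console.log(registry.registryId); // generated identifier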
app/core/entity/Scope.ts (new file, 28 lines)
@@ -0,0 +1,28 @@
import { Entity, EntityData } from './Entity';
import { EasyData, EntityUtil } from '../util/EntityUtil';

interface ScopeData extends EntityData {
  name: string;
  scopeId: string;
  registryId: string;
}

export type CreateScopeData = Omit<EasyData<ScopeData, 'scopeId'>, 'id'>;

export class Scope extends Entity {
  name: string;
  registryId: string;
  scopeId: string;

  constructor(data: ScopeData) {
    super(data);
    this.name = data.name;
    this.registryId = data.registryId;
    this.scopeId = data.scopeId;
  }

  static create(data: CreateScopeData): Scope {
    const newData = EntityUtil.defaultData(data, 'scopeId');
    return new Scope(newData);
  }
}
@@ -4,43 +4,94 @@ import { Entity, EntityData } from './Entity';
|
||||
import { EasyData, EntityUtil } from '../util/EntityUtil';
|
||||
import { TaskType, TaskState } from '../../common/enum/Task';
|
||||
import dayjs from '../../common/dayjs';
|
||||
import { HookEvent } from './HookEvent';
|
||||
|
||||
interface TaskData extends EntityData {
|
||||
export const HOST_NAME = os.hostname();
|
||||
export const PID = process.pid;
|
||||
|
||||
export interface TaskBaseData {
|
||||
taskWorker: string;
|
||||
}
|
||||
|
||||
export interface TaskData<T = TaskBaseData> extends EntityData {
|
||||
taskId: string;
|
||||
type: TaskType;
|
||||
state: TaskState;
|
||||
targetName: string;
|
||||
authorId: string;
|
||||
authorIp: string;
|
||||
data: any;
|
||||
data: T;
|
||||
logPath?: string;
|
||||
logStorePosition?: string;
|
||||
attempts?: number;
|
||||
error?: string;
|
||||
bizId?: string;
|
||||
}
|
||||
|
||||
export type SyncPackageTaskOptions = {
|
||||
authorId?: string;
|
||||
authorIp?: string;
|
||||
remoteAuthToken?: string;
|
||||
tips?: string;
|
||||
skipDependencies?: boolean;
|
||||
syncDownloadData?: boolean;
|
||||
// force sync history version
|
||||
forceSyncHistory?: boolean;
|
||||
registryId?: string;
|
||||
};
|
||||
|
||||
export class Task extends Entity {
|
||||
export interface CreateHookTaskData extends TaskBaseData {
|
||||
hookEvent: HookEvent;
|
||||
}
|
||||
|
||||
export interface TriggerHookTaskData extends TaskBaseData {
|
||||
hookEvent: HookEvent;
|
||||
hookId: string;
|
||||
responseStatus?: number;
|
||||
}
|
||||
|
||||
export interface CreateSyncPackageTaskData extends TaskBaseData {
|
||||
remoteAuthToken?: string;
|
||||
tips?: string;
|
||||
skipDependencies?: boolean;
|
||||
syncDownloadData?: boolean;
|
||||
forceSyncHistory?: boolean;
|
||||
}
|
||||
|
||||
export interface ChangesStreamTaskData extends TaskBaseData {
|
||||
since: string;
|
||||
last_package?: string,
|
||||
last_package_created?: Date,
|
||||
task_count?: number,
|
||||
registryId?: string,
|
||||
}
|
||||
|
||||
export interface TaskUpdateCondition {
|
||||
taskId: string;
|
||||
attempts: number;
|
||||
}
|
||||
|
||||
export type CreateHookTask = Task<CreateHookTaskData>;
|
||||
export type TriggerHookTask = Task<TriggerHookTaskData>;
|
||||
export type CreateSyncPackageTask = Task<CreateSyncPackageTaskData>;
|
||||
export type ChangesStreamTask = Task<ChangesStreamTaskData>;
|
||||
|
||||
export class Task<T extends TaskBaseData = TaskBaseData> extends Entity {
|
||||
taskId: string;
|
||||
type: TaskType;
|
||||
state: TaskState;
|
||||
targetName: string;
|
||||
taskWorker: string;
|
||||
authorId: string;
|
||||
authorIp: string;
|
||||
data: any;
|
||||
data: T;
|
||||
logPath: string;
|
||||
logStorePosition: string;
|
||||
attempts: number;
|
||||
error: string;
|
||||
bizId?: string;
|
||||
|
||||
constructor(data: TaskData) {
|
||||
constructor(data: TaskData<T>) {
|
||||
super(data);
|
||||
this.taskId = data.taskId;
|
||||
this.type = data.type;
|
||||
@@ -53,6 +104,7 @@ export class Task extends Entity {
|
||||
this.logStorePosition = data.logStorePosition ?? '';
|
||||
this.attempts = data.attempts ?? 0;
|
||||
this.error = data.error ?? '';
|
||||
this.bizId = data.bizId;
|
||||
}
|
||||
|
||||
public resetLogPath() {
|
||||
@@ -61,15 +113,15 @@ export class Task extends Entity {
|
||||
}
|
||||
|
||||
public setExecuteWorker() {
|
||||
this.data.taskWorker = `${os.hostname()}:${process.pid}`;
|
||||
this.data.taskWorker = `${HOST_NAME}:${PID}`;
|
||||
}
|
||||
|
||||
private static create(data: EasyData<TaskData, 'taskId'>): Task {
|
||||
private static create<T extends TaskBaseData>(data: EasyData<TaskData<T>, 'taskId'>): Task<T> {
|
||||
const newData = EntityUtil.defaultData(data, 'taskId');
|
||||
return new Task(newData);
|
||||
}
|
||||
|
||||
public static createSyncPackage(fullname: string, options?: SyncPackageTaskOptions): Task {
|
||||
public static createSyncPackage(fullname: string, options?: SyncPackageTaskOptions): CreateSyncPackageTask {
|
||||
const data = {
|
||||
type: TaskType.SyncPackage,
|
||||
state: TaskState.Waiting,
|
||||
@@ -79,9 +131,12 @@ export class Task extends Entity {
|
||||
data: {
|
||||
// task execute worker
|
||||
taskWorker: '',
|
||||
remoteAuthToken: options?.remoteAuthToken,
|
||||
tips: options?.tips,
|
||||
registryId: options?.registryId ?? '',
|
||||
skipDependencies: options?.skipDependencies,
|
||||
syncDownloadData: options?.syncDownloadData,
|
||||
forceSyncHistory: options?.forceSyncHistory,
|
||||
},
|
||||
};
|
||||
const task = this.create(data);
|
||||
@@ -89,20 +144,72 @@ export class Task extends Entity {
|
||||
return task;
|
||||
}
|
||||
|
||||
public static createChangesStream(targetName: string): Task {
|
||||
public static createChangesStream(targetName: string, registryId = '', since = ''): ChangesStreamTask {
|
||||
const data = {
|
||||
type: TaskType.ChangesStream,
|
||||
state: TaskState.Waiting,
|
||||
targetName,
|
||||
authorId: `pid_${PID}`,
|
||||
authorIp: HOST_NAME,
|
||||
data: {
|
||||
// task execute worker
|
||||
taskWorker: '',
|
||||
registryId,
|
||||
since,
|
||||
},
|
||||
};
|
||||
return this.create(data) as ChangesStreamTask;
|
||||
}
|
||||
|
||||
public updateSyncData({ lastSince, taskCount, lastPackage }: SyncInfo) {
|
||||
const syncData = this.data as unknown as ChangesStreamTaskData;
|
||||
// update the task's sync bookkeeping
|
||||
syncData.since = lastSince;
|
||||
syncData.task_count = (syncData.task_count || 0) + taskCount;
|
||||
|
||||
if (taskCount > 0) {
|
||||
syncData.last_package = lastPackage;
|
||||
syncData.last_package_created = new Date();
|
||||
}
|
||||
}
|
||||
|
||||
public static createCreateHookTask(hookEvent: HookEvent): CreateHookTask {
|
||||
const data = {
|
||||
type: TaskType.CreateHook,
|
||||
state: TaskState.Waiting,
|
||||
targetName: hookEvent.fullname,
|
||||
authorId: `pid_${process.pid}`,
|
||||
authorIp: os.hostname(),
|
||||
bizId: `CreateHook:${hookEvent.changeId}`,
|
||||
data: {
|
||||
// task execute worker
|
||||
taskWorker: '',
|
||||
hookEvent,
|
||||
},
|
||||
};
|
||||
const task = this.create(data);
|
||||
task.logPath = `/packages/${hookEvent.fullname}/hooks/${dayjs().format('YYYY/MM/DDHHmm')}-${task.taskId}.log`;
|
||||
return task;
|
||||
}
|
||||
|
||||
public static createTriggerHookTask(hookEvent: HookEvent, hookId: string): TriggerHookTask {
|
||||
const data = {
|
||||
type: TaskType.TriggerHook,
|
||||
state: TaskState.Waiting,
|
||||
targetName: hookEvent.fullname,
|
||||
authorId: `pid_${process.pid}`,
|
||||
bizId: `TriggerHook:${hookEvent.changeId}:${hookId}`,
|
||||
authorIp: os.hostname(),
|
||||
data: {
|
||||
// task execute worker
|
||||
taskWorker: '',
|
||||
since: '',
|
||||
hookEvent,
|
||||
hookId,
|
||||
},
|
||||
};
|
||||
return this.create(data);
|
||||
const task = this.create(data);
|
||||
task.logPath = `/packages/${hookEvent.fullname}/hooks/${dayjs().format('YYYY/MM/DDHHmm')}-${task.taskId}.log`;
|
||||
return task;
|
||||
}
|
||||
|
||||
public static createSyncBinary(targetName: string, lastData: any): Task {
|
||||
@@ -110,8 +217,9 @@ export class Task extends Entity {
|
||||
type: TaskType.SyncBinary,
|
||||
state: TaskState.Waiting,
|
||||
targetName,
|
||||
authorId: `pid_${process.pid}`,
|
||||
authorIp: os.hostname(),
|
||||
authorId: `pid_${PID}`,
|
||||
authorIp: HOST_NAME,
|
||||
bizId: `SyncBinary:${targetName}`,
|
||||
data: {
|
||||
// task execute worker
|
||||
taskWorker: '',
|
||||
@@ -122,4 +230,21 @@ export class Task extends Entity {
|
||||
task.logPath = `/binaries/${targetName}/syncs/${dayjs().format('YYYY/MM/DDHHmm')}-${task.taskId}.log`;
|
||||
return task;
|
||||
}
|
||||
|
||||
start(): TaskUpdateCondition {
|
||||
const condition = {
|
||||
taskId: this.taskId,
|
||||
attempts: this.attempts,
|
||||
};
|
||||
this.setExecuteWorker();
|
||||
this.state = TaskState.Processing;
|
||||
this.attempts += 1;
|
||||
return condition;
|
||||
}
|
||||
}
|
||||
|
||||
export type SyncInfo = {
|
||||
lastSince: string;
|
||||
taskCount: number;
|
||||
lastPackage?: string;
|
||||
};
|
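A short sketch of how the generic Task&lt;T&gt; typing above is meant to be consumed; only APIs shown in this diff are used, while the import path and argument values are illustrative:

import { Task, ChangesStreamTask } from '../entity/Task'; // import path assumed

const task: ChangesStreamTask = Task.createChangesStream('GLOBAL_WORKER', 'registry-1', '100');
// start() records the execute worker, bumps attempts and returns an optimistic-lock condition
const condition = task.start(); // { taskId, attempts }
// task.data is typed as ChangesStreamTaskData, so since/task_count type-check
task.updateSyncData({ lastSince: '200', taskCount: 3, lastPackage: 'lodash' });
console.log(task.data.since, task.data.task_count, condition.attempts);

The taskId/attempts pair is presumably consumed by the task repository to guard concurrent state updates.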
||||
|
||||
@@ -1,14 +1,37 @@
|
||||
import dayjs from 'dayjs';
|
||||
import { Entity, EntityData } from './Entity';
|
||||
import { EasyData, EntityUtil } from '../util/EntityUtil';
|
||||
|
||||
interface TokenData extends EntityData {
|
||||
export enum TokenType {
|
||||
granular = 'granular',
|
||||
classic = 'classic',
|
||||
}
|
||||
interface BaseTokenData extends EntityData {
|
||||
tokenId: string;
|
||||
tokenMark: string;
|
||||
tokenKey: string;
|
||||
cidrWhitelist: string[];
|
||||
cidrWhitelist?: string[];
|
||||
userId: string;
|
||||
isReadonly: boolean;
|
||||
isAutomation: boolean;
|
||||
isReadonly?: boolean;
|
||||
type?: TokenType;
|
||||
}
|
||||
|
||||
interface ClassicTokenData extends BaseTokenData{
|
||||
isAutomation?: boolean;
|
||||
}
|
||||
interface GranularTokenData extends BaseTokenData {
|
||||
name: string;
|
||||
description?: string;
|
||||
allowedScopes?: string[];
|
||||
allowedPackages?: string[];
|
||||
expires: number;
|
||||
expiredAt: Date;
|
||||
}
|
||||
|
||||
type TokenData = ClassicTokenData | GranularTokenData;
|
||||
|
||||
export function isGranularToken(data: TokenData): data is GranularTokenData {
|
||||
return data.type === TokenType.granular;
|
||||
}
|
||||
|
||||
export class Token extends Entity {
|
||||
@@ -19,6 +42,13 @@ export class Token extends Entity {
|
||||
readonly userId: string;
|
||||
readonly isReadonly: boolean;
|
||||
readonly isAutomation: boolean;
|
||||
readonly type?: TokenType;
|
||||
readonly name?: string;
|
||||
readonly description?: string;
|
||||
readonly allowedScopes?: string[];
|
||||
readonly expiredAt?: Date;
|
||||
readonly expires?: number;
|
||||
allowedPackages?: string[];
|
||||
token?: string;
|
||||
|
||||
constructor(data: TokenData) {
|
||||
@@ -27,13 +57,27 @@ export class Token extends Entity {
|
||||
this.tokenId = data.tokenId;
|
||||
this.tokenMark = data.tokenMark;
|
||||
this.tokenKey = data.tokenKey;
|
||||
this.cidrWhitelist = data.cidrWhitelist;
|
||||
this.isReadonly = data.isReadonly;
|
||||
this.isAutomation = data.isAutomation;
|
||||
this.cidrWhitelist = data.cidrWhitelist || [];
|
||||
this.isReadonly = data.isReadonly || false;
|
||||
this.type = data.type || TokenType.classic;
|
||||
|
||||
if (isGranularToken(data)) {
|
||||
this.name = data.name;
|
||||
this.description = data.description;
|
||||
this.allowedScopes = data.allowedScopes;
|
||||
this.expiredAt = data.expiredAt;
|
||||
this.allowedPackages = data.allowedPackages;
|
||||
} else {
|
||||
this.isAutomation = data.isAutomation || false;
|
||||
}
|
||||
}
|
||||
|
||||
static create(data: EasyData<TokenData, 'tokenId'>): Token {
|
||||
const newData = EntityUtil.defaultData(data, 'tokenId');
|
||||
if (isGranularToken(newData) && !newData.expiredAt) {
|
||||
newData.expiredAt = dayjs(newData.createdAt).add(newData.expires, 'days').toDate();
|
||||
}
|
||||
return new Token(newData);
|
||||
}
|
||||
|
||||
}
|
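A hedged sketch of creating a granular token with the new API; the exact EasyData&lt;TokenData, 'tokenId'&gt; typing is glossed over with a cast, and the token key/mark values are placeholders:

import { Token, TokenType } from '../entity/Token'; // import path assumed

const granular = Token.create({
  tokenKey: 'hashed-token-value', // assumed to be hashed by the caller
  tokenMark: 'npm_abc1',
  userId: 'user-1',
  type: TokenType.granular,
  name: 'ci-publish',
  allowedPackages: ['@example/pkg'],
  expires: 30, // days; create() backfills expiredAt = createdAt + 30 days when it is omitted
} as any);

console.log(granular.type);         // TokenType.granular
console.log(granular.expiredAt);    // roughly createdAt + 30 days
console.log(granular.isAutomation); // undefined, only classic tokens get this default

isGranularToken is the guard that drives both the constructor branching and the expiredAt backfill in create().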
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { Entity, EntityData } from './Entity';
|
||||
import { EasyData, EntityUtil } from '../util/EntityUtil';
|
||||
import { cleanUserPrefix } from '../../common/PackageUtil';
|
||||
|
||||
interface UserData extends EntityData {
|
||||
userId: string;
|
||||
@@ -15,6 +16,7 @@ interface UserData extends EntityData {
|
||||
export class User extends Entity {
|
||||
userId: string;
|
||||
name: string;
|
||||
displayName: string;
|
||||
email: string;
|
||||
passwordSalt: string;
|
||||
passwordIntegrity: string;
|
||||
@@ -26,6 +28,7 @@ export class User extends Entity {
|
||||
super(data);
|
||||
this.userId = data.userId;
|
||||
this.name = data.name;
|
||||
this.displayName = cleanUserPrefix(this.name);
|
||||
this.email = data.email;
|
||||
this.passwordSalt = data.passwordSalt;
|
||||
this.passwordIntegrity = data.passwordIntegrity;
|
||||
|
||||
32 app/core/entity/WebauthnCredential.ts Normal file
@@ -0,0 +1,32 @@
|
||||
import { Entity, EntityData } from './Entity';
|
||||
import { EasyData, EntityUtil } from '../util/EntityUtil';
|
||||
|
||||
interface WebauthnCredentialData extends EntityData {
|
||||
wancId: string;
|
||||
userId: string;
|
||||
credentialId: string;
|
||||
publicKey: string;
|
||||
browserType?: string;
|
||||
}
|
||||
|
||||
export class WebauthnCredential extends Entity {
|
||||
wancId: string;
|
||||
userId: string;
|
||||
credentialId: string;
|
||||
publicKey: string;
|
||||
browserType?: string;
|
||||
|
||||
constructor(data: WebauthnCredentialData) {
|
||||
super(data);
|
||||
this.wancId = data.wancId;
|
||||
this.userId = data.userId;
|
||||
this.credentialId = data.credentialId;
|
||||
this.publicKey = data.publicKey;
|
||||
this.browserType = data.browserType;
|
||||
}
|
||||
|
||||
static create(data: EasyData<WebauthnCredentialData, 'wancId'>): WebauthnCredential {
|
||||
const newData = EntityUtil.defaultData(data, 'wancId');
|
||||
return new WebauthnCredential(newData);
|
||||
}
|
||||
}
|
||||
@@ -1,3 +1,4 @@
|
||||
import { EggAppConfig } from 'egg';
|
||||
import { Event, Inject } from '@eggjs/tegg';
|
||||
import {
|
||||
PACKAGE_UNPUBLISHED,
|
||||
@@ -8,83 +9,141 @@ import {
|
||||
PACKAGE_TAG_REMOVED,
|
||||
PACKAGE_MAINTAINER_CHANGED,
|
||||
PACKAGE_MAINTAINER_REMOVED,
|
||||
PACKAGE_META_CHANGED,
|
||||
PACKAGE_META_CHANGED, PackageMetaChange,
|
||||
} from './index';
|
||||
import { ChangeRepository } from '../../repository/ChangeRepository';
|
||||
import { Change } from '../entity/Change';
|
||||
import { HookEvent } from '../entity/HookEvent';
|
||||
import { Task } from '../entity/Task';
|
||||
import { User } from '../entity/User';
|
||||
import { TaskService } from '../service/TaskService';
|
||||
|
||||
class ChangesStreamEvent {
|
||||
@Inject()
|
||||
private readonly changeRepository: ChangeRepository;
|
||||
|
||||
protected async addChange(type: string, fullname: string, data: object) {
|
||||
await this.changeRepository.addChange(Change.create({
|
||||
@Inject()
|
||||
protected readonly taskService: TaskService;
|
||||
|
||||
@Inject()
|
||||
protected readonly config: EggAppConfig;
|
||||
|
||||
protected get hookEnable() {
|
||||
return this.config.hookEnable;
|
||||
}
|
||||
|
||||
protected async addChange(type: string, fullname: string, data: object): Promise<Change> {
|
||||
const change = Change.create({
|
||||
type,
|
||||
targetName: fullname,
|
||||
data,
|
||||
}));
|
||||
});
|
||||
await this.changeRepository.addChange(change);
|
||||
return change;
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_UNPUBLISHED)
|
||||
export class PackageUnpublished extends ChangesStreamEvent {
|
||||
async handle(fullname: string) {
|
||||
await this.addChange(PACKAGE_UNPUBLISHED, fullname, {});
|
||||
const change = await this.addChange(PACKAGE_UNPUBLISHED, fullname, {});
|
||||
if (this.hookEnable) {
|
||||
const task = Task.createCreateHookTask(HookEvent.createUnpublishEvent(fullname, change.changeId));
|
||||
await this.taskService.createTask(task, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_VERSION_ADDED)
|
||||
export class PackageVersionAdded extends ChangesStreamEvent {
|
||||
async handle(fullname: string, version: string) {
|
||||
await this.addChange(PACKAGE_VERSION_ADDED, fullname, { version });
|
||||
async handle(fullname: string, version: string, tag?: string) {
|
||||
const change = await this.addChange(PACKAGE_VERSION_ADDED, fullname, { version });
|
||||
if (this.hookEnable) {
|
||||
const task = Task.createCreateHookTask(HookEvent.createPublishEvent(fullname, change.changeId, version, tag));
|
||||
await this.taskService.createTask(task, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_VERSION_REMOVED)
|
||||
export class PackageVersionRemoved extends ChangesStreamEvent {
|
||||
async handle(fullname: string, version: string) {
|
||||
await this.addChange(PACKAGE_VERSION_REMOVED, fullname, { version });
|
||||
async handle(fullname: string, version: string, tag?: string) {
|
||||
const change = await this.addChange(PACKAGE_VERSION_REMOVED, fullname, { version });
|
||||
if (this.hookEnable) {
|
||||
const task = Task.createCreateHookTask(HookEvent.createUnpublishEvent(fullname, change.changeId, version, tag));
|
||||
await this.taskService.createTask(task, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_TAG_ADDED)
|
||||
export class PackageTagAdded extends ChangesStreamEvent {
|
||||
async handle(fullname: string, tag: string) {
|
||||
await this.addChange(PACKAGE_TAG_ADDED, fullname, { tag });
|
||||
const change = await this.addChange(PACKAGE_TAG_ADDED, fullname, { tag });
|
||||
if (this.hookEnable) {
|
||||
const task = Task.createCreateHookTask(HookEvent.createDistTagEvent(fullname, change.changeId, tag));
|
||||
await this.taskService.createTask(task, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_TAG_CHANGED)
|
||||
export class PackageTagChanged extends ChangesStreamEvent {
|
||||
async handle(fullname: string, tag: string) {
|
||||
await this.addChange(PACKAGE_TAG_CHANGED, fullname, { tag });
|
||||
const change = await this.addChange(PACKAGE_TAG_CHANGED, fullname, { tag });
|
||||
if (this.hookEnable) {
|
||||
const task = Task.createCreateHookTask(HookEvent.createDistTagEvent(fullname, change.changeId, tag));
|
||||
await this.taskService.createTask(task, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_TAG_REMOVED)
|
||||
export class PackageTagRemoved extends ChangesStreamEvent {
|
||||
async handle(fullname: string, tag: string) {
|
||||
await this.addChange(PACKAGE_TAG_REMOVED, fullname, { tag });
|
||||
const change = await this.addChange(PACKAGE_TAG_REMOVED, fullname, { tag });
|
||||
if (this.hookEnable) {
|
||||
const task = Task.createCreateHookTask(HookEvent.createDistTagRmEvent(fullname, change.changeId, tag));
|
||||
await this.taskService.createTask(task, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_MAINTAINER_CHANGED)
|
||||
export class PackageMaintainerChanged extends ChangesStreamEvent {
|
||||
async handle(fullname: string) {
|
||||
await this.addChange(PACKAGE_MAINTAINER_CHANGED, fullname, {});
|
||||
async handle(fullname: string, maintainers: User[]) {
|
||||
const change = await this.addChange(PACKAGE_MAINTAINER_CHANGED, fullname, {});
|
||||
// TODO: should diff the maintainer lists and push only the delta, not the full set
|
||||
if (this.hookEnable) {
|
||||
for (const maintainer of maintainers) {
|
||||
const task = Task.createCreateHookTask(HookEvent.createOwnerEvent(fullname, change.changeId, maintainer.name));
|
||||
await this.taskService.createTask(task, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_MAINTAINER_REMOVED)
|
||||
export class PackageMaintainerRemoved extends ChangesStreamEvent {
|
||||
async handle(fullname: string, maintainer: string) {
|
||||
await this.addChange(PACKAGE_MAINTAINER_REMOVED, fullname, { maintainer });
|
||||
const change = await this.addChange(PACKAGE_MAINTAINER_REMOVED, fullname, { maintainer });
|
||||
if (this.hookEnable) {
|
||||
const task = Task.createCreateHookTask(HookEvent.createOwnerRmEvent(fullname, change.changeId, maintainer));
|
||||
await this.taskService.createTask(task, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_META_CHANGED)
|
||||
export class PackageMetaChanged extends ChangesStreamEvent {
|
||||
async handle(fullname: string, meta: object) {
|
||||
await this.addChange(PACKAGE_META_CHANGED, fullname, { ...meta });
|
||||
async handle(fullname: string, meta: PackageMetaChange) {
|
||||
const change = await this.addChange(PACKAGE_META_CHANGED, fullname, { ...meta });
|
||||
const { deprecateds } = meta;
|
||||
if (this.hookEnable) {
|
||||
for (const deprecated of deprecateds || []) {
|
||||
const task = Task.createCreateHookTask(HookEvent.createDeprecatedEvent(fullname, change.changeId, deprecated.version));
|
||||
await this.taskService.createTask(task, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
35 app/core/event/SyncPackageVersionFile.ts Normal file
@@ -0,0 +1,35 @@
|
||||
import { Event, Inject } from '@eggjs/tegg';
|
||||
import {
|
||||
EggAppConfig,
|
||||
} from 'egg';
|
||||
import { PACKAGE_VERSION_ADDED } from './index';
|
||||
import { getScopeAndName } from '../../common/PackageUtil';
|
||||
import { PackageManagerService } from '../service/PackageManagerService';
|
||||
import { PackageVersionFileService } from '../service/PackageVersionFileService';
|
||||
|
||||
class SyncPackageVersionFileEvent {
|
||||
@Inject()
|
||||
protected readonly config: EggAppConfig;
|
||||
@Inject()
|
||||
private readonly packageManagerService: PackageManagerService;
|
||||
@Inject()
|
||||
private readonly packageVersionFileService: PackageVersionFileService;
|
||||
|
||||
protected async syncPackageVersionFile(fullname: string, version: string) {
|
||||
if (!this.config.cnpmcore.enableUnpkg) return;
|
||||
// ignore sync on unittest
|
||||
if (this.config.env === 'unittest' && fullname !== '@cnpm/unittest-unpkg-demo') return;
|
||||
const [ scope, name ] = getScopeAndName(fullname);
|
||||
const { packageVersion } = await this.packageManagerService.showPackageVersionByVersionOrTag(
|
||||
scope, name, version);
|
||||
if (!packageVersion) return;
|
||||
await this.packageVersionFileService.syncPackageVersionFiles(packageVersion);
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_VERSION_ADDED)
|
||||
export class PackageVersionAdded extends SyncPackageVersionFileEvent {
|
||||
async handle(fullname: string, version: string) {
|
||||
await this.syncPackageVersionFile(fullname, version);
|
||||
}
|
||||
}
|
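The handler above is gated by cnpmcore.enableUnpkg; a hedged sketch of the corresponding config entry (the file layout and default value are assumptions, only the key name comes from the code above):

// config/config.default.ts (sketch)
export default {
  cnpmcore: {
    // when true, PACKAGE_VERSION_ADDED also syncs the version's files for unpkg-style serving
    enableUnpkg: true,
  },
};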
||||
@@ -1,4 +1,5 @@
|
||||
import '@eggjs/tegg';
|
||||
import { User } from '../entity/User';
|
||||
|
||||
export const PACKAGE_UNPUBLISHED = 'PACKAGE_UNPUBLISHED';
|
||||
export const PACKAGE_BLOCKED = 'PACKAGE_BLOCKED';
|
||||
@@ -12,18 +13,28 @@ export const PACKAGE_MAINTAINER_CHANGED = 'PACKAGE_MAINTAINER_CHANGED';
|
||||
export const PACKAGE_MAINTAINER_REMOVED = 'PACKAGE_MAINTAINER_REMOVED';
|
||||
export const PACKAGE_META_CHANGED = 'PACKAGE_META_CHANGED';
|
||||
|
||||
export interface PackageDeprecated {
|
||||
version: string;
|
||||
deprecated: string;
|
||||
}
|
||||
|
||||
export interface PackageMetaChange {
|
||||
deprecateds?: Array<PackageDeprecated>;
|
||||
}
|
||||
|
||||
|
||||
declare module '@eggjs/tegg' {
|
||||
interface Events {
|
||||
[PACKAGE_UNPUBLISHED]: (fullname: string) => Promise<void>;
|
||||
[PACKAGE_BLOCKED]: (fullname: string) => Promise<void>;
|
||||
[PACKAGE_UNBLOCKED]: (fullname: string) => Promise<void>;
|
||||
[PACKAGE_VERSION_ADDED]: (fullname: string, version: string) => Promise<void>;
|
||||
[PACKAGE_VERSION_REMOVED]: (fullname: string, version: string) => Promise<void>;
|
||||
[PACKAGE_VERSION_ADDED]: (fullname: string, version: string, tag?: string) => Promise<void>;
|
||||
[PACKAGE_VERSION_REMOVED]: (fullname: string, version: string, tag?: string) => Promise<void>;
|
||||
[PACKAGE_TAG_ADDED]: (fullname: string, tag: string) => Promise<void>;
|
||||
[PACKAGE_TAG_CHANGED]: (fullname: string, tag: string) => Promise<void>;
|
||||
[PACKAGE_TAG_REMOVED]: (fullname: string, tag: string) => Promise<void>;
|
||||
[PACKAGE_MAINTAINER_CHANGED]: (fullname: string) => Promise<void>;
|
||||
[PACKAGE_MAINTAINER_CHANGED]: (fullname: string, maintainers: User[]) => Promise<void>;
|
||||
[PACKAGE_MAINTAINER_REMOVED]: (fullname: string, maintainer: string) => Promise<void>;
|
||||
[PACKAGE_META_CHANGED]: (fullname: string, meta: object) => Promise<void>;
|
||||
[PACKAGE_META_CHANGED]: (fullname: string, meta: PackageMetaChange) => Promise<void>;
|
||||
}
|
||||
}
|
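With the Events augmentation above, emitters are type-checked against the new signatures. A sketch of a publisher, assuming tegg exposes an EventBus with an emit(event, ...args) method; the service class and method names here are hypothetical:

import { AccessLevel, EventBus, Inject, SingletonProto } from '@eggjs/tegg';
import { PACKAGE_VERSION_ADDED, PACKAGE_MAINTAINER_CHANGED } from '../event'; // import path assumed
import { User } from '../entity/User';

@SingletonProto({ accessLevel: AccessLevel.PUBLIC })
export class PublishNotifier {
  @Inject()
  private readonly eventBus: EventBus;

  notify(fullname: string, version: string, tag: string, maintainers: User[]) {
    // the third argument is the new optional dist-tag
    this.eventBus.emit(PACKAGE_VERSION_ADDED, fullname, version, tag);
    // the maintainers payload is now part of the typed signature
    this.eventBus.emit(PACKAGE_MAINTAINER_CHANGED, fullname, maintainers);
  }
}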
||||
|
||||
@@ -1,14 +1,15 @@
|
||||
import { rm } from 'fs/promises';
|
||||
import {
|
||||
AccessLevel,
|
||||
ContextProto,
|
||||
SingletonProto,
|
||||
Inject,
|
||||
EggObjectFactory,
|
||||
} from '@eggjs/tegg';
|
||||
import {
|
||||
EggContextHttpClient,
|
||||
EggHttpClient,
|
||||
} from 'egg';
|
||||
import fs from 'fs/promises';
|
||||
import binaries, { SyncerClass } from '../../../config/binaries';
|
||||
import binaries, { BinaryName, CategoryName } from '../../../config/binaries';
|
||||
import { NFSAdapter } from '../../common/adapter/NFSAdapter';
|
||||
import { TaskType, TaskState } from '../../common/enum/Task';
|
||||
import { downloadToTempfile } from '../../common/FileUtil';
|
||||
@@ -17,39 +18,15 @@ import { Task } from '../entity/Task';
|
||||
import { Binary } from '../entity/Binary';
|
||||
import { TaskService } from './TaskService';
|
||||
import { AbstractBinary, BinaryItem } from '../../common/adapter/binary/AbstractBinary';
|
||||
import { ApiBinary } from '../../common/adapter/binary/ApiBinary';
|
||||
import { AbstractService } from '../../common/AbstractService';
|
||||
import { NodeBinary } from '../../common/adapter/binary/NodeBinary';
|
||||
import { NwjsBinary } from '../../common/adapter/binary/NwjsBinary';
|
||||
import { BucketBinary } from '../../common/adapter/binary/BucketBinary';
|
||||
import { CypressBinary } from '../../common/adapter/binary/CypressBinary';
|
||||
import { SqlcipherBinary } from '../../common/adapter/binary/SqlcipherBinary';
|
||||
import { PuppeteerBinary } from '../../common/adapter/binary/PuppeteerBinary';
|
||||
import { GithubBinary } from '../../common/adapter/binary/GithubBinary';
|
||||
import { ElectronBinary } from '../../common/adapter/binary/ElectronBinary';
|
||||
import { NodePreGypBinary } from '../../common/adapter/binary/NodePreGypBinary';
|
||||
import { ImageminBinary } from '../../common/adapter/binary/ImageminBinary';
|
||||
import { PlaywrightBinary } from '../../common/adapter/binary/PlaywrightBinary';
|
||||
|
||||
const BinaryClasses = {
|
||||
[SyncerClass.NodeBinary]: NodeBinary,
|
||||
[SyncerClass.NwjsBinary]: NwjsBinary,
|
||||
[SyncerClass.BucketBinary]: BucketBinary,
|
||||
[SyncerClass.CypressBinary]: CypressBinary,
|
||||
[SyncerClass.SqlcipherBinary]: SqlcipherBinary,
|
||||
[SyncerClass.PuppeteerBinary]: PuppeteerBinary,
|
||||
[SyncerClass.GithubBinary]: GithubBinary,
|
||||
[SyncerClass.ElectronBinary]: ElectronBinary,
|
||||
[SyncerClass.NodePreGypBinary]: NodePreGypBinary,
|
||||
[SyncerClass.ImageminBinary]: ImageminBinary,
|
||||
[SyncerClass.PlaywrightBinary]: PlaywrightBinary,
|
||||
};
|
||||
import { TaskRepository } from '../../repository/TaskRepository';
|
||||
import { BinaryType } from '../../common/enum/Binary';
|
||||
|
||||
function isoNow() {
|
||||
return new Date().toISOString();
|
||||
}
|
||||
|
||||
@ContextProto({
|
||||
@SingletonProto({
|
||||
accessLevel: AccessLevel.PUBLIC,
|
||||
})
|
||||
export class BinarySyncerService extends AbstractService {
|
||||
@@ -58,28 +35,71 @@ export class BinarySyncerService extends AbstractService {
|
||||
@Inject()
|
||||
private readonly taskService: TaskService;
|
||||
@Inject()
|
||||
private readonly httpclient: EggContextHttpClient;
|
||||
private readonly taskRepository: TaskRepository;
|
||||
@Inject()
|
||||
private readonly httpclient: EggHttpClient;
|
||||
@Inject()
|
||||
private readonly nfsAdapter: NFSAdapter;
|
||||
@Inject()
|
||||
private readonly eggObjectFactory: EggObjectFactory;
|
||||
|
||||
public async findBinary(binaryName: string, parent: string, name: string) {
|
||||
return await this.binaryRepository.findBinary(binaryName, parent, name);
|
||||
// canvas/v2.6.1/canvas-v2.6.1-node-v57-linux-glibc-x64.tar.gz
|
||||
// -> node-canvas-prebuilt/v2.6.1/node-canvas-prebuilt-v2.6.1-node-v57-linux-glibc-x64.tar.gz
|
||||
// legacy canvas versions may have the category as targetName, so stay backward compatible
|
||||
public async findBinary(targetName: BinaryName | CategoryName, parent: string, name: string) {
|
||||
return await this.binaryRepository.findBinary(targetName, parent, name);
|
||||
}
|
||||
|
||||
public async listDirBinaries(binary: Binary) {
|
||||
return await this.binaryRepository.listBinaries(binary.category, `${binary.parent}${binary.name}`);
|
||||
}
|
||||
|
||||
public async listRootBinaries(binaryName: string) {
|
||||
return await this.binaryRepository.listBinaries(binaryName, '/');
|
||||
public async listRootBinaries(binaryName: BinaryName) {
|
||||
// binaryName and category are usually identical, but some special binaries (e.g. canvas) map to more than one category
|
||||
// so when querying canvas, fetch data for both the binaryName and the category
|
||||
const {
|
||||
category,
|
||||
} = binaries[binaryName];
|
||||
const reqs = [
|
||||
this.binaryRepository.listBinaries(binaryName, '/'),
|
||||
];
|
||||
if (category && category !== binaryName) {
|
||||
reqs.push(this.binaryRepository.listBinaries(category, '/'));
|
||||
}
|
||||
|
||||
const [
|
||||
rootBinary,
|
||||
categoryBinary,
|
||||
] = await Promise.all(reqs);
|
||||
|
||||
const versions = rootBinary.map(b => b.name);
|
||||
categoryBinary?.forEach(b => {
|
||||
const version = b.name;
|
||||
// only append versions that are not present yet
|
||||
if (!versions.includes(version)) {
|
||||
rootBinary.push(b);
|
||||
}
|
||||
});
|
||||
|
||||
return rootBinary;
|
||||
}
|
||||
|
||||
public async downloadBinary(binary: Binary) {
|
||||
return await this.nfsAdapter.getDownloadUrlOrStream(binary.storePath);
|
||||
}
|
||||
|
||||
public async createTask(binaryName: string, lastData?: any) {
|
||||
return await this.taskService.createTask(Task.createSyncBinary(binaryName, lastData), false);
|
||||
// SyncBinary is triggered by a scheduled job on every host, so deduplicate manually
|
||||
// a bizId is added in the db to prevent duplicates; duplicate id errors are only logged
|
||||
public async createTask(binaryName: BinaryName, lastData?: any) {
|
||||
const existsTask = await this.taskRepository.findTaskByTargetName(binaryName, TaskType.SyncBinary);
|
||||
if (existsTask) {
|
||||
return existsTask;
|
||||
}
|
||||
try {
|
||||
return await this.taskService.createTask(Task.createSyncBinary(binaryName, lastData), false);
|
||||
} catch (e) {
|
||||
this.logger.error('[BinarySyncerService.createTask] binaryName: %s, error: %s', binaryName, e);
|
||||
}
|
||||
}
|
||||
|
||||
public async findTask(taskId: string) {
|
||||
@@ -95,12 +115,12 @@ export class BinarySyncerService extends AbstractService {
|
||||
}
|
||||
|
||||
public async executeTask(task: Task) {
|
||||
const binaryName = task.targetName;
|
||||
const binaryInstance = this.createBinaryInstance(binaryName);
|
||||
const binaryName = task.targetName as BinaryName;
|
||||
const binaryAdapter = await this.getBinaryAdapter(binaryName);
|
||||
const logUrl = `${this.config.cnpmcore.registry}/-/binary/${binaryName}/syncs/${task.taskId}/log`;
|
||||
let logs: string[] = [];
|
||||
logs.push(`[${isoNow()}] 🚧🚧🚧🚧🚧 Start sync binary "${binaryName}" 🚧🚧🚧🚧🚧`);
|
||||
if (!binaryInstance) {
|
||||
if (!binaryAdapter) {
|
||||
task.error = 'unknow binaryName';
|
||||
logs.push(`[${isoNow()}] ❌ Synced "${binaryName}" fail, ${task.error}, log: ${logUrl}`);
|
||||
logs.push(`[${isoNow()}] ❌❌❌❌❌ "${binaryName}" ❌❌❌❌❌`);
|
||||
@@ -115,7 +135,7 @@ export class BinarySyncerService extends AbstractService {
|
||||
this.logger.info('[BinarySyncerService.executeTask:start] taskId: %s, targetName: %s, log: %s',
|
||||
task.taskId, task.targetName, logUrl);
|
||||
try {
|
||||
await this.syncDir(binaryInstance, task, '/');
|
||||
await this.syncDir(binaryAdapter, task, '/');
|
||||
logs.push(`[${isoNow()}] 🟢 log: ${logUrl}`);
|
||||
logs.push(`[${isoNow()}] 🟢🟢🟢🟢🟢 "${binaryName}" 🟢🟢🟢🟢🟢`);
|
||||
await this.taskService.finishTask(task, TaskState.Success, logs.join('\n'));
|
||||
@@ -132,22 +152,22 @@ export class BinarySyncerService extends AbstractService {
|
||||
}
|
||||
}
|
||||
|
||||
private async syncDir(binaryInstance: AbstractBinary, task: Task, dir: string, parentIndex = '') {
|
||||
const binaryName = task.targetName;
|
||||
const result = await binaryInstance.fetch(dir, task.data);
|
||||
private async syncDir(binaryAdapter: AbstractBinary, task: Task, dir: string, parentIndex = '') {
|
||||
const binaryName = task.targetName as BinaryName;
|
||||
const result = await binaryAdapter.fetch(dir, binaryName);
|
||||
let hasDownloadError = false;
|
||||
let hasItems = false;
|
||||
if (result && result.items.length > 0) {
|
||||
hasItems = true;
|
||||
let logs: string[] = [];
|
||||
const newItems = await this.diff(binaryName, dir, result.items);
|
||||
logs.push(`[${isoNow()}][${dir}] 🚧 Syncing diff: ${result.items.length} => ${newItems.length}, Binary class: ${binaryInstance.constructor.name}`);
|
||||
logs.push(`[${isoNow()}][${dir}] 🚧 Syncing diff: ${result.items.length} => ${newItems.length}, Binary class: ${binaryAdapter.constructor.name}`);
|
||||
for (const [ index, { item, reason }] of newItems.entries()) {
|
||||
if (item.isDir) {
|
||||
logs.push(`[${isoNow()}][${dir}] 🚧 [${parentIndex}${index}] Start sync dir ${JSON.stringify(item)}, reason: ${reason}`);
|
||||
await this.taskService.appendTaskLog(task, logs.join('\n'));
|
||||
logs = [];
|
||||
const [ hasError, hasSubItems ] = await this.syncDir(binaryInstance, task, `${dir}${item.name}`, `${parentIndex}${index}.`);
|
||||
const [ hasError, hasSubItems ] = await this.syncDir(binaryAdapter, task, `${dir}${item.name}`, `${parentIndex}${index}.`);
|
||||
if (hasError) {
|
||||
hasDownloadError = true;
|
||||
} else {
|
||||
@@ -160,13 +180,24 @@ export class BinarySyncerService extends AbstractService {
|
||||
} else {
|
||||
// download to nfs
|
||||
logs.push(`[${isoNow()}][${dir}] 🚧 [${parentIndex}${index}] Downloading ${JSON.stringify(item)}, reason: ${reason}`);
|
||||
// skip binary files that already exist
|
||||
const existsBinary = await this.binaryRepository.findBinary(item.category, item.parent, item.name);
|
||||
if (existsBinary && existsBinary.date === item.date) {
|
||||
logs.push(`[${isoNow()}][${dir}] 🟢 [${parentIndex}${index}] binary file exists, skip download, binaryId: ${existsBinary.binaryId}`);
|
||||
this.logger.info('[BinarySyncerService.syncDir:skipDownload] binaryId: %s exists, storePath: %s',
|
||||
existsBinary.binaryId, existsBinary.storePath);
|
||||
continue;
|
||||
}
|
||||
await this.taskService.appendTaskLog(task, logs.join('\n'));
|
||||
logs = [];
|
||||
let localFile = '';
|
||||
try {
|
||||
const { tmpfile, headers, timing } =
|
||||
await downloadToTempfile(this.httpclient, this.config.dataDir, item.sourceUrl!, item.ignoreDownloadStatuses);
|
||||
logs.push(`[${isoNow()}][${dir}] 🟢 [${parentIndex}${index}] HTTP content-length: ${headers['content-length']}, timing: ${JSON.stringify(timing)}, ${item.sourceUrl} => ${tmpfile}`);
|
||||
await downloadToTempfile(
|
||||
this.httpclient, this.config.dataDir, item.sourceUrl!, { ignoreDownloadStatuses: item.ignoreDownloadStatuses });
|
||||
const log = `[${isoNow()}][${dir}] 🟢 [${parentIndex}${index}] HTTP content-length: ${headers['content-length']}, timing: ${JSON.stringify(timing)}, ${item.sourceUrl} => ${tmpfile}`;
|
||||
logs.push(log);
|
||||
this.logger.info('[BinarySyncerService.syncDir:downloadToTempfile] %s', log);
|
||||
localFile = tmpfile;
|
||||
const binary = await this.saveBinaryItem(item, tmpfile);
|
||||
logs.push(`[${isoNow()}][${dir}] 🟢 [${parentIndex}${index}] Synced file success, binaryId: ${binary.binaryId}`);
|
||||
@@ -174,7 +205,7 @@ export class BinarySyncerService extends AbstractService {
|
||||
logs = [];
|
||||
} catch (err: any) {
|
||||
if (err.name === 'DownloadNotFoundError') {
|
||||
this.logger.warn('Not found %s, skip it', item.sourceUrl);
|
||||
this.logger.info('Not found %s, skip it', item.sourceUrl);
|
||||
logs.push(`[${isoNow()}][${dir}] 🧪️ [${parentIndex}${index}] Download ${item.sourceUrl} not found, skip it`);
|
||||
} else {
|
||||
this.logger.error('Download binary %s %s', item.sourceUrl, err);
|
||||
@@ -200,7 +231,7 @@ export class BinarySyncerService extends AbstractService {
|
||||
return [ hasDownloadError, hasItems ];
|
||||
}
|
||||
|
||||
private async diff(binaryName: string, dir: string, fetchItems: BinaryItem[]) {
|
||||
private async diff(binaryName: BinaryName, dir: string, fetchItems: BinaryItem[]) {
|
||||
const existsItems = await this.binaryRepository.listBinaries(binaryName, dir);
|
||||
const existsMap = new Map<string, Binary>();
|
||||
for (const item of existsItems) {
|
||||
@@ -248,17 +279,17 @@ export class BinarySyncerService extends AbstractService {
|
||||
return binary;
|
||||
}
|
||||
|
||||
private createBinaryInstance(binaryName: string): AbstractBinary | undefined {
|
||||
private async getBinaryAdapter(binaryName: BinaryName): Promise<AbstractBinary | undefined> {
|
||||
const config = this.config.cnpmcore;
|
||||
const binaryConfig = binaries[binaryName];
|
||||
|
||||
let binaryAdapter: AbstractBinary;
|
||||
if (config.sourceRegistryIsCNpm) {
|
||||
const binaryConfig = binaries[binaryName];
|
||||
const syncBinaryFromAPISource = config.syncBinaryFromAPISource || `${config.sourceRegistry}/-/binary`;
|
||||
return new ApiBinary(this.httpclient, this.logger, binaryConfig, syncBinaryFromAPISource);
|
||||
}
|
||||
for (const binaryConfig of Object.values(binaries)) {
|
||||
if (binaryConfig.category === binaryName) {
|
||||
return new BinaryClasses[binaryConfig.syncer](this.httpclient, this.logger, binaryConfig);
|
||||
}
|
||||
binaryAdapter = await this.eggObjectFactory.getEggObject(AbstractBinary, BinaryType.Api);
|
||||
} else {
|
||||
binaryAdapter = await this.eggObjectFactory.getEggObject(AbstractBinary, binaryConfig.type);
|
||||
}
|
||||
await binaryAdapter.initFetch(binaryName);
|
||||
return binaryAdapter;
|
||||
}
|
||||
}
|
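A sketch of the intended caller of createTask above, e.g. a scheduled job that enqueues one SyncBinary task per configured binary; the function and its wiring are hypothetical, while the service API and the binaries config import come from this diff:

import binaries, { BinaryName } from '../../../config/binaries';
import { BinarySyncerService } from '../../core/service/BinarySyncerService'; // import path assumed

export async function enqueueSyncBinaryTasks(binarySyncerService: BinarySyncerService) {
  for (const binaryName of Object.keys(binaries) as BinaryName[]) {
    // createTask() first checks for an existing SyncBinary task with the same targetName,
    // then relies on the bizId `SyncBinary:${binaryName}` as a second guard against races
    await binarySyncerService.createTask(binaryName);
  }
}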
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { AccessLevel, ContextProto, Inject } from '@eggjs/tegg';
|
||||
import { AccessLevel, SingletonProto, Inject } from '@eggjs/tegg';
|
||||
import { EggLogger } from 'egg';
|
||||
import pMap from 'p-map';
|
||||
import { BugVersion } from '../entity/BugVersion';
|
||||
@@ -7,7 +7,7 @@ import { DistRepository } from '../../repository/DistRepository';
|
||||
import { getScopeAndName } from '../../common/PackageUtil';
|
||||
import { CacheService } from './CacheService';
|
||||
|
||||
@ContextProto({
|
||||
@SingletonProto({
|
||||
accessLevel: AccessLevel.PUBLIC,
|
||||
})
|
||||
export class BugVersionService {
|
||||
|
||||
@@ -1,36 +1,46 @@
|
||||
import {
|
||||
AccessLevel,
|
||||
ContextProto,
|
||||
SingletonProto,
|
||||
Inject,
|
||||
} from '@eggjs/tegg';
|
||||
import { CacheAdapter } from '../../common/adapter/CacheAdapter';
|
||||
import { AbstractService } from '../../common/AbstractService';
|
||||
import { ChangesStreamTaskData } from '../entity/Task';
|
||||
|
||||
type PackageCacheAttribe = 'etag' | 'manifests';
|
||||
|
||||
type TotalData = {
|
||||
export type UpstreamRegistryInfo = {
|
||||
registry_name: string;
|
||||
source_registry: string;
|
||||
changes_stream_url: string;
|
||||
} & ChangesStreamTaskData;
|
||||
|
||||
export type DownloadInfo = {
|
||||
today: number;
|
||||
yesterday: number;
|
||||
samedayLastweek: number;
|
||||
thisweek: number;
|
||||
thismonth: number;
|
||||
thisyear: number;
|
||||
lastweek: number;
|
||||
lastmonth: number;
|
||||
lastyear: number;
|
||||
};
|
||||
|
||||
export type TotalData = {
|
||||
packageCount: number;
|
||||
packageVersionCount: number;
|
||||
lastPackage: string;
|
||||
lastPackageVersion: string;
|
||||
download: {
|
||||
today: number;
|
||||
yesterday: number;
|
||||
samedayLastweek: number;
|
||||
thisweek: number;
|
||||
thismonth: number;
|
||||
thisyear: number;
|
||||
lastweek: number;
|
||||
lastmonth: number;
|
||||
lastyear: number;
|
||||
};
|
||||
changesStream: object,
|
||||
download: DownloadInfo;
|
||||
changesStream: ChangesStreamTaskData;
|
||||
lastChangeId: number | bigint;
|
||||
cacheTime: string;
|
||||
upstreamRegistries: UpstreamRegistryInfo[];
|
||||
};
|
||||
const TOTAL_DATA_KEY = '__TOTAL_DATA__';
|
||||
|
||||
@ContextProto({
|
||||
@SingletonProto({
|
||||
accessLevel: AccessLevel.PUBLIC,
|
||||
})
|
||||
export class CacheService extends AbstractService {
|
||||
@@ -72,6 +82,7 @@ export class CacheService extends AbstractService {
|
||||
lastyear: 0,
|
||||
},
|
||||
changesStream: {},
|
||||
upstreamRegistries: [],
|
||||
lastChangeId: 0,
|
||||
cacheTime: '',
|
||||
};
|
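For reference, a hand-written value that satisfies the widened TotalData type above; every number and name is made up, and the import path is assumed:

import { TotalData } from '../service/CacheService';

const totals: TotalData = {
  packageCount: 1000,
  packageVersionCount: 5000,
  lastPackage: 'lodash',
  lastPackageVersion: 'lodash@4.17.21',
  download: {
    today: 1, yesterday: 2, samedayLastweek: 3, thisweek: 4, thismonth: 5,
    thisyear: 6, lastweek: 7, lastmonth: 8, lastyear: 9,
  },
  // ChangesStreamTaskData requires at least taskWorker and since
  changesStream: { taskWorker: 'host-1:4321', since: '9527' },
  lastChangeId: 0,
  cacheTime: new Date().toISOString(),
  upstreamRegistries: [{
    registry_name: 'default',
    source_registry: 'https://registry.npmjs.org',
    changes_stream_url: 'https://replicate.npmjs.com/_changes',
    taskWorker: 'host-1:4321',
    since: '9527',
  }],
};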
||||
|
||||
@@ -2,70 +2,97 @@ import os from 'os';
|
||||
import { setTimeout } from 'timers/promises';
|
||||
import {
|
||||
AccessLevel,
|
||||
ContextProto,
|
||||
SingletonProto,
|
||||
EggObjectFactory,
|
||||
Inject,
|
||||
} from '@eggjs/tegg';
|
||||
import {
|
||||
EggContextHttpClient,
|
||||
} from 'egg';
|
||||
import { TaskType } from '../../common/enum/Task';
|
||||
import { TaskState, TaskType } from '../../common/enum/Task';
|
||||
import { AbstractService } from '../../common/AbstractService';
|
||||
import { TaskRepository } from '../../repository/TaskRepository';
|
||||
import { Task } from '../entity/Task';
|
||||
import { PackageSyncerService } from './PackageSyncerService';
|
||||
import { HOST_NAME, ChangesStreamTask, Task } from '../entity/Task';
|
||||
import { PackageSyncerService, RegistryNotMatchError } from './PackageSyncerService';
|
||||
import { TaskService } from './TaskService';
|
||||
import { RegistryManagerService } from './RegistryManagerService';
|
||||
import { E500 } from 'egg-errors';
|
||||
import { Registry } from '../entity/Registry';
|
||||
import { AbstractChangeStream } from '../../common/adapter/changesStream/AbstractChangesStream';
|
||||
import { getScopeAndName } from '../../common/PackageUtil';
|
||||
import { GLOBAL_WORKER } from '../../common/constants';
|
||||
import { ScopeManagerService } from './ScopeManagerService';
|
||||
import { PackageRepository } from '../../repository/PackageRepository';
|
||||
|
||||
@ContextProto({
|
||||
@SingletonProto({
|
||||
accessLevel: AccessLevel.PUBLIC,
|
||||
})
|
||||
export class ChangesStreamService extends AbstractService {
|
||||
@Inject()
|
||||
private readonly taskRepository: TaskRepository;
|
||||
@Inject()
|
||||
private readonly httpclient: EggContextHttpClient;
|
||||
@Inject()
|
||||
private readonly packageSyncerService: PackageSyncerService;
|
||||
@Inject()
|
||||
private readonly taskService: TaskService;
|
||||
@Inject()
|
||||
private readonly registryManagerService : RegistryManagerService;
|
||||
@Inject()
|
||||
private readonly scopeManagerService : ScopeManagerService;
|
||||
@Inject()
|
||||
private readonly eggObjectFactory: EggObjectFactory;
|
||||
@Inject()
|
||||
private readonly packageRepository: PackageRepository;
|
||||
|
||||
public async findExecuteTask() {
|
||||
const targetName = 'GLOBAL_WORKER';
|
||||
const existsTask = await this.taskRepository.findTaskByTargetName(targetName, TaskType.ChangesStream);
|
||||
if (!existsTask) {
|
||||
// for backward compatibility, changes_stream tasks come in two flavours:
|
||||
// GLOBAL_WORKER: the default sync source
|
||||
// `{registryName}_WORKER`: sync sources for custom scopes
|
||||
public async findExecuteTask(): Promise<ChangesStreamTask | null> {
|
||||
const targetName = GLOBAL_WORKER;
|
||||
const globalRegistryTask = await this.taskRepository.findTaskByTargetName(targetName, TaskType.ChangesStream);
|
||||
// if the default sync source has not been configured yet, initialize it first
|
||||
if (!globalRegistryTask) {
|
||||
await this.taskService.createTask(Task.createChangesStream(targetName), false);
|
||||
}
|
||||
return await this.taskService.findExecuteTask(TaskType.ChangesStream);
|
||||
// custom scope sync sources are created manually by an admin
|
||||
// pick the next task from the queue by TaskType.ChangesStream
|
||||
return await this.taskService.findExecuteTask(TaskType.ChangesStream) as ChangesStreamTask;
|
||||
}
|
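How the method above is expected to be driven, sketched as a hypothetical polling worker (the schedule itself is not part of this diff):

import { ChangesStreamService } from '../../core/service/ChangesStreamService'; // import path assumed

export async function pollChangesStream(changesStreamService: ChangesStreamService) {
  // creates the GLOBAL_WORKER task on first run, then pops the next waiting changes_stream task
  const task = await changesStreamService.findExecuteTask();
  if (!task) return; // nothing waiting in the queue
  // long running: keeps reading changes until enableChangesStream is turned off or an error occurs
  await changesStreamService.executeTask(task);
}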
||||
|
||||
public async executeTask(task: Task) {
|
||||
public async suspendSync(exit = false) {
|
||||
this.logger.info('[ChangesStreamService.suspendSync:start]');
|
||||
if (this.config.cnpmcore.enableChangesStream) {
|
||||
// prevent this host from picking up new tasks
|
||||
if (exit) {
|
||||
this.config.cnpmcore.enableChangesStream = false;
|
||||
}
|
||||
const authorIp = os.hostname();
|
||||
// suspend all changesStream tasks on the current host
|
||||
const tasks = await this.taskRepository.findTaskByAuthorIpAndType(authorIp, TaskType.ChangesStream);
|
||||
for (const task of tasks) {
|
||||
if (task.state === TaskState.Processing) {
|
||||
this.logger.info('[ChangesStreamService.suspendSync:suspend] taskId: %s', task.taskId);
|
||||
// 1. reset the task state to waiting
|
||||
// 2. push the task back onto the queue so another host can execute it
|
||||
await this.taskService.retryTask(task);
|
||||
}
|
||||
}
|
||||
}
|
||||
this.logger.info('[ChangesStreamService.suspendSync:finish]');
|
||||
}
|
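suspendSync is presumably called from the application's shutdown path so that in-flight streaming tasks are handed back to the queue; a hedged sketch using egg's lifecycle hook (the singleton lookup call is an assumption):

// app.ts (sketch)
import { ChangesStreamService } from './app/core/service/ChangesStreamService';

export default class AppBootHook {
  constructor(private readonly app: any) {}

  async beforeClose() {
    const changesStreamService: ChangesStreamService = await this.app.getEggObject(ChangesStreamService); // lookup API assumed
    // exit = true also flips enableChangesStream off so no new task is picked up while shutting down
    await changesStreamService.suspendSync(true);
  }
}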
||||
|
||||
public async executeTask(task: ChangesStreamTask) {
|
||||
task.authorIp = os.hostname();
|
||||
task.authorId = `pid_${process.pid}`;
|
||||
await this.taskRepository.saveTask(task);
|
||||
|
||||
const changesStreamRegistry: string = this.config.cnpmcore.changesStreamRegistry;
|
||||
// https://github.com/npm/registry-follower-tutorial
|
||||
// default "update_seq": 7138885,
|
||||
// initialize the changesStream task
|
||||
// since defaults to starting from 1
|
||||
try {
|
||||
let since: string = task.data.since;
|
||||
// get update_seq from ${changesStreamRegistry} on the first time
|
||||
if (!since) {
|
||||
const { status, data } = await this.httpclient.request(changesStreamRegistry, {
|
||||
followRedirect: true,
|
||||
timeout: 10000,
|
||||
dataType: 'json',
|
||||
});
|
||||
if (data.update_seq) {
|
||||
since = String(data.update_seq - 10);
|
||||
} else {
|
||||
since = '7139538';
|
||||
}
|
||||
this.logger.warn('[ChangesStreamService.executeTask:firstSeq] GET %s status: %s, data: %j, since: %s',
|
||||
changesStreamRegistry, status, data, since);
|
||||
since = await this.getInitialSince(task);
|
||||
}
|
||||
// allow disable changesStream dynamic
|
||||
while (since && this.config.cnpmcore.enableChangesStream) {
|
||||
const { lastSince, taskCount } = await this.handleChanges(since, task);
|
||||
this.logger.warn('[ChangesStreamService.executeTask:changes] since: %s => %s, %d new tasks, taskId: %s, updatedAt: %j',
|
||||
const { lastSince, taskCount } = await this.executeSync(since, task);
|
||||
this.logger.info('[ChangesStreamService.executeTask:changes] since: %s => %s, %d new tasks, taskId: %s, updatedAt: %j',
|
||||
since, lastSince, taskCount, task.taskId, task.updatedAt);
|
||||
since = lastSince;
|
||||
if (taskCount === 0 && this.config.env === 'unittest') {
|
||||
@@ -78,104 +105,132 @@ export class ChangesStreamService extends AbstractService {
|
||||
this.logger.error(err);
|
||||
task.error = `${err}`;
|
||||
await this.taskRepository.saveTask(task);
|
||||
await this.suspendSync();
|
||||
}
|
||||
}
|
||||
|
||||
private async handleChanges(since: string, task: Task) {
|
||||
const changesStreamRegistry: string = this.config.cnpmcore.changesStreamRegistry;
|
||||
const changesStreamRegistryMode: string = this.config.cnpmcore.changesStreamRegistryMode;
|
||||
const db = `${changesStreamRegistry}/_changes?since=${since}`;
|
||||
let lastSince = since;
|
||||
let taskCount = 0;
|
||||
if (changesStreamRegistryMode === 'streaming') {
|
||||
const { res } = await this.httpclient.request(db, {
|
||||
streaming: true,
|
||||
timeout: 10000,
|
||||
});
|
||||
for await (const chunk of res) {
|
||||
const text: string = chunk.toString();
|
||||
// {"seq":7138879,"id":"@danydodson/prettier-config","changes":[{"rev":"5-a56057032714af25400d93517773a82a"}]}
|
||||
// console.log('😄%j😄', text);
|
||||
// 😄"{\"seq\":7138738,\"id\":\"wargerm\",\"changes\":[{\"rev\":\"59-f0a0d326db4c62ed480987a04ba3bf8f\"}]}"😄
|
||||
// 😄",\n{\"seq\":7138739,\"id\":\"@laffery/webpack-starter-kit\",\"changes\":[{\"rev\":\"4-84a8dc470a07872f4cdf85cf8ef892a1\"}]},\n{\"seq\":7138741,\"id\":\"venom-bot\",\"changes\":[{\"rev\":\"103-908654b1ad4b0e0fd40b468d75730674\"}]}"😄
|
||||
// 😄",\n{\"seq\":7138743,\"id\":\"react-native-template-pytorch-live\",\"changes\":[{\"rev\":\"40-871c686b200312303ba7c4f7f93e0362\"}]}"😄
|
||||
// 😄",\n{\"seq\":7138745,\"id\":\"ccxt\",\"changes\":[{\"rev\":\"10205-25367c525a0a3bd61be3a72223ce212c\"}]}"😄
|
||||
const matchs = text.matchAll(/"seq":(\d+),"id":"([^"]+)"/gm);
|
||||
let count = 0;
|
||||
let lastPackage = '';
|
||||
for (const match of matchs) {
|
||||
const seq = match[1];
|
||||
const fullname = match[2];
|
||||
if (seq && fullname) {
|
||||
await this.packageSyncerService.createTask(fullname, {
|
||||
authorIp: os.hostname(),
|
||||
authorId: 'ChangesStreamService',
|
||||
skipDependencies: true,
|
||||
tips: `Sync cause by changes_stream(${changesStreamRegistry}) update seq: ${seq}`,
|
||||
});
|
||||
count++;
|
||||
lastSince = seq;
|
||||
lastPackage = fullname;
|
||||
}
|
||||
}
|
||||
if (count > 0) {
|
||||
taskCount += count;
|
||||
task.data = {
|
||||
...task.data,
|
||||
since: lastSince,
|
||||
last_package: lastPackage,
|
||||
last_package_created: new Date(),
|
||||
task_count: (task.data.task_count || 0) + count,
|
||||
};
|
||||
await this.taskRepository.saveTask(task);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// json mode
|
||||
// {"results":[{"seq":1988653,"type":"PACKAGE_VERSION_ADDED","id":"dsr-package-mercy-magot-thorp-sward","changes":[{"version":"1.0.1"}]},
|
||||
const { data } = await this.httpclient.request(db, {
|
||||
followRedirect: true,
|
||||
timeout: 30000,
|
||||
dataType: 'json',
|
||||
gzip: true,
|
||||
});
|
||||
if (data.results?.length > 0) {
|
||||
let count = 0;
|
||||
let lastPackage = '';
|
||||
for (const change of data.results) {
|
||||
const seq = change.seq;
|
||||
const fullname = change.id;
|
||||
if (seq && fullname && seq !== since) {
|
||||
await this.packageSyncerService.createTask(fullname, {
|
||||
authorIp: os.hostname(),
|
||||
authorId: 'ChangesStreamService',
|
||||
skipDependencies: true,
|
||||
tips: `Sync cause by changes_stream(${changesStreamRegistry}) update seq: ${seq}, change: ${JSON.stringify(change)}`,
|
||||
});
|
||||
count++;
|
||||
lastSince = seq;
|
||||
lastPackage = fullname;
|
||||
}
|
||||
}
|
||||
if (count > 0) {
|
||||
taskCount += count;
|
||||
task.data = {
|
||||
...task.data,
|
||||
since: lastSince,
|
||||
last_package: lastPackage,
|
||||
last_package_created: new Date(),
|
||||
task_count: (task.data.task_count || 0) + count,
|
||||
};
|
||||
await this.taskRepository.saveTask(task);
|
||||
}
|
||||
// prefer the registry looked up by registryId; fall back to the default registry otherwise
|
||||
public async prepareRegistry(task: ChangesStreamTask): Promise<Registry> {
|
||||
const { registryId } = task.data || {};
|
||||
// if a registryId is already set, load the registry directly from the DB
|
||||
if (registryId) {
|
||||
const registry = await this.registryManagerService.findByRegistryId(registryId);
|
||||
if (!registry) {
|
||||
this.logger.error('[ChangesStreamService.getRegistry:error] registryId %s not found', registryId);
|
||||
throw new E500(`invalid change stream registry: ${registryId}`);
|
||||
}
|
||||
return registry;
|
||||
}
|
||||
|
||||
if (taskCount === 0) {
|
||||
// keep update task, make sure updatedAt changed
|
||||
task.updatedAt = new Date();
|
||||
const registry = await this.registryManagerService.ensureDefaultRegistry();
|
||||
task.data = {
|
||||
...(task.data || {}),
|
||||
registryId: registry.registryId,
|
||||
};
|
||||
await this.taskRepository.saveTask(task);
|
||||
|
||||
return registry;
|
||||
}
|
||||
|
||||
// decide whether a sync task needs to be created, based on the registry
|
||||
// 1. if the package already has a registryId, that registryId takes precedence
|
||||
// 2. the package's scope belongs to the current registry
|
||||
// 3. the registry has no scope configured (treated as a generic registry) and the package's scope does not belong to any other registry
|
||||
public async needSync(registry: Registry, fullname: string): Promise<boolean> {
|
||||
const [ scopeName, name ] = getScopeAndName(fullname);
|
||||
const packageEntity = await this.packageRepository.findPackage(scopeName, name);
|
||||
|
||||
// if the package does not exist and we are in 'exist' sync mode, skip syncing
|
||||
if (this.config.cnpmcore.syncMode === 'exist' && !packageEntity) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (packageEntity?.registryId) {
|
||||
return registry.registryId === packageEntity.registryId;
|
||||
}
|
||||
|
||||
const scope = await this.scopeManagerService.findByName(scopeName);
|
||||
const inCurrentRegistry = scope && scope?.registryId === registry.registryId;
|
||||
if (inCurrentRegistry) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const registryScopeCount = await this.scopeManagerService.countByRegistryId(registry.registryId);
|
||||
// the package has no scope record and the registry has no scopes configured, so this is a generic registry and the package should be synced
|
||||
return !scope && !registryScopeCount;
|
||||
}
|
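To make the branching above concrete, the expected outcomes for a few cases; R1 is a registry that owns scope '@r1', R0 is a generic registry with no scopes configured, syncMode is 'all', and all names are illustrative:

import { ChangesStreamService } from './ChangesStreamService'; // same-directory import assumed
import { Registry } from '../entity/Registry';

async function demoNeedSync(svc: ChangesStreamService, R0: Registry, R1: Registry) {
  await svc.needSync(R1, '@r1/published-here'); // true:  the stored package.registryId is R1
  await svc.needSync(R0, '@r1/published-here'); // false: registryId pins the package to R1
  await svc.needSync(R1, '@r1/new-package');    // true:  scope '@r1' belongs to R1
  await svc.needSync(R0, '@r1/new-package');    // false: the scope is owned by another registry
  await svc.needSync(R0, 'lodash');             // true:  unscoped package on a generic registry
  await svc.needSync(R1, 'lodash');             // false: R1 only syncs its own scopes
}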
||||
public async getInitialSince(task: ChangesStreamTask): Promise<string> {
|
||||
const registry = await this.prepareRegistry(task);
|
||||
const changesStreamAdapter = await this.eggObjectFactory.getEggObject(AbstractChangeStream, registry.type) as AbstractChangeStream;
|
||||
const since = await changesStreamAdapter.getInitialSince(registry);
|
||||
return since;
|
||||
}
|
||||
|
||||
// fetch the changes that need syncing from the changes stream
|
||||
// and update the task's since and task_count related fields
|
||||
public async executeSync(since: string, task: ChangesStreamTask) {
|
||||
const registry = await this.prepareRegistry(task);
|
||||
const changesStreamAdapter = await this.eggObjectFactory.getEggObject(AbstractChangeStream, registry.type) as AbstractChangeStream;
|
||||
let taskCount = 0;
|
||||
let lastSince = since;
|
||||
|
||||
// fetch the data that needs syncing
|
||||
// filtered by scope and existing package info
|
||||
const stream = changesStreamAdapter.fetchChanges(registry, since);
|
||||
let lastPackage: string | undefined;
|
||||
|
||||
// create sync tasks
|
||||
for await (const change of stream) {
|
||||
const { fullname, seq } = change;
|
||||
lastPackage = fullname;
|
||||
lastSince = seq;
|
||||
const valid = await this.needSync(registry, fullname);
|
||||
if (valid) {
|
||||
taskCount++;
|
||||
const tips = `Sync cause by changes_stream(${registry.changeStream}) update seq: ${seq}`;
|
||||
try {
|
||||
const task = await this.packageSyncerService.createTask(fullname, {
|
||||
authorIp: HOST_NAME,
|
||||
authorId: 'ChangesStreamService',
|
||||
registryId: registry.registryId,
|
||||
skipDependencies: true,
|
||||
tips,
|
||||
});
|
||||
this.logger.info('[ChangesStreamService.createTask:success] fullname: %s, task: %s, tips: %s',
|
||||
fullname, task.id, tips);
|
||||
} catch (err) {
|
||||
if (err instanceof RegistryNotMatchError) {
|
||||
this.logger.warn('[ChangesStreamService.executeSync:skip] fullname: %s, error: %s, tips: %s',
|
||||
fullname, err, tips);
|
||||
continue;
|
||||
}
|
||||
// only log error, make sure changes still reading
|
||||
this.logger.error('[ChangesStreamService.executeSync:error] fullname: %s, error: %s, tips: %s',
|
||||
fullname, err, tips);
|
||||
this.logger.error(err);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
// update the task record in real time
|
||||
// even when no sync is needed, so that accumulated processing time never leaves the task un-updated for more than 10 minutes
|
||||
task.updateSyncData({
|
||||
lastSince,
|
||||
lastPackage,
|
||||
taskCount,
|
||||
});
|
||||
await this.taskRepository.saveTask(task);
|
||||
}
|
||||
|
||||
// if taskCount is 0, still update the task info once
|
||||
if (taskCount === 0) {
|
||||
task.updateSyncData({
|
||||
lastSince,
|
||||
lastPackage,
|
||||
taskCount,
|
||||
});
|
||||
await this.taskRepository.saveTask(task);
|
||||
}
|
||||
|
||||
return { lastSince, taskCount };
|
||||
}
|
||||
}
|
||||
|
||||
78 app/core/service/CreateHookTriggerService.ts Normal file
@@ -0,0 +1,78 @@
|
||||
import { AccessLevel, SingletonProto, Inject } from '@eggjs/tegg';
|
||||
import { AbstractService } from '../../common/AbstractService';
|
||||
import { HookType } from '../../common/enum/Hook';
|
||||
import { TaskState } from '../../common/enum/Task';
|
||||
import { HookEvent } from '../entity/HookEvent';
|
||||
import { CreateHookTask, Task } from '../entity/Task';
|
||||
import { HookRepository } from '../../repository/HookRepository';
|
||||
import { PackageRepository } from '../../repository/PackageRepository';
|
||||
import pMap from 'p-map';
|
||||
import { Hook } from '../entity/Hook';
|
||||
import { TaskService } from './TaskService';
|
||||
import { isoNow } from '../../common/LogUtil';
|
||||
import { getScopeAndName } from '../../common/PackageUtil';
|
||||
|
||||
@SingletonProto({
|
||||
accessLevel: AccessLevel.PUBLIC,
|
||||
})
|
||||
export class CreateHookTriggerService extends AbstractService {
|
||||
@Inject()
|
||||
private readonly hookRepository: HookRepository;
|
||||
|
||||
@Inject()
|
||||
private readonly packageRepository: PackageRepository;
|
||||
|
||||
@Inject()
|
||||
private readonly taskService: TaskService;
|
||||
|
||||
async executeTask(task: CreateHookTask): Promise<void> {
|
||||
const { hookEvent } = task.data;
|
||||
const [ scope, name ] = getScopeAndName(hookEvent.fullname);
|
||||
const pkg = await this.packageRepository.findPackage(scope, name);
|
||||
if (!pkg) {
|
||||
await this.taskService.finishTask(task, TaskState.Success, `[${isoNow()}][Hooks] package ${hookEvent.fullname} not exists`);
|
||||
return;
|
||||
}
|
||||
|
||||
const startLog = [
|
||||
`[${isoNow()}][Hooks] Start Create Trigger for ${pkg.fullname} ${task.data.hookEvent.changeId}`,
|
||||
`[${isoNow()}][Hooks] change content ${JSON.stringify(task.data.hookEvent.change)}`,
|
||||
];
|
||||
await this.taskService.finishTask(task, TaskState.Processing, startLog.join('\n'));
|
||||
|
||||
try {
|
||||
await this.taskService.appendTaskLog(task, `[${isoNow()}][Hooks] PushHooks to ${HookType.Package} ${pkg.fullname}\n`);
|
||||
await this.createTriggerByMethod(task, HookType.Package, pkg.fullname, hookEvent);
|
||||
await this.taskService.appendTaskLog(task, `[${isoNow()}][Hooks] PushHooks to ${HookType.Scope} ${pkg.scope}\n`);
|
||||
await this.createTriggerByMethod(task, HookType.Scope, pkg.scope, hookEvent);
|
||||
|
||||
const maintainers = await this.packageRepository.listPackageMaintainers(pkg.packageId);
|
||||
for (const maintainer of maintainers) {
|
||||
await this.taskService.appendTaskLog(task, `[${isoNow()}][Hooks] PushHooks to ${HookType.Owner} ${maintainer.name}\n`);
|
||||
await this.createTriggerByMethod(task, HookType.Owner, maintainer.name, hookEvent);
|
||||
}
|
||||
await this.taskService.finishTask(task, TaskState.Success, `[${isoNow()}][Hooks] create trigger succeed \n`);
|
||||
} catch (e) {
|
||||
e.message = 'create trigger failed: ' + e.message;
|
||||
await this.taskService.finishTask(task, TaskState.Fail, `[${isoNow()}][Hooks] ${e.stack} \n`);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
private async createTriggerByMethod(task: Task, type: HookType, name: string, hookEvent: HookEvent) {
|
||||
let hooks = await this.hookRepository.listHooksByTypeAndName(type, name);
|
||||
while (hooks.length) {
|
||||
await this.createTriggerTasks(hooks, hookEvent);
|
||||
hooks = await this.hookRepository.listHooksByTypeAndName(type, name, hooks[hooks.length - 1].id);
|
||||
await this.taskService.appendTaskLog(task,
|
||||
`[${isoNow()}][Hooks] PushHooks to ${type} ${name} ${hooks.length} \n`);
|
||||
}
|
||||
}
|
||||
|
||||
private async createTriggerTasks(hooks: Array<Hook>, hookEvent: HookEvent) {
|
||||
await pMap(hooks, async hook => {
|
||||
const triggerHookTask = Task.createTriggerHookTask(hookEvent, hook.hookId);
|
||||
await this.taskService.createTask(triggerHookTask, true);
|
||||
}, { concurrency: 5 });
|
||||
}
|
||||
}
|
||||
16 app/core/service/EventCorkerAdvice.ts Normal file
@@ -0,0 +1,16 @@
|
||||
import { ContextEventBus, Inject } from '@eggjs/tegg';
|
||||
import { Advice, IAdvice } from '@eggjs/tegg/aop';
|
||||
|
||||
@Advice()
|
||||
export class EventCorkAdvice implements IAdvice {
|
||||
@Inject()
|
||||
private eventBus: ContextEventBus;
|
||||
|
||||
async beforeCall() {
|
||||
this.eventBus.cork();
|
||||
}
|
||||
|
||||
async afterFinally() {
|
||||
this.eventBus.uncork();
|
||||
}
|
||||
}
|
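The advice above corks the context event bus for the duration of a call so events fire only after the whole unit of work finishes; the same pair can be used manually, as in this hypothetical service (only cork/uncork come from the diff):

import { AccessLevel, ContextEventBus, ContextProto, Inject } from '@eggjs/tegg';

@ContextProto({ accessLevel: AccessLevel.PUBLIC })
export class BulkOperationDemo {
  @Inject()
  private readonly eventBus: ContextEventBus;

  async runBuffered(work: () => Promise<void>) {
    this.eventBus.cork(); // hold back events emitted inside work()
    try {
      await work();
    } finally {
      this.eventBus.uncork(); // release the buffered events once the unit of work is done
    }
  }
}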
||||
96 app/core/service/HookManageService.ts Normal file
@@ -0,0 +1,96 @@
|
||||
import { AccessLevel, SingletonProto, Inject } from '@eggjs/tegg';
import { Hook } from '../entity/Hook';
import { HookType } from '../../common/enum/Hook';
import {
  ForbiddenError,
  NotFoundError,
} from 'egg-errors';
import { HookRepository } from '../../repository/HookRepository';
import { EggAppConfig } from 'egg';

export interface CreateHookCommand {
  type: HookType;
  ownerId: string;
  name: string;
  endpoint: string;
  secret: string;
}

export interface UpdateHookCommand {
  operatorId: string;
  hookId: string;
  endpoint: string;
  secret: string;
}

export interface DeleteHookCommand {
  operatorId: string;
  hookId: string;
}

@SingletonProto({
  accessLevel: AccessLevel.PUBLIC,
})
export class HookManageService {
  @Inject()
  private readonly hookRepository: HookRepository;

  @Inject()
  private readonly config: EggAppConfig;

  get hooksLimit() {
    return this.config.cnpmcore.hooksLimit;
  }

  async createHook(cmd: CreateHookCommand): Promise<Hook> {
    const hooks = await this.hookRepository.listHooksByOwnerId(cmd.ownerId);
    // FIXME: there is a race here under concurrency; a per-user global lock is needed to track the count
    if (hooks.length >= this.hooksLimit) {
      throw new ForbiddenError('hooks limit exceeded');
    }
    const hook = Hook.create(cmd);
    await this.hookRepository.saveHook(hook);
    return hook;
  }
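The FIXME above flags that two concurrent createHook calls can both pass the count check and push an owner past hooksLimit. A sketch of one way to close that gap, assuming a hypothetical per-owner lock helper (withOwnerLock and its backing store are not part of this codebase):

```ts
// Hypothetical sketch: serialize the count check and the insert per owner.
// `withOwnerLock` is an assumed helper (e.g. Redis SET NX + TTL, or a DB advisory lock).
async function createHookWithLock(
  withOwnerLock: <T>(ownerId: string, fn: () => Promise<T>) => Promise<T>,
  countHooks: (ownerId: string) => Promise<number>,
  insertHook: () => Promise<void>,
  ownerId: string,
  hooksLimit: number,
): Promise<void> {
  await withOwnerLock(ownerId, async () => {
    const count = await countHooks(ownerId);
    if (count >= hooksLimit) {
      throw new Error('hooks limit exceeded'); // the service would throw ForbiddenError here
    }
    await insertHook(); // no other createHook for this owner can run in between
  });
}
```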
  async updateHook(cmd: UpdateHookCommand): Promise<Hook> {
    const hook = await this.hookRepository.findHookById(cmd.hookId);
    if (!hook) {
      throw new NotFoundError(`hook ${cmd.hookId} not found`);
    }
    if (hook.ownerId !== cmd.operatorId) {
      throw new ForbiddenError(`hook ${cmd.hookId} does not belong to ${cmd.operatorId}`);
    }
    hook.endpoint = cmd.endpoint;
    hook.secret = cmd.secret;
    await this.hookRepository.saveHook(hook);
    return hook;
  }

  async deleteHook(cmd: DeleteHookCommand): Promise<Hook> {
    const hook = await this.hookRepository.findHookById(cmd.hookId);
    if (!hook) {
      throw new NotFoundError(`hook ${cmd.hookId} not found`);
    }
    if (hook.ownerId !== cmd.operatorId) {
      throw new ForbiddenError(`hook ${cmd.hookId} does not belong to ${cmd.operatorId}`);
    }
    await this.hookRepository.removeHook(cmd.hookId);
    return hook;
  }

  async listHooksByOwnerId(ownerId: string): Promise<Hook[]> {
    return await this.hookRepository.listHooksByOwnerId(ownerId);
  }

  async getHookByOwnerId(hookId: string, userId: string): Promise<Hook> {
    const hook = await this.hookRepository.findHookById(hookId);
    if (!hook) {
      throw new NotFoundError(`hook ${hookId} not found`);
    }
    if (hook.ownerId !== userId) {
      throw new ForbiddenError(`hook ${hookId} does not belong to ${userId}`);
    }
    return hook;
  }
}
app/core/service/HookTriggerService.ts (Normal file, 111 lines)
@@ -0,0 +1,111 @@
import { AccessLevel, SingletonProto, Inject } from '@eggjs/tegg';
import { TriggerHookTask } from '../entity/Task';
import { HookEvent } from '../entity/HookEvent';
import { HookRepository } from '../../repository/HookRepository';
import { PackageRepository } from '../../repository/PackageRepository';
import { DistRepository } from '../../repository/DistRepository';
import { UserRepository } from '../../repository/UserRepository';
import { Hook } from '../entity/Hook';
import { EggContextHttpClient } from 'egg';
import { isoNow } from '../../common/LogUtil';
import { TaskState } from '../../common/enum/Task';
import { TaskService } from './TaskService';
import { getScopeAndName } from '../../common/PackageUtil';

@SingletonProto({
  accessLevel: AccessLevel.PUBLIC,
})
export class HookTriggerService {
  @Inject()
  private readonly hookRepository: HookRepository;

  @Inject()
  private readonly packageRepository: PackageRepository;

  @Inject()
  private readonly distRepository: DistRepository;

  @Inject()
  private readonly userRepository: UserRepository;

  @Inject()
  private readonly httpclient: EggContextHttpClient;

  @Inject()
  private readonly taskService: TaskService;

  async executeTask(task: TriggerHookTask) {
    const { hookId, hookEvent } = task.data;
    const hook = await this.hookRepository.findHookById(hookId);
    if (!hook) {
      await this.taskService.finishTask(task, TaskState.Success, `[${isoNow()}][TriggerHooks] hook ${hookId} not exists`);
      return;
    }
    try {
      const payload = await this.createTriggerPayload(task, hookEvent, hook);
      if (!payload) {
        await this.taskService.finishTask(task, TaskState.Success, `[${isoNow()}][TriggerHooks] generate payload failed \n`);
        return;
      }
      const status = await this.doExecuteTrigger(hook, payload);
      hook.latestTaskId = task.taskId;
      task.data.responseStatus = status;
      await this.hookRepository.saveHook(hook);
      await this.taskService.finishTask(task, TaskState.Success, `[${isoNow()}][TriggerHooks] trigger hook succeed ${status} \n`);
    } catch (e) {
      e.message = 'trigger hook failed: ' + e.message;
      task.error = e.message;
      await this.taskService.finishTask(task, TaskState.Fail, `[${isoNow()}][TriggerHooks] ${e.stack} \n`);
      return;
    }
  }

  async doExecuteTrigger(hook: Hook, payload: object): Promise<number> {
    const { digest, payloadStr } = hook.signPayload(payload);
    const url = new URL(hook.endpoint);
    const res = await this.httpclient.request(hook.endpoint, {
      method: 'POST',
      headers: {
        'content-type': 'application/json',
        'x-npm-signature': `sha256=${digest}`,
        host: url.host,
      },
      // In the webhook scenario every endpoint is different, so connections are
      // almost never reused; skip keepAlive here.
      agent: false,
      httpsAgent: false,
      data: payloadStr,
    } as any);
    if (res.status >= 200 && res.status < 300) {
      return res.status;
    }
    throw new Error(`hook response with ${res.status}`);
  }
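The `x-npm-signature: sha256=<digest>` header above follows the npm hooks convention. Assuming `hook.signPayload` computes an HMAC-SHA256 of `payloadStr` keyed by the hook secret (its implementation is not part of this diff), a receiving endpoint could verify deliveries roughly as follows (a sketch, not code from this repository):

```ts
import { createHmac, timingSafeEqual } from 'node:crypto';

// Verify an incoming webhook delivery. `secret` is the value registered with the hook;
// `rawBody` must be the exact string that was signed (the JSON body, before parsing).
export function verifyHookSignature(rawBody: string, signatureHeader: string, secret: string): boolean {
  const expected = `sha256=${createHmac('sha256', secret).update(rawBody).digest('hex')}`;
  const a = Buffer.from(expected);
  const b = Buffer.from(signatureHeader);
  // constant-time compare to avoid leaking the digest via timing
  return a.length === b.length && timingSafeEqual(a, b);
}
```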
  async createTriggerPayload(task: TriggerHookTask, hookEvent: HookEvent, hook: Hook): Promise<object | undefined> {
    const [ scope, name ] = getScopeAndName(hookEvent.fullname);
    const pkg = await this.packageRepository.findPackage(scope, name);
    if (!pkg) {
      await this.taskService.finishTask(task, TaskState.Success, `[${isoNow()}][TriggerHooks] can not find pkg for ${hookEvent.fullname} \n`);
      return;
    }
    const user = await this.userRepository.findUserByUserId(hook.ownerId);
    if (!user) {
      await this.taskService.finishTask(task, TaskState.Success, `[${isoNow()}][TriggerHooks] can not find user for ${hook.ownerId} \n`);
      return;
    }
    const manifest = await this.distRepository.readDistBytesToJSON(pkg!.manifestsDist!);
    return {
      event: hookEvent.event,
      name: pkg.fullname,
      type: 'package',
      version: '1.0.0',
      hookOwner: {
        username: user.name,
      },
      payload: manifest,
      change: hookEvent.change,
      time: hookEvent.time,
    };
  }
}
@@ -1,18 +1,25 @@
|
||||
import { stat } from 'fs/promises';
|
||||
import {
|
||||
AccessLevel,
|
||||
ContextProto,
|
||||
SingletonProto,
|
||||
EventBus,
|
||||
Inject,
|
||||
} from '@eggjs/tegg';
|
||||
import { ForbiddenError } from 'egg-errors';
|
||||
import { RequireAtLeastOne } from 'type-fest';
|
||||
import semver from 'semver';
|
||||
import { calculateIntegrity, detectInstallScript, formatTarball, getFullname, getScopeAndName } from '../../common/PackageUtil';
|
||||
import {
|
||||
calculateIntegrity,
|
||||
detectInstallScript,
|
||||
formatTarball,
|
||||
getFullname,
|
||||
getScopeAndName,
|
||||
hasShrinkWrapInTgz,
|
||||
} from '../../common/PackageUtil';
|
||||
import { AbstractService } from '../../common/AbstractService';
|
||||
import { BugVersionStore } from '../../common/adapter/BugVersionStore';
|
||||
import { BUG_VERSIONS, LATEST_TAG } from '../../common/constants';
|
||||
import { PackageRepository } from '../../repository/PackageRepository';
|
||||
import { AbbreviatedPackageJSONType, AbbreviatedPackageManifestType, PackageJSONType, PackageManifestType, PackageRepository } from '../../repository/PackageRepository';
|
||||
import { PackageVersionBlockRepository } from '../../repository/PackageVersionBlockRepository';
|
||||
import { PackageVersionDownloadRepository } from '../../repository/PackageVersionDownloadRepository';
|
||||
import { DistRepository } from '../../repository/DistRepository';
|
||||
@@ -37,6 +44,8 @@ import {
|
||||
} from '../event';
|
||||
import { BugVersionService } from './BugVersionService';
|
||||
import { BugVersion } from '../entity/BugVersion';
|
||||
import { RegistryManagerService } from './RegistryManagerService';
|
||||
import { Registry } from '../entity/Registry';
|
||||
|
||||
export interface PublishPackageCmd {
|
||||
// maintainer: Maintainer;
|
||||
@@ -45,7 +54,8 @@ export interface PublishPackageCmd {
|
||||
name: string;
|
||||
version: string;
|
||||
description: string;
|
||||
packageJson: any;
|
||||
packageJson: PackageJSONType;
|
||||
registryId?: string;
|
||||
readme: string;
|
||||
// require content or localFile field
|
||||
dist: RequireAtLeastOne<{
|
||||
@@ -64,8 +74,9 @@ export interface PublishPackageCmd {
|
||||
|
||||
const TOTAL = '@@TOTAL@@';
|
||||
const SCOPE_TOTAL_PREFIX = '@@SCOPE@@:';
|
||||
const DESCRIPTION_LIMIT = 1024 * 10;
|
||||
|
||||
@ContextProto({
|
||||
@SingletonProto({
|
||||
accessLevel: AccessLevel.PUBLIC,
|
||||
})
|
||||
export class PackageManagerService extends AbstractService {
|
||||
@@ -83,6 +94,8 @@ export class PackageManagerService extends AbstractService {
|
||||
private readonly bugVersionStore: BugVersionStore;
|
||||
@Inject()
|
||||
private readonly distRepository: DistRepository;
|
||||
@Inject()
|
||||
private readonly registryManagerService: RegistryManagerService;
|
||||
|
||||
private static downloadCounters = {};
|
||||
|
||||
@@ -95,6 +108,7 @@ export class PackageManagerService extends AbstractService {
|
||||
name: cmd.name,
|
||||
isPrivate: cmd.isPrivate,
|
||||
description: cmd.description,
|
||||
registryId: cmd.registryId,
|
||||
});
|
||||
} else {
|
||||
// update description
|
||||
@@ -102,6 +116,17 @@ export class PackageManagerService extends AbstractService {
|
||||
if (pkg.description !== cmd.description) {
|
||||
pkg.description = cmd.description;
|
||||
}
|
||||
|
||||
/* c8 ignore next 3 */
|
||||
// package can be migrated into another registry
|
||||
if (cmd.registryId) {
|
||||
pkg.registryId = cmd.registryId;
|
||||
}
|
||||
}
|
||||
|
||||
// prevent description from exceeding the db column length limit
|
||||
if (pkg.description?.length > DESCRIPTION_LIMIT) {
|
||||
pkg.description = pkg.description.substring(0, DESCRIPTION_LIMIT);
|
||||
}
|
||||
await this.packageRepository.savePackage(pkg);
|
||||
// create maintainer
|
||||
@@ -117,9 +142,30 @@ export class PackageManagerService extends AbstractService {
|
||||
delete cmd.packageJson.readme;
|
||||
}
|
||||
|
||||
const publishTime = cmd.publishTime || new Date();
|
||||
|
||||
// add _cnpmcore_publish_time field to cmd.packageJson
|
||||
if (!cmd.packageJson._cnpmcore_publish_time) {
|
||||
cmd.packageJson._cnpmcore_publish_time = new Date();
|
||||
cmd.packageJson._cnpmcore_publish_time = publishTime;
|
||||
}
|
||||
if (!cmd.packageJson.publish_time) {
|
||||
cmd.packageJson.publish_time = publishTime.getTime();
|
||||
}
|
||||
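For clarity, the two timestamp fields added above use different encodings; a small illustrative snippet (values and variable names are made up):

```ts
// Illustrative only: _cnpmcore_publish_time keeps the Date itself (serialized as an
// ISO string in the stored manifest), while publish_time is epoch milliseconds,
// which is what npminstall reads to show recently updated versions.
const publishTime = new Date();
const manifestTimestamps = {
  _cnpmcore_publish_time: publishTime,
  publish_time: publishTime.getTime(),
};
console.log(manifestTimestamps.publish_time); // e.g. 1683343826499
```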
if (cmd.packageJson._hasShrinkwrap === undefined) {
|
||||
cmd.packageJson._hasShrinkwrap = await hasShrinkWrapInTgz(cmd.dist.content || cmd.dist.localFile!);
|
||||
}
|
||||
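`hasShrinkWrapInTgz` is imported from PackageUtil, but its body is not part of this diff. A minimal sketch of the idea, assuming the node-tar package and a file path input (the real helper above also accepts a Buffer):

```ts
import * as tar from 'tar';

// Sketch only: list the tgz entries and look for package/npm-shrinkwrap.json
// without extracting anything to disk.
async function hasShrinkWrapInTgzSketch(tgzFile: string): Promise<boolean> {
  let found = false;
  await tar.t({
    file: tgzFile,
    onentry: entry => {
      if (entry.path === 'package/npm-shrinkwrap.json') found = true;
    },
  });
  return found;
}
```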
|
||||
// add _registry_name field to cmd.packageJson
|
||||
if (!cmd.packageJson._source_registry_name) {
|
||||
let registry: Registry | null;
|
||||
if (cmd.registryId) {
|
||||
registry = await this.registryManagerService.findByRegistryId(cmd.registryId);
|
||||
} else {
|
||||
registry = await this.registryManagerService.ensureDefaultRegistry();
|
||||
}
|
||||
if (registry) {
|
||||
cmd.packageJson._source_registry_name = registry.name;
|
||||
}
|
||||
}
|
||||
|
||||
// https://github.com/npm/registry/blob/master/docs/responses/package-metadata.md#abbreviated-version-object
|
||||
@@ -175,7 +221,11 @@ export class PackageManagerService extends AbstractService {
|
||||
engines: cmd.packageJson.engines,
|
||||
_hasShrinkwrap: cmd.packageJson._hasShrinkwrap,
|
||||
hasInstallScript,
|
||||
});
|
||||
// https://github.com/cnpm/npminstall/blob/13efc7eec21a61e509226e3772bfb75cd5605612/lib/install_package.js#L176
|
||||
// npminstall require publish time to show the recently update versions
|
||||
publish_time: cmd.packageJson.publish_time,
|
||||
_source_registry_name: cmd.packageJson._source_registry_name,
|
||||
} as AbbreviatedPackageJSONType);
|
||||
const abbreviatedDistBytes = Buffer.from(abbreviated);
|
||||
const abbreviatedDistIntegrity = await calculateIntegrity(abbreviatedDistBytes);
|
||||
const readmeDistBytes = Buffer.from(cmd.readme);
|
||||
@@ -186,7 +236,7 @@ export class PackageManagerService extends AbstractService {
|
||||
pkgVersion = PackageVersion.create({
|
||||
packageId: pkg.packageId,
|
||||
version: cmd.version,
|
||||
publishTime: cmd.publishTime || new Date(),
|
||||
publishTime,
|
||||
manifestDist: pkg.createManifest(cmd.version, {
|
||||
size: manifestDistBytes.length,
|
||||
shasum: manifestDistIntegrity.shasum,
|
||||
@@ -209,14 +259,21 @@ export class PackageManagerService extends AbstractService {
|
||||
this.distRepository.saveDist(pkgVersion.manifestDist, manifestDistBytes),
|
||||
this.distRepository.saveDist(pkgVersion.readmeDist, readmeDistBytes),
|
||||
]);
|
||||
await this.packageRepository.createPackageVersion(pkgVersion);
|
||||
try {
|
||||
await this.packageRepository.createPackageVersion(pkgVersion);
|
||||
} catch (e) {
|
||||
if (e.code === 'ER_DUP_ENTRY') {
|
||||
throw new ForbiddenError(`Can't modify pre-existing version: ${pkg.fullname}@${cmd.version}`);
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
if (cmd.skipRefreshPackageManifests !== true) {
|
||||
await this.refreshPackageChangeVersionsToDists(pkg, [ pkgVersion.version ]);
|
||||
}
|
||||
if (cmd.tag) {
|
||||
await this.savePackageTag(pkg, cmd.tag, cmd.version, true);
|
||||
}
|
||||
this.eventBus.emit(PACKAGE_VERSION_ADDED, pkg.fullname, pkgVersion.version);
|
||||
this.eventBus.emit(PACKAGE_VERSION_ADDED, pkg.fullname, pkgVersion.version, cmd.tag);
|
||||
return pkgVersion;
|
||||
}
|
||||
|
||||
@@ -233,15 +290,15 @@ export class PackageManagerService extends AbstractService {
|
||||
}
|
||||
await this.packageVersionBlockRepository.savePackageVersionBlock(block);
|
||||
if (pkg.manifestsDist && pkg.abbreviatedsDist) {
|
||||
const fullManifests = await this.distRepository.readDistBytesToJSON(pkg.manifestsDist);
|
||||
const fullManifests = await this.distRepository.readDistBytesToJSON<PackageManifestType>(pkg.manifestsDist);
|
||||
if (fullManifests) {
|
||||
fullManifests.block = reason;
|
||||
}
|
||||
const abbreviatedManifests = await this.distRepository.readDistBytesToJSON(pkg.abbreviatedsDist);
|
||||
const abbreviatedManifests = await this.distRepository.readDistBytesToJSON<AbbreviatedPackageManifestType>(pkg.abbreviatedsDist);
|
||||
if (abbreviatedManifests) {
|
||||
abbreviatedManifests.block = reason;
|
||||
}
|
||||
await this._updatePackageManifestsToDists(pkg, fullManifests, abbreviatedManifests);
|
||||
await this._updatePackageManifestsToDists(pkg, fullManifests || null, abbreviatedManifests || null);
|
||||
this.eventBus.emit(PACKAGE_BLOCKED, pkg.fullname);
|
||||
this.logger.info('[packageManagerService.blockPackage:success] packageId: %s, reason: %j',
|
||||
pkg.packageId, reason);
|
||||
@@ -255,15 +312,15 @@ export class PackageManagerService extends AbstractService {
|
||||
await this.packageVersionBlockRepository.removePackageVersionBlock(block.packageVersionBlockId);
|
||||
}
|
||||
if (pkg.manifestsDist && pkg.abbreviatedsDist) {
|
||||
const fullManifests = await this.distRepository.readDistBytesToJSON(pkg.manifestsDist);
|
||||
const fullManifests = await this.distRepository.readDistBytesToJSON<PackageManifestType>(pkg.manifestsDist);
|
||||
if (fullManifests) {
|
||||
fullManifests.block = undefined;
|
||||
}
|
||||
const abbreviatedManifests = await this.distRepository.readDistBytesToJSON(pkg.abbreviatedsDist);
|
||||
const abbreviatedManifests = await this.distRepository.readDistBytesToJSON<AbbreviatedPackageManifestType>(pkg.abbreviatedsDist);
|
||||
if (abbreviatedManifests) {
|
||||
abbreviatedManifests.block = undefined;
|
||||
}
|
||||
await this._updatePackageManifestsToDists(pkg, fullManifests, abbreviatedManifests);
|
||||
await this._updatePackageManifestsToDists(pkg, fullManifests || null, abbreviatedManifests || null);
|
||||
this.eventBus.emit(PACKAGE_UNBLOCKED, pkg.fullname);
|
||||
this.logger.info('[packageManagerService.unblockPackage:success] packageId: %s',
|
||||
pkg.packageId);
|
||||
@@ -273,7 +330,7 @@ export class PackageManagerService extends AbstractService {
|
||||
async replacePackageMaintainers(pkg: Package, maintainers: User[]) {
|
||||
await this.packageRepository.replacePackageMaintainers(pkg.packageId, maintainers.map(m => m.userId));
|
||||
await this._refreshPackageManifestRootAttributeOnlyToDists(pkg, 'maintainers');
|
||||
this.eventBus.emit(PACKAGE_MAINTAINER_CHANGED, pkg.fullname);
|
||||
this.eventBus.emit(PACKAGE_MAINTAINER_CHANGED, pkg.fullname, maintainers);
|
||||
}
|
||||
|
||||
async savePackageMaintainers(pkg: Package, maintainers: User[]) {
|
||||
@@ -286,7 +343,7 @@ export class PackageManagerService extends AbstractService {
|
||||
}
|
||||
if (hasNewRecord) {
|
||||
await this._refreshPackageManifestRootAttributeOnlyToDists(pkg, 'maintainers');
|
||||
this.eventBus.emit(PACKAGE_MAINTAINER_CHANGED, pkg.fullname);
|
||||
this.eventBus.emit(PACKAGE_MAINTAINER_CHANGED, pkg.fullname, maintainers);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -305,28 +362,23 @@ export class PackageManagerService extends AbstractService {
|
||||
}
|
||||
|
||||
async listPackageFullManifests(scope: string, name: string, isSync = false) {
|
||||
return await this._listPackageFullOrAbbreviatedManifests(scope, name, true, isSync);
|
||||
return await this._listPackageFullOrAbbreviatedManifests<PackageManifestType>(scope, name, true, isSync);
|
||||
}
|
||||
|
||||
async listPackageAbbreviatedManifests(scope: string, name: string, isSync = false) {
|
||||
return await this._listPackageFullOrAbbreviatedManifests(scope, name, false, isSync);
|
||||
}
|
||||
|
||||
async showPackageVersionManifest(scope: string, name: string, versionOrTag: string, isSync = false) {
|
||||
let blockReason = '';
|
||||
let manifest;
|
||||
async showPackageVersionByVersionOrTag(scope: string, name: string, versionOrTag: string): Promise<{
|
||||
blockReason?: string,
|
||||
pkg?: Package,
|
||||
packageVersion?: PackageVersion | null,
|
||||
}> {
|
||||
const pkg = await this.packageRepository.findPackage(scope, name);
|
||||
const pkgId = pkg?.packageId;
|
||||
if (!pkg) return { manifest: null, blockReason, pkgId };
|
||||
|
||||
if (!pkg) return {};
|
||||
const block = await this.packageVersionBlockRepository.findPackageBlock(pkg.packageId);
|
||||
if (block) {
|
||||
blockReason = block.reason;
|
||||
return {
|
||||
blockReason,
|
||||
manifest,
|
||||
pkgId,
|
||||
};
|
||||
return { blockReason: block.reason, pkg };
|
||||
}
|
||||
let version = versionOrTag;
|
||||
if (!semver.valid(versionOrTag)) {
|
||||
@@ -337,8 +389,21 @@ export class PackageManagerService extends AbstractService {
|
||||
}
|
||||
}
|
||||
const packageVersion = await this.packageRepository.findPackageVersion(pkg.packageId, version);
|
||||
if (!packageVersion) return { manifest: null, blockReason, pkgId };
|
||||
manifest = await this.distRepository.findPackageVersionManifest(packageVersion.packageId, version);
|
||||
return { packageVersion, pkg };
|
||||
}
|
||||
|
||||
async showPackageVersionManifest(scope: string, name: string, versionOrTag: string, isSync = false) {
|
||||
let manifest;
|
||||
const { blockReason, packageVersion, pkg } = await this.showPackageVersionByVersionOrTag(scope, name, versionOrTag);
|
||||
if (blockReason) {
|
||||
return {
|
||||
blockReason,
|
||||
manifest,
|
||||
pkg,
|
||||
};
|
||||
}
|
||||
if (!packageVersion) return { manifest: null, blockReason, pkg };
|
||||
manifest = await this.distRepository.findPackageVersionManifest(packageVersion.packageId, packageVersion.version);
|
||||
let bugVersion: BugVersion | undefined;
|
||||
// sync mode response no bug version fixed
|
||||
if (!isSync) {
|
||||
@@ -348,8 +413,7 @@ export class PackageManagerService extends AbstractService {
|
||||
const fullname = getFullname(scope, name);
|
||||
manifest = await this.bugVersionService.fixPackageBugVersion(bugVersion, fullname, manifest);
|
||||
}
|
||||
return { manifest, blockReason, pkgId };
|
||||
|
||||
return { manifest, blockReason, pkg };
|
||||
}
|
||||
|
||||
async downloadPackageVersionTar(packageVersion: PackageVersion) {
|
||||
@@ -408,9 +472,9 @@ export class PackageManagerService extends AbstractService {
|
||||
this.logger.info('[packageManagerService.savePackageVersionCounters:saved] %d total', total);
|
||||
}
|
||||
|
||||
public async saveDeprecatedVersions(pkg: Package, deprecateds: { version: string; deprecated: string }[]) {
|
||||
public async saveDeprecatedVersions(pkg: Package, deprecatedList: { version: string; deprecated: string }[]) {
|
||||
const updateVersions: string[] = [];
|
||||
for (const { version, deprecated } of deprecateds) {
|
||||
for (const { version, deprecated } of deprecatedList) {
|
||||
const pkgVersion = await this.packageRepository.findPackageVersion(pkg.packageId, version);
|
||||
if (!pkgVersion) continue;
|
||||
const message = deprecated === '' ? undefined : deprecated;
|
||||
@@ -420,7 +484,7 @@ export class PackageManagerService extends AbstractService {
|
||||
updateVersions.push(version);
|
||||
}
|
||||
await this.refreshPackageChangeVersionsToDists(pkg, updateVersions);
|
||||
this.eventBus.emit(PACKAGE_META_CHANGED, pkg.fullname, { deprecateds });
|
||||
this.eventBus.emit(PACKAGE_META_CHANGED, pkg.fullname, { deprecateds: deprecatedList });
|
||||
}
|
||||
|
||||
public async savePackageVersionManifest(pkgVersion: PackageVersion, mergeManifest: object, mergeAbbreviated: object) {
|
||||
@@ -442,6 +506,11 @@ export class PackageManagerService extends AbstractService {
|
||||
|
||||
public async unpublishPackage(pkg: Package) {
|
||||
const pkgVersions = await this.packageRepository.listPackageVersions(pkg.packageId);
|
||||
// already unpublished
|
||||
if (pkgVersions.length === 0) {
|
||||
this.logger.info(`[packageManagerService.unpublishPackage:skip] ${pkg.packageId} already unpublished`);
|
||||
return;
|
||||
}
|
||||
for (const pkgVersion of pkgVersions) {
|
||||
await this._removePackageVersionAndDist(pkgVersion);
|
||||
}
|
||||
@@ -464,10 +533,17 @@ export class PackageManagerService extends AbstractService {
|
||||
}
|
||||
|
||||
public async removePackageVersion(pkg: Package, pkgVersion: PackageVersion, skipRefreshPackageManifests = false) {
|
||||
const currentVersions = await this.packageRepository.listPackageVersionNames(pkg.packageId);
|
||||
// only one version, unpublish the package
|
||||
if (currentVersions.length === 1 && currentVersions[0] === pkgVersion.version) {
|
||||
await this.unpublishPackage(pkg);
|
||||
return;
|
||||
}
|
||||
// remove version & update tags
|
||||
await this._removePackageVersionAndDist(pkgVersion);
|
||||
// all versions removed
|
||||
const versions = await this.packageRepository.listPackageVersionNames(pkg.packageId);
|
||||
if (versions.length > 0) {
|
||||
let updateTag: string | undefined;
|
||||
// make sure latest tag exists
|
||||
const latestTag = await this.packageRepository.findPackageTag(pkg.packageId, 'latest');
|
||||
if (latestTag?.version === pkgVersion.version) {
|
||||
@@ -475,17 +551,16 @@ export class PackageManagerService extends AbstractService {
|
||||
// https://github.com/npm/libnpmpublish/blob/main/unpublish.js#L62
|
||||
const latestVersion = versions.sort(semver.compareLoose).pop();
|
||||
if (latestVersion) {
|
||||
updateTag = latestTag.tag;
|
||||
await this.savePackageTag(pkg, latestTag.tag, latestVersion, true);
|
||||
}
|
||||
}
|
||||
if (skipRefreshPackageManifests !== true) {
|
||||
await this.refreshPackageChangeVersionsToDists(pkg, undefined, [ pkgVersion.version ]);
|
||||
this.eventBus.emit(PACKAGE_VERSION_REMOVED, pkg.fullname, pkgVersion.version);
|
||||
this.eventBus.emit(PACKAGE_VERSION_REMOVED, pkg.fullname, pkgVersion.version, updateTag);
|
||||
}
|
||||
return;
|
||||
}
|
||||
// unpublish
|
||||
await this.unpublishPackage(pkg);
|
||||
}
|
||||
|
||||
public async savePackageTag(pkg: Package, tag: string, version: string, skipEvent = false) {
|
||||
@@ -529,9 +604,9 @@ export class PackageManagerService extends AbstractService {
|
||||
if (!pkg.manifestsDist?.distId || !pkg.abbreviatedsDist?.distId) {
|
||||
return await this._refreshPackageManifestsToDists(pkg);
|
||||
}
|
||||
const fullManifests = await this.distRepository.readDistBytesToJSON(pkg.manifestsDist);
|
||||
const abbreviatedManifests = await this.distRepository.readDistBytesToJSON(pkg.abbreviatedsDist);
|
||||
if (!fullManifests.versions || !abbreviatedManifests.versions) {
|
||||
const fullManifests = await this.distRepository.readDistBytesToJSON<PackageManifestType>(pkg.manifestsDist);
|
||||
const abbreviatedManifests = await this.distRepository.readDistBytesToJSON<AbbreviatedPackageManifestType>(pkg.abbreviatedsDist);
|
||||
if (!fullManifests?.versions || !abbreviatedManifests?.versions) {
|
||||
// is unpublished, refresh all again
|
||||
return await this._refreshPackageManifestsToDists(pkg);
|
||||
}
|
||||
@@ -540,7 +615,7 @@ export class PackageManagerService extends AbstractService {
|
||||
for (const version of updateVersions) {
|
||||
const packageVersion = await this.packageRepository.findPackageVersion(pkg.packageId, version);
|
||||
if (packageVersion) {
|
||||
const manifest = await this.distRepository.readDistBytesToJSON(packageVersion.manifestDist);
|
||||
const manifest = await this.distRepository.readDistBytesToJSON<PackageJSONType>(packageVersion.manifestDist);
|
||||
if (!manifest) continue;
|
||||
if ('readme' in manifest) {
|
||||
delete manifest.readme;
|
||||
@@ -548,8 +623,10 @@ export class PackageManagerService extends AbstractService {
|
||||
fullManifests.versions[packageVersion.version] = manifest;
|
||||
fullManifests.time[packageVersion.version] = packageVersion.publishTime;
|
||||
|
||||
const abbreviatedManifest = await this.distRepository.readDistBytesToJSON(packageVersion.abbreviatedDist);
|
||||
abbreviatedManifests.versions[packageVersion.version] = abbreviatedManifest;
|
||||
const abbreviatedManifest = await this.distRepository.readDistBytesToJSON<AbbreviatedPackageJSONType>(packageVersion.abbreviatedDist);
|
||||
if (abbreviatedManifest) {
|
||||
abbreviatedManifests.versions[packageVersion.version] = abbreviatedManifest;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -571,13 +648,14 @@ export class PackageManagerService extends AbstractService {
|
||||
// TODO performance problem, cache bugVersion and update with schedule
|
||||
const pkg = await this.packageRepository.findPackage('', BUG_VERSIONS);
|
||||
if (!pkg) return;
|
||||
/* c8 ignore next 10 */
|
||||
const tag = await this.packageRepository.findPackageTag(pkg!.packageId, LATEST_TAG);
|
||||
if (!tag) return;
|
||||
let bugVersion = this.bugVersionStore.getBugVersion(tag!.version);
|
||||
if (!bugVersion) {
|
||||
const packageVersionJson = await this.distRepository.findPackageVersionManifest(pkg!.packageId, tag!.version);
|
||||
const packageVersionJson = (await this.distRepository.findPackageVersionManifest(pkg!.packageId, tag!.version)) as PackageJSONType;
|
||||
if (!packageVersionJson) return;
|
||||
const data = packageVersionJson.config['bug-versions'];
|
||||
const data = packageVersionJson.config?.['bug-versions'];
|
||||
bugVersion = new BugVersion(data);
|
||||
this.bugVersionStore.setBugVersion(bugVersion, tag!.version);
|
||||
}
|
||||
@@ -608,19 +686,25 @@ export class PackageManagerService extends AbstractService {
|
||||
// only refresh root attributes only, e.g.: dist-tags, maintainers
|
||||
private async _refreshPackageManifestRootAttributeOnlyToDists(pkg: Package, refreshAttr: 'dist-tags' | 'maintainers') {
|
||||
if (refreshAttr === 'maintainers') {
|
||||
const fullManifests = await this.distRepository.readDistBytesToJSON(pkg.manifestsDist!);
|
||||
const fullManifests = await this.distRepository.readDistBytesToJSON<PackageManifestType>(pkg.manifestsDist!);
|
||||
const maintainers = await this._listPackageMaintainers(pkg);
|
||||
fullManifests.maintainers = maintainers;
|
||||
await this._updatePackageManifestsToDists(pkg, fullManifests, null);
|
||||
if (fullManifests) {
|
||||
fullManifests.maintainers = maintainers;
|
||||
await this._updatePackageManifestsToDists(pkg, fullManifests, null);
|
||||
}
|
||||
} else if (refreshAttr === 'dist-tags') {
|
||||
const fullManifests = await this.distRepository.readDistBytesToJSON(pkg.manifestsDist!);
|
||||
const abbreviatedManifests = await this.distRepository.readDistBytesToJSON(pkg.abbreviatedsDist!);
|
||||
await this._setPackageDistTagsAndLatestInfos(pkg, fullManifests, abbreviatedManifests);
|
||||
await this._updatePackageManifestsToDists(pkg, fullManifests, abbreviatedManifests);
|
||||
const fullManifests = await this.distRepository.readDistBytesToJSON<PackageManifestType>(pkg.manifestsDist!);
|
||||
if (fullManifests) {
|
||||
const abbreviatedManifests = await this.distRepository.readDistBytesToJSON<AbbreviatedPackageManifestType>(pkg.abbreviatedsDist!);
|
||||
if (abbreviatedManifests) {
|
||||
await this._setPackageDistTagsAndLatestInfos(pkg, fullManifests, abbreviatedManifests);
|
||||
}
|
||||
await this._updatePackageManifestsToDists(pkg, fullManifests, abbreviatedManifests || null);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private _mergeLatestManifestFields(fullManifests: object, latestManifest: object) {
|
||||
private _mergeLatestManifestFields(fullManifests: PackageManifestType, latestManifest: PackageJSONType | null) {
|
||||
if (!latestManifest) return;
|
||||
// https://github.com/npm/registry/blob/master/docs/responses/package-metadata.md#full-metadata-format
|
||||
const fieldsFromLatestManifest = [
|
||||
@@ -633,14 +717,14 @@ export class PackageManagerService extends AbstractService {
|
||||
}
|
||||
}
|
||||
|
||||
private async _setPackageDistTagsAndLatestInfos(pkg: Package, fullManifests: any, abbreviatedManifests: any) {
|
||||
private async _setPackageDistTagsAndLatestInfos(pkg: Package, fullManifests: PackageManifestType, abbreviatedManifests: AbbreviatedPackageManifestType) {
|
||||
const distTags = await this._listPackageDistTags(pkg);
|
||||
if (distTags.latest) {
|
||||
const packageVersion = await this.packageRepository.findPackageVersion(pkg.packageId, distTags.latest);
|
||||
if (packageVersion) {
|
||||
fullManifests.readme = await this.distRepository.readDistBytesToString(packageVersion.readmeDist);
|
||||
const latestManifest = await this.distRepository.readDistBytesToJSON(packageVersion.manifestDist);
|
||||
this._mergeLatestManifestFields(fullManifests, latestManifest);
|
||||
const latestManifest = await this.distRepository.readDistBytesToJSON<PackageJSONType>(packageVersion.manifestDist);
|
||||
this._mergeLatestManifestFields(fullManifests, latestManifest || null);
|
||||
}
|
||||
}
|
||||
fullManifests['dist-tags'] = distTags;
|
||||
@@ -648,8 +732,8 @@ export class PackageManagerService extends AbstractService {
|
||||
}
|
||||
|
||||
private async _mergeManifestDist(manifestDist: Dist, mergeData?: any, replaceData?: any) {
|
||||
let manifest = await this.distRepository.readDistBytesToJSON(manifestDist);
|
||||
if (mergeData) {
|
||||
let manifest = await this.distRepository.readDistBytesToJSON<PackageManifestType>(manifestDist);
|
||||
if (mergeData && manifest) {
|
||||
Object.assign(manifest, mergeData);
|
||||
}
|
||||
if (replaceData) {
|
||||
@@ -663,7 +747,7 @@ export class PackageManagerService extends AbstractService {
|
||||
await this.distRepository.saveDist(manifestDist, manifestBytes);
|
||||
}
|
||||
|
||||
private async _updatePackageManifestsToDists(pkg: Package, fullManifests: any | null, abbreviatedManifests: any | null): Promise<void> {
|
||||
private async _updatePackageManifestsToDists(pkg: Package, fullManifests: PackageManifestType | null, abbreviatedManifests: AbbreviatedPackageManifestType | null): Promise<void> {
|
||||
const modified = new Date();
|
||||
if (fullManifests) {
|
||||
fullManifests.time.modified = modified;
|
||||
@@ -704,7 +788,7 @@ export class PackageManagerService extends AbstractService {
|
||||
}
|
||||
}
|
||||
|
||||
private async _listPackageFullOrAbbreviatedManifests(scope: string, name: string, isFullManifests: boolean, isSync: boolean) {
|
||||
private async _listPackageFullOrAbbreviatedManifests<T extends PackageManifestType | AbbreviatedPackageManifestType>(scope: string, name: string, isFullManifests: boolean, isSync: boolean) {
|
||||
let etag = '';
|
||||
let blockReason = '';
|
||||
const pkg = await this.packageRepository.findPackage(scope, name);
|
||||
@@ -726,13 +810,13 @@ export class PackageManagerService extends AbstractService {
|
||||
// read from dist
|
||||
if (dist?.distId) {
|
||||
etag = `"${dist.shasum}"`;
|
||||
const data = await this.distRepository.readDistBytesToJSON(dist);
|
||||
const data = (await this.distRepository.readDistBytesToJSON(dist)) as T;
|
||||
if (bugVersion) {
|
||||
await this.bugVersionService.fixPackageBugVersions(bugVersion, fullname, data.versions);
|
||||
const distBytes = Buffer.from(JSON.stringify(data));
|
||||
const distIntegrity = await calculateIntegrity(distBytes);
|
||||
etag = `"${distIntegrity.shasum}"`;
|
||||
}
|
||||
const distBytes = Buffer.from(JSON.stringify(data));
|
||||
const distIntegrity = await calculateIntegrity(distBytes);
|
||||
etag = `"${distIntegrity.shasum}"`;
|
||||
return { etag, data, blockReason };
|
||||
}
|
||||
|
||||
@@ -744,7 +828,8 @@ export class PackageManagerService extends AbstractService {
|
||||
return { etag, data: null, blockReason };
|
||||
}
|
||||
await this._updatePackageManifestsToDists(pkg, fullManifests, abbreviatedManifests);
|
||||
const manifests = (fullManifests || abbreviatedManifests)!;
|
||||
const manifests = (fullManifests || abbreviatedManifests)! as T;
|
||||
/* c8 ignore next 5 */
|
||||
if (bugVersion) {
|
||||
await this.bugVersionService.fixPackageBugVersions(bugVersion, fullname, (manifests as any).versions);
|
||||
const distBytes = Buffer.from(JSON.stringify(manifests));
|
||||
@@ -758,16 +843,11 @@ export class PackageManagerService extends AbstractService {
|
||||
}
|
||||
|
||||
private async _listPackageMaintainers(pkg: Package) {
|
||||
const maintainers: { name: string; email: string; }[] = [];
|
||||
const users = await this.packageRepository.listPackageMaintainers(pkg.packageId);
|
||||
for (const user of users) {
|
||||
const name = user.name.startsWith('npm:') ? user.name.replace('npm:', '') : user.name;
|
||||
maintainers.push({ name, email: user.email });
|
||||
}
|
||||
return maintainers;
|
||||
return users.map(({ displayName, email }) => ({ name: displayName, email }));
|
||||
}
|
||||
|
||||
private async _listPackageFullManifests(pkg: Package): Promise<object | null> {
|
||||
private async _listPackageFullManifests(pkg: Package): Promise<PackageManifestType | null> {
|
||||
// read all versions from db
|
||||
const packageVersions = await this.packageRepository.listPackageVersions(pkg.packageId);
|
||||
if (packageVersions.length === 0) return null;
|
||||
@@ -775,7 +855,7 @@ export class PackageManagerService extends AbstractService {
|
||||
const distTags = await this._listPackageDistTags(pkg);
|
||||
const maintainers = await this._listPackageMaintainers(pkg);
|
||||
// https://github.com/npm/registry/blob/master/docs/responses/package-metadata.md#full-metadata-format
|
||||
const data = {
|
||||
const data:PackageManifestType = {
|
||||
_id: `${pkg.fullname}`,
|
||||
_rev: `${pkg.id}-${pkg.packageId}`,
|
||||
'dist-tags': distTags,
|
||||
@@ -810,21 +890,22 @@ export class PackageManagerService extends AbstractService {
|
||||
// users: an object whose keys are the npm user names of people who have starred this package
|
||||
};
|
||||
|
||||
let lastestTagVersion = '';
|
||||
let latestTagVersion = '';
|
||||
if (distTags.latest) {
|
||||
lastestTagVersion = distTags.latest;
|
||||
latestTagVersion = distTags.latest;
|
||||
}
|
||||
|
||||
let latestManifest: any;
|
||||
let latestPackageVersion = packageVersions[0];
|
||||
// https://github.com/npm/registry/blob/master/docs/responses/package-metadata.md#package-metadata
|
||||
for (const packageVersion of packageVersions) {
|
||||
const manifest = await this.distRepository.readDistBytesToJSON(packageVersion.manifestDist);
|
||||
const manifest = await this.distRepository.readDistBytesToJSON<PackageJSONType>(packageVersion.manifestDist);
|
||||
if (!manifest) continue;
|
||||
/* c8 ignore next 3 */
|
||||
if ('readme' in manifest) {
|
||||
delete manifest.readme;
|
||||
}
|
||||
if (lastestTagVersion && packageVersion.version === lastestTagVersion) {
|
||||
if (latestTagVersion && packageVersion.version === latestTagVersion) {
|
||||
latestManifest = manifest;
|
||||
latestPackageVersion = packageVersion;
|
||||
}
|
||||
@@ -840,7 +921,7 @@ export class PackageManagerService extends AbstractService {
|
||||
return data;
|
||||
}
|
||||
|
||||
private async _listPackageAbbreviatedManifests(pkg: Package): Promise<object | null> {
|
||||
private async _listPackageAbbreviatedManifests(pkg: Package): Promise<AbbreviatedPackageManifestType | null> {
|
||||
// read all versions from db
|
||||
const packageVersions = await this.packageRepository.listPackageVersions(pkg.packageId);
|
||||
if (packageVersions.length === 0) return null;
|
||||
@@ -848,7 +929,7 @@ export class PackageManagerService extends AbstractService {
|
||||
const distTags = await this._listPackageDistTags(pkg);
|
||||
// https://github.com/npm/registry/blob/master/docs/responses/package-metadata.md#package-metadata
|
||||
// tiny-tarball is a small package with only one version and no dependencies.
|
||||
const data = {
|
||||
const data: AbbreviatedPackageManifestType = {
|
||||
'dist-tags': distTags,
|
||||
modified: pkg.updatedAt,
|
||||
name: pkg.fullname,
|
||||
@@ -856,8 +937,10 @@ export class PackageManagerService extends AbstractService {
|
||||
};
|
||||
|
||||
for (const packageVersion of packageVersions) {
|
||||
const manifest = await this.distRepository.readDistBytesToJSON(packageVersion.abbreviatedDist);
|
||||
data.versions[packageVersion.version] = manifest;
|
||||
const manifest = await this.distRepository.readDistBytesToJSON<AbbreviatedPackageJSONType>(packageVersion.abbreviatedDist);
|
||||
if (manifest) {
|
||||
data.versions[packageVersion.version] = manifest;
|
||||
}
|
||||
}
|
||||
return data;
|
||||
}
|
||||
|
||||
@@ -1,15 +1,17 @@
|
||||
import os from 'os';
|
||||
import {
|
||||
AccessLevel,
|
||||
ContextProto,
|
||||
SingletonProto,
|
||||
Inject,
|
||||
} from '@eggjs/tegg';
|
||||
import { Pointcut } from '@eggjs/tegg/aop';
|
||||
import {
|
||||
EggContextHttpClient,
|
||||
} from 'egg';
|
||||
import { setTimeout } from 'timers/promises';
|
||||
import { rm } from 'fs/promises';
|
||||
import { NPMRegistry } from '../../common/adapter/NPMRegistry';
|
||||
import semver from 'semver';
|
||||
import { NPMRegistry, RegistryResponse } from '../../common/adapter/NPMRegistry';
|
||||
import { detectInstallScript, getScopeAndName } from '../../common/PackageUtil';
|
||||
import { downloadToTempfile } from '../../common/FileUtil';
|
||||
import { TaskState, TaskType } from '../../common/enum/Task';
|
||||
@@ -18,20 +20,37 @@ import { TaskRepository } from '../../repository/TaskRepository';
|
||||
import { PackageRepository } from '../../repository/PackageRepository';
|
||||
import { PackageVersionDownloadRepository } from '../../repository/PackageVersionDownloadRepository';
|
||||
import { UserRepository } from '../../repository/UserRepository';
|
||||
import { DistRepository } from '../../repository/DistRepository';
|
||||
import { Task, SyncPackageTaskOptions } from '../entity/Task';
|
||||
import { Task, SyncPackageTaskOptions, CreateSyncPackageTask } from '../entity/Task';
|
||||
import { Package } from '../entity/Package';
|
||||
import { UserService } from './UserService';
|
||||
import { TaskService } from './TaskService';
|
||||
import { PackageManagerService } from './PackageManagerService';
|
||||
import { CacheService } from './CacheService';
|
||||
import { User } from '../entity/User';
|
||||
import { RegistryManagerService } from './RegistryManagerService';
|
||||
import { Registry } from '../entity/Registry';
|
||||
import { BadRequestError } from 'egg-errors';
|
||||
import { ScopeManagerService } from './ScopeManagerService';
|
||||
import { EventCorkAdvice } from './EventCorkerAdvice';
|
||||
import { SyncDeleteMode } from '../../common/constants';
|
||||
|
||||
type syncDeletePkgOptions = {
|
||||
task: Task,
|
||||
pkg: Package | null,
|
||||
logUrl: string,
|
||||
url: string,
|
||||
logs: string[],
|
||||
data: any,
|
||||
};
|
||||
|
||||
function isoNow() {
|
||||
return new Date().toISOString();
|
||||
}
|
||||
|
||||
@ContextProto({
|
||||
export class RegistryNotMatchError extends BadRequestError {
|
||||
}
|
||||
|
||||
@SingletonProto({
|
||||
accessLevel: AccessLevel.PUBLIC,
|
||||
})
|
||||
export class PackageSyncerService extends AbstractService {
|
||||
@@ -56,9 +75,19 @@ export class PackageSyncerService extends AbstractService {
|
||||
@Inject()
|
||||
private readonly httpclient: EggContextHttpClient;
|
||||
@Inject()
|
||||
private readonly distRepository: DistRepository;
|
||||
private readonly registryManagerService: RegistryManagerService;
|
||||
@Inject()
|
||||
private readonly scopeManagerService: ScopeManagerService;
|
||||
|
||||
public async createTask(fullname: string, options?: SyncPackageTaskOptions) {
|
||||
const [ scope, name ] = getScopeAndName(fullname);
|
||||
const pkg = await this.packageRepository.findPackage(scope, name);
|
||||
// sync task request registry is not same as package registry
|
||||
if (pkg && pkg.registryId && options?.registryId) {
|
||||
if (pkg.registryId !== options.registryId) {
|
||||
throw new RegistryNotMatchError(`package ${fullname} is not in registry ${options.registryId}`);
|
||||
}
|
||||
}
|
||||
return await this.taskService.createTask(Task.createSyncPackage(fullname, options), true);
|
||||
}
|
||||
|
||||
@@ -71,7 +100,7 @@ export class PackageSyncerService extends AbstractService {
|
||||
}
|
||||
|
||||
public async findExecuteTask() {
|
||||
return await this.taskService.findExecuteTask(TaskType.SyncPackage);
|
||||
return await this.taskService.findExecuteTask(TaskType.SyncPackage) as CreateSyncPackageTask;
|
||||
}
|
||||
|
||||
public get allowSyncDownloadData() {
|
||||
@@ -96,7 +125,8 @@ export class PackageSyncerService extends AbstractService {
|
||||
logs.push(`[${isoNow()}][DownloadData] 🚧🚧🚧🚧🚧 Syncing "${fullname}" download data "${start}:${end}" on ${registry} 🚧🚧🚧🚧🚧`);
|
||||
const failEnd = '❌❌❌❌❌ 🚮 give up 🚮 ❌❌❌❌❌';
|
||||
try {
|
||||
const { data, status, res } = await this.npmRegistry.getDownloadRanges(registry, fullname, start, end);
|
||||
const { remoteAuthToken } = task.data as SyncPackageTaskOptions;
|
||||
const { data, status, res } = await this.npmRegistry.getDownloadRanges(registry, fullname, start, end, { remoteAuthToken });
|
||||
downloads = data.downloads || [];
|
||||
logs.push(`[${isoNow()}][DownloadData] 🚧 HTTP [${status}] timing: ${JSON.stringify(res.timing)}, downloads: ${downloads.length}`);
|
||||
} catch (err: any) {
|
||||
@@ -132,12 +162,13 @@ export class PackageSyncerService extends AbstractService {
|
||||
private async syncUpstream(task: Task) {
|
||||
const registry = this.npmRegistry.registry;
|
||||
const fullname = task.targetName;
|
||||
const { remoteAuthToken } = task.data as SyncPackageTaskOptions;
|
||||
let logs: string[] = [];
|
||||
let logId = '';
|
||||
logs.push(`[${isoNow()}][UP] 🚧🚧🚧🚧🚧 Waiting sync "${fullname}" task on ${registry} 🚧🚧🚧🚧🚧`);
|
||||
const failEnd = `❌❌❌❌❌ Sync ${registry}/${fullname} 🚮 give up 🚮 ❌❌❌❌❌`;
|
||||
try {
|
||||
const { data, status, res } = await this.npmRegistry.createSyncTask(fullname);
|
||||
const { data, status, res } = await this.npmRegistry.createSyncTask(fullname, { remoteAuthToken });
|
||||
logs.push(`[${isoNow()}][UP] 🚧 HTTP [${status}] timing: ${JSON.stringify(res.timing)}, data: ${JSON.stringify(data)}`);
|
||||
logId = data.logId;
|
||||
} catch (err: any) {
|
||||
@@ -160,9 +191,10 @@ export class PackageSyncerService extends AbstractService {
|
||||
let useTime = Date.now() - startTime;
|
||||
while (useTime < maxTimeout) {
|
||||
// sleep 1s ~ 6s in random
|
||||
await setTimeout(1000 + Math.random() * 5000);
|
||||
const delay = process.env.NODE_ENV === 'test' ? 100 : 1000 + Math.random() * 5000;
|
||||
await setTimeout(delay);
|
||||
try {
|
||||
const { data, status, url } = await this.npmRegistry.getSyncTask(fullname, logId, offset);
|
||||
const { data, status, url } = await this.npmRegistry.getSyncTask(fullname, logId, offset, { remoteAuthToken });
|
||||
useTime = Date.now() - startTime;
|
||||
if (!logUrl) {
|
||||
logUrl = url;
|
||||
@@ -190,10 +222,137 @@ export class PackageSyncerService extends AbstractService {
|
||||
await this.taskService.appendTaskLog(task, logs.join('\n'));
|
||||
}
|
||||
|
||||
private isRemovedInRemote(remoteFetchResult: RegistryResponse) {
|
||||
const { status, data } = remoteFetchResult;
|
||||
|
||||
// deleted or blocked
|
||||
if (status === 404 || status === 451) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const hasMaintainers = data?.maintainers && data?.maintainers.length !== 0;
|
||||
if (hasMaintainers) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// unpublished
|
||||
const timeMap = data.time || {};
|
||||
if (timeMap.unpublished) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// security holder
|
||||
// test/fixtures/registry.npmjs.org/security-holding-package.json
|
||||
let isSecurityHolder = true;
|
||||
for (const versionInfo of Object.entries<{ _npmUser?: { name: string } }>(data.versions || {})) {
|
||||
const [ v, info ] = versionInfo;
|
||||
// >=0.0.1-security <0.0.2-0
|
||||
const isSecurityVersion = semver.satisfies(v, '^0.0.1-security');
|
||||
const isNpmUser = info?._npmUser?.name === 'npm';
|
||||
if (!isSecurityVersion || !isNpmUser) {
|
||||
isSecurityHolder = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return isSecurityHolder;
|
||||
}
|
||||
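Conceptually, `isRemovedInRemote` returns true for a 404/451 response, for a manifest with no maintainers left whose `time.unpublished` is set, and for a security-holder placeholder where every version matches `^0.0.1-security` and was published by the `npm` user. That last check leans on semver range semantics with prereleases; a small sketch of the behaviour it relies on (not a fixture from this repository):

```ts
import semver from 'semver';

// '^0.0.1-security' expands to '>=0.0.1-security <0.0.2-0', so only the
// 0.0.1 security-holder prerelease (and 0.0.1 itself) can satisfy it.
console.log(semver.satisfies('0.0.1-security', '^0.0.1-security')); // true
console.log(semver.satisfies('1.2.3', '^0.0.1-security'));          // false
```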
|
||||
// sync deleted package, deps on the syncDeleteMode
|
||||
// - ignore: do nothing, just finish the task
|
||||
// - delete: remove the package from local registry
|
||||
// - block: block the package, update the manifest.block, instead of delete versions
|
||||
// handle the deleted-package scenario according to the syncDeleteMode config
// - ignore: do nothing, just finish the task
// - delete: delete the package data, including the manifest dists
// - block: soft delete, mark the package as blocked so users cannot install it directly
|
||||
private async syncDeletePkg({ task, pkg, logUrl, url, logs, data }: syncDeletePkgOptions) {
|
||||
const fullname = task.targetName;
|
||||
const failEnd = `❌❌❌❌❌ ${url || fullname} ❌❌❌❌❌`;
|
||||
const syncDeleteMode: SyncDeleteMode = this.config.cnpmcore.syncDeleteMode;
|
||||
logs.push(`[${isoNow()}] 🟢 Package "${fullname}" was removed in remote registry, response data: ${JSON.stringify(data)}, config.syncDeleteMode = ${syncDeleteMode}`);
|
||||
|
||||
// pkg not exists in local registry
|
||||
if (!pkg) {
|
||||
task.error = `Package not exists, response data: ${JSON.stringify(data)}`;
|
||||
logs.push(`[${isoNow()}] ❌ ${task.error}, log: ${logUrl}`);
|
||||
logs.push(`[${isoNow()}] ${failEnd}`);
|
||||
await this.taskService.finishTask(task, TaskState.Fail, logs.join('\n'));
|
||||
this.logger.info('[PackageSyncerService.executeTask:fail-404] taskId: %s, targetName: %s, %s',
|
||||
task.taskId, task.targetName, task.error);
|
||||
return;
|
||||
}
|
||||
|
||||
if (syncDeleteMode === SyncDeleteMode.ignore) {
|
||||
// ignore deleted package
|
||||
logs.push(`[${isoNow()}] 🟢 Skip remove since config.syncDeleteMode = ignore`);
|
||||
} else if (syncDeleteMode === SyncDeleteMode.block) {
|
||||
// block deleted package
|
||||
await this.packageManagerService.blockPackage(pkg, 'Removed in remote registry');
|
||||
logs.push(`[${isoNow()}] 🟢 Block the package since config.syncDeleteMode = block`);
|
||||
} else if (syncDeleteMode === SyncDeleteMode.delete) {
|
||||
// delete package
|
||||
await this.packageManagerService.unpublishPackage(pkg);
|
||||
logs.push(`[${isoNow()}] 🟢 Delete the package since config.syncDeleteMode = delete`);
|
||||
}
|
||||
|
||||
// update log
|
||||
logs.push(`[${isoNow()}] 🟢 log: ${logUrl}`);
|
||||
logs.push(`[${isoNow()}] 🟢🟢🟢🟢🟢 ${url} 🟢🟢🟢🟢🟢`);
|
||||
await this.taskService.finishTask(task, TaskState.Success, logs.join('\n'));
|
||||
this.logger.info('[PackageSyncerService.executeTask:remove-package] taskId: %s, targetName: %s',
|
||||
task.taskId, task.targetName);
|
||||
|
||||
}
|
||||
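The three branches above are driven by `config.cnpmcore.syncDeleteMode`. A hedged sketch of how a deployment might pick the soft-delete behaviour (the config file path and surrounding shape are assumptions; only the option name and enum come from this diff):

```ts
// config/config.default.ts (sketch)
import { SyncDeleteMode } from '../app/common/constants';

export default () => {
  return {
    cnpmcore: {
      // how to handle a package that has been removed in the upstream registry:
      // SyncDeleteMode.ignore | SyncDeleteMode.delete | SyncDeleteMode.block
      syncDeleteMode: SyncDeleteMode.block,
    },
  };
};
```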
|
||||
// Resolve the registry for this task:
// 1. prefer pkg.registryId (once set, it should not change)
// 2. then task.data.registryId (passed in when a single-package sync task is created)
// 3. then derive it from the scope (no registryId when the package is synced as a dependency)
// 4. finally fall back to the default registryId (the default registry may not exist yet)
|
||||
public async initSpecRegistry(task: Task, pkg: Package | null = null, scope?: string): Promise<Registry | null> {
|
||||
const registryId = pkg?.registryId || (task.data as SyncPackageTaskOptions).registryId;
|
||||
let targetHost: string = this.config.cnpmcore.sourceRegistry;
|
||||
let registry: Registry | null = null;
|
||||
|
||||
// no registryId is set when this task was created as a dependency sync
// legacy tasks may also be missing registryId
|
||||
if (registryId) {
|
||||
registry = await this.registryManagerService.findByRegistryId(registryId);
|
||||
} else if (scope) {
|
||||
const scopeModel = await this.scopeManagerService.findByName(scope);
|
||||
if (scopeModel?.registryId) {
|
||||
registry = await this.registryManagerService.findByRegistryId(scopeModel?.registryId);
|
||||
}
|
||||
}
|
||||
|
||||
// fall back to the default registry
|
||||
if (!registry) {
|
||||
registry = await this.registryManagerService.ensureDefaultRegistry();
|
||||
}
|
||||
|
||||
// update the targetHost
// the default registry may not have been created yet
|
||||
if (registry?.host) {
|
||||
targetHost = registry.host;
|
||||
}
|
||||
this.npmRegistry.setRegistryHost(targetHost);
|
||||
return registry;
|
||||
}
|
||||
|
||||
// cnpmcore dispatches version and tag as two separate changes events,
// so a normal publish creates two identical sync tasks within a short window.
// Reads and writes must both stay idempotent, and changes must only fire after the sync task completes:
// a DB unique index keeps task creation idempotent (a failed insert does not affect the pkg.manifests update),
// and eventBus.cork/uncork is used to defer event emission.
|
||||
@Pointcut(EventCorkAdvice)
|
||||
public async executeTask(task: Task) {
|
||||
const fullname = task.targetName;
|
||||
const { tips, skipDependencies: originSkipDependencies, syncDownloadData } = task.data as SyncPackageTaskOptions;
|
||||
const registry = this.npmRegistry.registry;
|
||||
const [ scope, name ] = getScopeAndName(fullname);
|
||||
const { tips, skipDependencies: originSkipDependencies, syncDownloadData, forceSyncHistory, remoteAuthToken } = task.data as SyncPackageTaskOptions;
|
||||
let pkg = await this.packageRepository.findPackage(scope, name);
|
||||
const registry = await this.initSpecRegistry(task, pkg, scope);
|
||||
const registryHost = this.npmRegistry.registry;
|
||||
let logs: string[] = [];
|
||||
if (tips) {
|
||||
logs.push(`[${isoNow()}] 👉👉👉👉👉 Tips: ${tips} 👈👈👈👈👈`);
|
||||
@@ -206,11 +365,23 @@ export class PackageSyncerService extends AbstractService {
|
||||
const logUrl = `${this.config.cnpmcore.registry}/-/package/${fullname}/syncs/${task.taskId}/log`;
|
||||
this.logger.info('[PackageSyncerService.executeTask:start] taskId: %s, targetName: %s, attempts: %s, taskQueue: %s/%s, syncUpstream: %s, log: %s',
|
||||
task.taskId, task.targetName, task.attempts, taskQueueLength, taskQueueHighWaterSize, syncUpstream, logUrl);
|
||||
logs.push(`[${isoNow()}] 🚧🚧🚧🚧🚧 Syncing from ${registry}/${fullname}, skipDependencies: ${skipDependencies}, syncUpstream: ${syncUpstream}, syncDownloadData: ${!!syncDownloadData}, attempts: ${task.attempts}, worker: "${os.hostname()}/${process.pid}", taskQueue: ${taskQueueLength}/${taskQueueHighWaterSize} 🚧🚧🚧🚧🚧`);
|
||||
logs.push(`[${isoNow()}] 🚧🚧🚧🚧🚧 Syncing from ${registryHost}/${fullname}, skipDependencies: ${skipDependencies}, syncUpstream: ${syncUpstream}, syncDownloadData: ${!!syncDownloadData}, forceSyncHistory: ${!!forceSyncHistory} attempts: ${task.attempts}, worker: "${os.hostname()}/${process.pid}", taskQueue: ${taskQueueLength}/${taskQueueHighWaterSize} 🚧🚧🚧🚧🚧`);
|
||||
logs.push(`[${isoNow()}] 🚧 log: ${logUrl}`);
|
||||
|
||||
const [ scope, name ] = getScopeAndName(fullname);
|
||||
let pkg = await this.packageRepository.findPackage(scope, name);
|
||||
if (pkg && pkg?.registryId !== registry?.registryId) {
|
||||
if (pkg.registryId) {
|
||||
logs.push(`[${isoNow()}] ❌❌❌❌❌ ${fullname} registry is ${pkg.registryId} not belong to ${registry?.registryId}, skip sync ❌❌❌❌❌`);
|
||||
await this.taskService.finishTask(task, TaskState.Fail, logs.join('\n'));
|
||||
this.logger.info('[PackageSyncerService.executeTask:fail] taskId: %s, targetName: %s, invalid registryId',
|
||||
task.taskId, task.targetName);
|
||||
return;
|
||||
}
|
||||
// packages created before multi-registry support have no registryId,
// and publish() does not update registryId when the version is unchanged,
// so update it here before syncing.
|
||||
pkg.registryId = registry?.registryId;
|
||||
await this.packageRepository.savePackage(pkg);
|
||||
}
|
||||
|
||||
if (syncDownloadData && pkg) {
|
||||
await this.syncDownloadData(task, pkg);
|
||||
@@ -239,11 +410,11 @@ export class PackageSyncerService extends AbstractService {
|
||||
return;
|
||||
}
|
||||
|
||||
let result: any;
|
||||
let registryFetchResult: RegistryResponse;
|
||||
try {
|
||||
result = await this.npmRegistry.getFullManifests(fullname);
|
||||
registryFetchResult = await this.npmRegistry.getFullManifests(fullname, { remoteAuthToken });
|
||||
} catch (err: any) {
|
||||
const status = err.status || 'unknow';
|
||||
const status = err.status || 'unknown';
|
||||
task.error = `request manifests error: ${err}, status: ${status}`;
|
||||
logs.push(`[${isoNow()}] ❌ Synced ${fullname} fail, ${task.error}, log: ${logUrl}`);
|
||||
logs.push(`[${isoNow()}] ❌❌❌❌❌ ${fullname} ❌❌❌❌❌`);
|
||||
@@ -253,7 +424,22 @@ export class PackageSyncerService extends AbstractService {
|
||||
return;
|
||||
}
|
||||
|
||||
const { url, data, headers, res, status } = result;
|
||||
const { url, data, headers, res, status } = registryFetchResult;
|
||||
/* c8 ignore next 13 */
|
||||
if (status >= 500 || !data) {
|
||||
// GET https://registry.npmjs.org/%40modern-js%2Fstyle-compiler?t=1683348626499&cache=0, status: 522
|
||||
// registry will response status 522 and data will be null
|
||||
// > TypeError: Cannot read properties of null (reading 'readme')
|
||||
task.error = `request manifests response error, status: ${status}, data: ${JSON.stringify(data)}`;
|
||||
logs.push(`[${isoNow()}] ❌ response headers: ${JSON.stringify(headers)}`);
|
||||
logs.push(`[${isoNow()}] ❌ Synced ${fullname} fail, ${task.error}, log: ${logUrl}`);
|
||||
logs.push(`[${isoNow()}] ❌❌❌❌❌ ${fullname} ❌❌❌❌❌`);
|
||||
this.logger.info('[PackageSyncerService.executeTask:fail-request-error] taskId: %s, targetName: %s, %s',
|
||||
task.taskId, task.targetName, task.error);
|
||||
await this.taskService.retryTask(task, logs.join('\n'));
|
||||
return;
|
||||
}
|
||||
|
||||
let readme = data.readme || '';
|
||||
if (typeof readme !== 'string') {
|
||||
readme = JSON.stringify(readme);
|
||||
@@ -267,29 +453,19 @@ export class PackageSyncerService extends AbstractService {
|
||||
const contentLength = headers['content-length'] || '-';
|
||||
logs.push(`[${isoNow()}] HTTP [${status}] content-length: ${contentLength}, timing: ${JSON.stringify(res.timing)}`);
|
||||
|
||||
if (status === 404) {
|
||||
if (pkg) {
|
||||
await this.packageManagerService.unpublishPackage(pkg);
|
||||
logs.push(`[${isoNow()}] 🟢 Package "${fullname}" was unpublished caused by 404 response: ${JSON.stringify(data)}`);
|
||||
logs.push(`[${isoNow()}] 🟢 log: ${logUrl}`);
|
||||
logs.push(`[${isoNow()}] 🟢🟢🟢🟢🟢 ${url} 🟢🟢🟢🟢🟢`);
|
||||
await this.taskService.finishTask(task, TaskState.Success, logs.join('\n'));
|
||||
this.logger.info('[PackageSyncerService.executeTask:remove-package] taskId: %s, targetName: %s',
|
||||
task.taskId, task.targetName);
|
||||
} else {
|
||||
task.error = `Package not exists, response data: ${JSON.stringify(data)}`;
|
||||
logs.push(`[${isoNow()}] ❌ ${task.error}, log: ${logUrl}`);
|
||||
logs.push(`[${isoNow()}] ${failEnd}`);
|
||||
await this.taskService.finishTask(task, TaskState.Fail, logs.join('\n'));
|
||||
this.logger.info('[PackageSyncerService.executeTask:fail-404] taskId: %s, targetName: %s, %s',
|
||||
task.taskId, task.targetName, task.error);
|
||||
}
|
||||
if (this.isRemovedInRemote(registryFetchResult)) {
|
||||
await this.syncDeletePkg({ task, pkg, logs, logUrl, url, data });
|
||||
return;
|
||||
}
|
||||
|
||||
const versionMap = data.versions || {};
|
||||
const distTags = data['dist-tags'] || {};
|
||||
|
||||
// show latest information
|
||||
if (distTags.latest) {
|
||||
logs.push(`[${isoNow()}] 📖 ${fullname} latest version: ${distTags.latest ?? '-'}, published time: ${JSON.stringify(timeMap[distTags.latest])}`);
|
||||
}
|
||||
|
||||
// 1. save maintainers
|
||||
// maintainers: [
|
||||
// { name: 'bomsy', email: 'b4bomsy@gmail.com' },
|
||||
@@ -322,7 +498,7 @@ export class PackageSyncerService extends AbstractService {
|
||||
for (const maintainer of maintainers) {
|
||||
if (maintainer.name && maintainer.email) {
|
||||
maintainersMap[maintainer.name] = maintainer;
|
||||
const { changed, user } = await this.userService.savePublicUser(maintainer.name, maintainer.email);
|
||||
const { changed, user } = await this.userService.saveUser(registry?.userPrefix, maintainer.name, maintainer.email);
|
||||
users.push(user);
|
||||
if (changed) {
|
||||
changedUserCount++;
|
||||
@@ -349,20 +525,6 @@ export class PackageSyncerService extends AbstractService {
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
if (timeMap.unpublished) {
|
||||
if (pkg) {
|
||||
await this.packageManagerService.unpublishPackage(pkg);
|
||||
logs.push(`[${isoNow()}] 🟢 Sync unpublished package: ${JSON.stringify(timeMap.unpublished)} success`);
|
||||
} else {
|
||||
logs.push(`[${isoNow()}] 📖 Ignore unpublished package: ${JSON.stringify(timeMap.unpublished)}`);
|
||||
}
|
||||
logs.push(`[${isoNow()}] 🟢 log: ${logUrl}`);
|
||||
logs.push(`[${isoNow()}] 🟢🟢🟢🟢🟢 ${url} 🟢🟢🟢🟢🟢`);
|
||||
await this.taskService.finishTask(task, TaskState.Success, logs.join('\n'));
|
||||
this.logger.info('[PackageSyncerService.executeTask:success] taskId: %s, targetName: %s',
|
||||
task.taskId, task.targetName);
|
||||
return;
|
||||
}
|
||||
|
||||
// invalid maintainers, sync fail
|
||||
task.error = `invalid maintainers: ${JSON.stringify(maintainers)}`;
|
||||
@@ -390,8 +552,8 @@ export class PackageSyncerService extends AbstractService {
|
||||
for (const item of versions) {
|
||||
const version: string = item.version;
|
||||
if (!version) continue;
|
||||
let existsItem = existsVersionMap[version];
|
||||
const existsAbbreviatedItem = abbreviatedVersionMap[version];
|
||||
let existsItem: typeof existsVersionMap[string] | undefined = existsVersionMap[version];
|
||||
let existsAbbreviatedItem: typeof abbreviatedVersionMap[string] | undefined = abbreviatedVersionMap[version];
|
||||
const shouldDeleteReadme = !!(existsItem && 'readme' in existsItem);
|
||||
if (pkg) {
|
||||
if (existsItem) {
|
||||
@@ -400,16 +562,17 @@ export class PackageSyncerService extends AbstractService {
|
||||
updateVersions.push(version);
|
||||
logs.push(`[${isoNow()}] 🐛 Remote version ${version} not exists on local abbreviated manifests, need to refresh`);
|
||||
}
|
||||
} else {
|
||||
// try to read from the db to detect whether the last sync was interrupted before refreshPackageManifestsToDists() was called
|
||||
existsItem = await this.distRepository.findPackageVersionManifest(pkg.packageId, version);
|
||||
// only allow an existsItem from the db to force a refresh, to avoid refreshing a huge number of versions
|
||||
// see https://r.cnpmjs.org/-/package/@npm-torg/public-scoped-free-org-test-package-2/syncs/61fcc7e8c1646e26a845b674/log
|
||||
if (existsItem) {
|
||||
// version does not exist on manifests, need to refresh
|
||||
// bugfix: https://github.com/cnpm/cnpmcore/issues/115
|
||||
updateVersions.push(version);
|
||||
logs.push(`[${isoNow()}] 🐛 Remote version ${version} not exists on local manifests, need to refresh`);
|
||||
}
|
||||
|
||||
if (existsItem && forceSyncHistory === true) {
|
||||
const pkgVer = await this.packageRepository.findPackageVersion(pkg.packageId, version);
|
||||
if (pkgVer) {
|
||||
logs.push(`[${isoNow()}] 🚧 [${syncIndex}] Remove version ${version} for force sync history`);
|
||||
await this.packageManagerService.removePackageVersion(pkg, pkgVer, true);
|
||||
existsItem = undefined;
|
||||
existsAbbreviatedItem = undefined;
|
||||
existsVersionMap[version] = undefined;
|
||||
abbreviatedVersionMap[version] = undefined;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -472,7 +635,7 @@ export class PackageSyncerService extends AbstractService {
|
||||
let localFile: string;
|
||||
try {
|
||||
const { tmpfile, headers, timing } =
|
||||
await downloadToTempfile(this.httpclient, this.config.dataDir, tarball);
|
||||
await downloadToTempfile(this.httpclient, this.config.dataDir, tarball, { remoteAuthToken });
|
||||
localFile = tmpfile;
|
||||
logs.push(`[${isoNow()}] 🚧 [${syncIndex}] HTTP content-length: ${headers['content-length']}, timing: ${JSON.stringify(timing)} => ${localFile}`);
|
||||
} catch (err: any) {
|
||||
@@ -486,17 +649,6 @@ export class PackageSyncerService extends AbstractService {
|
||||
if (!pkg) {
|
||||
pkg = await this.packageRepository.findPackage(scope, name);
|
||||
}
|
||||
if (pkg) {
|
||||
// check again, make sure the version does not already exist
|
||||
const existsPkgVersion = await this.packageRepository.findPackageVersion(pkg.packageId, version);
|
||||
if (existsPkgVersion) {
|
||||
await rm(localFile, { force: true });
|
||||
logs.push(`[${isoNow()}] 🐛 [${syncIndex}] Synced version ${version} already exists, skip publish it`);
|
||||
await this.taskService.appendTaskLog(task, logs.join('\n'));
|
||||
logs = [];
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
const publishCmd = {
|
||||
scope,
|
||||
@@ -505,6 +657,7 @@ export class PackageSyncerService extends AbstractService {
|
||||
description,
|
||||
packageJson: item,
|
||||
readme,
|
||||
registryId: registry?.registryId,
|
||||
dist: {
|
||||
localFile,
|
||||
},
|
||||
@@ -513,12 +666,15 @@ export class PackageSyncerService extends AbstractService {
|
||||
skipRefreshPackageManifests: true,
|
||||
};
|
||||
try {
|
||||
// when the version record already exists, we still need to verify that pkg.manifests exists
|
||||
const pkgVersion = await this.packageManagerService.publish(publishCmd, users[0]);
|
||||
updateVersions.push(pkgVersion.version);
|
||||
logs.push(`[${isoNow()}] 🟢 [${syncIndex}] Synced version ${version} success, packageVersionId: ${pkgVersion.packageVersionId}, db id: ${pkgVersion.id}`);
|
||||
} catch (err: any) {
|
||||
if (err.name === 'ForbiddenError') {
|
||||
logs.push(`[${isoNow()}] 🐛 [${syncIndex}] Synced version ${version} already exists, skip publish error`);
|
||||
logs.push(`[${isoNow()}] 🐛 [${syncIndex}] Synced version ${version} already exists, skip publish, try to set in local manifest`);
|
||||
// if pkg.manifests does not exist, it needs to be backfilled
|
||||
updateVersions.push(version);
|
||||
} else {
|
||||
err.taskId = task.taskId;
|
||||
this.logger.error(err);
|
||||
@@ -530,10 +686,14 @@ export class PackageSyncerService extends AbstractService {
|
||||
logs = [];
|
||||
await rm(localFile, { force: true });
|
||||
if (!skipDependencies) {
|
||||
const dependencies = item.dependencies || {};
|
||||
const dependencies: Record<string, string> = item.dependencies || {};
|
||||
for (const dependencyName in dependencies) {
|
||||
dependenciesSet.add(dependencyName);
|
||||
}
|
||||
const optionalDependencies: Record<string, string> = item.optionalDependencies || {};
|
||||
for (const dependencyName in optionalDependencies) {
|
||||
dependenciesSet.add(dependencyName);
|
||||
}
|
||||
}
|
||||
}
|
||||
// try to read package entity again after first sync
|
||||
@@ -597,6 +757,17 @@ export class PackageSyncerService extends AbstractService {
|
||||
let shouldRefreshDistTags = false;
|
||||
for (const tag in distTags) {
|
||||
const version = distTags[tag];
|
||||
const utf8mb3Regex = /[\u0020-\uD7FF\uE000-\uFFFD]/;
|
||||
if (!utf8mb3Regex.test(tag)) {
|
||||
logs.push(`[${isoNow()}] 🚧 invalid tag(${tag}: ${version}), tag name is out of utf8mb3, skip`);
|
||||
continue;
|
||||
}
|
||||
// the version the new tag points to is neither in the existing data nor in this sync's version list
// e.g. the version that latest points to failed to be written and was skipped
|
||||
if (!existsVersionMap[version] && !updateVersions.includes(version)) {
|
||||
logs.push(`[${isoNow()}] 🚧 invalid tag(${tag}: ${version}), version is not exists, skip`);
|
||||
continue;
|
||||
}
|
||||
const changed = await this.packageManagerService.savePackageTag(pkg, tag, version);
|
||||
if (changed) {
|
||||
changedTags.push({ action: 'change', tag, version });
|
||||
@@ -629,16 +800,10 @@ export class PackageSyncerService extends AbstractService {
|
||||
// 4.1 find out remove maintainers
|
||||
const removedMaintainers: unknown[] = [];
|
||||
const existsMaintainers = existsData && existsData.maintainers || [];
|
||||
let shouldRefreshMaintainers = false;
|
||||
for (const maintainer of existsMaintainers) {
|
||||
let npmUserName = maintainer.name;
|
||||
if (npmUserName.startsWith('npm:')) {
|
||||
// fix cache npm user name
|
||||
npmUserName = npmUserName.replace('npm:', '');
|
||||
shouldRefreshMaintainers = true;
|
||||
}
|
||||
if (!(npmUserName in maintainersMap)) {
|
||||
const user = await this.userRepository.findUserByName(`npm:${npmUserName}`);
|
||||
const { name } = maintainer;
|
||||
if (!(name in maintainersMap)) {
|
||||
const user = await this.userRepository.findUserByName(`${registry?.userPrefix || 'npm:'}${name}`);
|
||||
if (user) {
|
||||
await this.packageManagerService.removePackageMaintainer(pkg, user);
|
||||
removedMaintainers.push(maintainer);
|
||||
@@ -647,14 +812,11 @@ export class PackageSyncerService extends AbstractService {
|
||||
}
|
||||
if (removedMaintainers.length > 0) {
|
||||
logs.push(`[${isoNow()}] 🟢 Removed ${removedMaintainers.length} maintainers: ${JSON.stringify(removedMaintainers)}`);
|
||||
} else if (shouldRefreshMaintainers) {
|
||||
await this.packageManagerService.refreshPackageMaintainersToDists(pkg);
|
||||
logs.push(`[${isoNow()}] 🟢 Refresh maintainers`);
|
||||
}
|
||||
|
||||
// 5. add deps sync task
|
||||
for (const dependencyName of dependenciesSet) {
|
||||
const existsTask = await this.taskRepository.findTaskByTargetName(fullname, TaskType.SyncPackage, TaskState.Waiting);
|
||||
const existsTask = await this.taskRepository.findTaskByTargetName(dependencyName, TaskType.SyncPackage, TaskState.Waiting);
|
||||
if (existsTask) {
|
||||
logs.push(`[${isoNow()}] 📖 Has dependency "${dependencyName}" sync task: ${existsTask.taskId}, db id: ${existsTask.id}`);
|
||||
continue;
|
||||
|
||||
147
app/core/service/PackageVersionFileService.ts
Normal file
@@ -0,0 +1,147 @@
|
||||
import fs from 'node:fs/promises';
|
||||
import { join, dirname, basename } from 'node:path';
|
||||
import { randomUUID } from 'node:crypto';
|
||||
import tar from 'tar';
|
||||
import {
|
||||
AccessLevel,
|
||||
SingletonProto,
|
||||
Inject,
|
||||
} from '@eggjs/tegg';
|
||||
import { AbstractService } from '../../common/AbstractService';
|
||||
import {
|
||||
calculateIntegrity,
|
||||
} from '../../common/PackageUtil';
|
||||
import { createTempDir, mimeLookup } from '../../common/FileUtil';
|
||||
import {
|
||||
PackageRepository,
|
||||
} from '../../repository/PackageRepository';
|
||||
import { PackageVersionFileRepository } from '../../repository/PackageVersionFileRepository';
|
||||
import { DistRepository } from '../../repository/DistRepository';
|
||||
import { PackageVersionFile } from '../entity/PackageVersionFile';
|
||||
import { PackageVersion } from '../entity/PackageVersion';
|
||||
import { Package } from '../entity/Package';
|
||||
|
||||
@SingletonProto({
|
||||
accessLevel: AccessLevel.PUBLIC,
|
||||
})
|
||||
export class PackageVersionFileService extends AbstractService {
|
||||
@Inject()
|
||||
private readonly packageRepository: PackageRepository;
|
||||
@Inject()
|
||||
private readonly packageVersionFileRepository: PackageVersionFileRepository;
|
||||
@Inject()
|
||||
private readonly distRepository: DistRepository;
|
||||
|
||||
async listPackageVersionFiles(pkgVersion: PackageVersion, directory: string) {
|
||||
await this.#ensurePackageVersionFilesSync(pkgVersion);
|
||||
return await this.packageVersionFileRepository.listPackageVersionFiles(pkgVersion.packageVersionId, directory);
|
||||
}
|
||||
|
||||
async showPackageVersionFile(pkgVersion: PackageVersion, path: string) {
|
||||
await this.#ensurePackageVersionFilesSync(pkgVersion);
|
||||
const { directory, name } = this.#getDirectoryAndName(path);
|
||||
return await this.packageVersionFileRepository.findPackageVersionFile(
|
||||
pkgVersion.packageVersionId, directory, name);
|
||||
}
|
||||
|
||||
async #ensurePackageVersionFilesSync(pkgVersion: PackageVersion) {
|
||||
const hasFiles = await this.packageVersionFileRepository.hasPackageVersionFiles(pkgVersion.packageVersionId);
|
||||
if (!hasFiles) {
|
||||
await this.syncPackageVersionFiles(pkgVersion);
|
||||
}
|
||||
}
|
||||
|
||||
async syncPackageVersionFiles(pkgVersion: PackageVersion) {
|
||||
const files: PackageVersionFile[] = [];
|
||||
const pkg = await this.packageRepository.findPackageByPackageId(pkgVersion.packageId);
|
||||
if (!pkg) return files;
|
||||
const dirname = `unpkg_${pkg.fullname.replace('/', '_')}@${pkgVersion.version}_${randomUUID()}`;
|
||||
const tmpdir = await createTempDir(this.config.dataDir, dirname);
|
||||
const tarFile = `${tmpdir}.tgz`;
|
||||
const paths: string[] = [];
|
||||
try {
|
||||
this.logger.info('[PackageVersionFileService.syncPackageVersionFiles:download-start] dist:%s(path:%s, size:%s) => tarFile:%s',
|
||||
pkgVersion.tarDist.distId, pkgVersion.tarDist.path, pkgVersion.tarDist.size, tarFile);
|
||||
await this.distRepository.downloadDistToFile(pkgVersion.tarDist, tarFile);
|
||||
this.logger.info('[PackageVersionFileService.syncPackageVersionFiles:extract-start] tmpdir:%s', tmpdir);
|
||||
await tar.extract({
|
||||
file: tarFile,
|
||||
cwd: tmpdir,
|
||||
strip: 1,
|
||||
onentry: entry => {
|
||||
if (entry.type !== 'File') return;
|
||||
// ignore hidden dir
|
||||
if (entry.path.includes('/./')) return;
|
||||
// https://github.com/cnpm/cnpmcore/issues/452#issuecomment-1570077310
|
||||
// strip first dir, e.g.: 'package/', 'lodash-es/'
|
||||
paths.push('/' + entry.path.split('/').slice(1).join('/'));
|
||||
},
|
||||
});
|
||||
for (const path of paths) {
|
||||
const localFile = join(tmpdir, path);
|
||||
const file = await this.#savePackageVersionFile(pkg, pkgVersion, path, localFile);
|
||||
files.push(file);
|
||||
}
|
||||
this.logger.info('[PackageVersionFileService.syncPackageVersionFiles:success] packageVersionId: %s, %d paths, %d files, tmpdir: %s',
|
||||
pkgVersion.packageVersionId, paths.length, files.length, tmpdir);
|
||||
return files;
|
||||
} catch (err) {
|
||||
this.logger.warn('[PackageVersionFileService.syncPackageVersionFiles:error] packageVersionId: %s, %d paths, tmpdir: %s, error: %s',
|
||||
pkgVersion.packageVersionId, paths.length, tmpdir, err);
|
||||
// ignore TAR_BAD_ARCHIVE error
|
||||
if (err.code === 'TAR_BAD_ARCHIVE') return files;
|
||||
throw err;
|
||||
} finally {
|
||||
try {
|
||||
await fs.rm(tarFile, { force: true });
|
||||
await fs.rm(tmpdir, { recursive: true, force: true });
|
||||
} catch (err) {
|
||||
this.logger.warn('[PackageVersionFileService.syncPackageVersionFiles:warn] remove tmpdir: %s, error: %s',
|
||||
tmpdir, err);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async #savePackageVersionFile(pkg: Package, pkgVersion: PackageVersion, path: string, localFile: string) {
|
||||
const { directory, name } = this.#getDirectoryAndName(path);
|
||||
let file = await this.packageVersionFileRepository.findPackageVersionFile(
|
||||
pkgVersion.packageVersionId, directory, name);
|
||||
if (file) return file;
|
||||
const stat = await fs.stat(localFile);
|
||||
const distIntegrity = await calculateIntegrity(localFile);
|
||||
// make sure dist.path is stored as ascii, e.g. '/resource/ToOneFromχ.js' => '/resource/ToOneFrom%CF%87.js'
|
||||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/encodeURI
|
||||
const distPath = encodeURI(path);
|
||||
const dist = pkg.createPackageVersionFile(distPath, pkgVersion.version, {
|
||||
size: stat.size,
|
||||
shasum: distIntegrity.shasum,
|
||||
integrity: distIntegrity.integrity,
|
||||
});
|
||||
await this.distRepository.saveDist(dist, localFile);
|
||||
file = PackageVersionFile.create({
|
||||
packageVersionId: pkgVersion.packageVersionId,
|
||||
directory,
|
||||
name,
|
||||
dist,
|
||||
contentType: mimeLookup(path),
|
||||
mtime: pkgVersion.publishTime,
|
||||
});
|
||||
try {
|
||||
await this.packageVersionFileRepository.createPackageVersionFile(file);
|
||||
this.logger.info('[PackageVersionFileService.#savePackageVersionFile:success] fileId: %s, size: %s, path: %s',
|
||||
file.packageVersionFileId, dist.size, file.path);
|
||||
} catch (err) {
|
||||
// ignore Duplicate entry
|
||||
if (err.code === 'ER_DUP_ENTRY') return file;
|
||||
throw err;
|
||||
}
|
||||
return file;
|
||||
}
|
||||
|
||||
#getDirectoryAndName(path: string) {
|
||||
return {
|
||||
directory: dirname(path),
|
||||
name: basename(path),
|
||||
};
|
||||
}
|
||||
}
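Not part of the diff — a rough usage sketch of the file service above, assuming pkgVersion was loaded through the repositories used elsewhere in this PR; the paths and counts are made up.

// illustrative only: list the root directory, then look up a single file's metadata
const files = await packageVersionFileService.listPackageVersionFiles(pkgVersion, '/');
const readme = await packageVersionFileService.showPackageVersionFile(pkgVersion, '/README.md');
// the first call lazily extracts the tarball via syncPackageVersionFiles() and persists every file entry
console.log(files.length, readme?.contentType); // e.g. 12, 'text/markdown'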
|
||||
159
app/core/service/RegistryManagerService.ts
Normal file
@@ -0,0 +1,159 @@
|
||||
import {
|
||||
AccessLevel,
|
||||
SingletonProto,
|
||||
Inject,
|
||||
} from '@eggjs/tegg';
|
||||
import { E400, NotFoundError } from 'egg-errors';
|
||||
import { RegistryRepository } from '../../repository/RegistryRepository';
|
||||
import { AbstractService } from '../../common/AbstractService';
|
||||
import { Registry } from '../entity/Registry';
|
||||
import { PageOptions, PageResult } from '../util/EntityUtil';
|
||||
import { ScopeManagerService } from './ScopeManagerService';
|
||||
import { TaskService } from './TaskService';
|
||||
import { Task } from '../entity/Task';
|
||||
import { ChangesStreamMode, PresetRegistryName } from '../../common/constants';
|
||||
import { RegistryType } from '../../common/enum/Registry';
|
||||
|
||||
export interface CreateRegistryCmd extends Pick<Registry, 'changeStream' | 'host' | 'userPrefix' | 'type' | 'name'> {
|
||||
operatorId?: string;
|
||||
}
|
||||
export interface UpdateRegistryCmd extends Pick<Registry, 'changeStream' | 'host' | 'userPrefix' | 'type' | 'name' | 'registryId'> {
|
||||
operatorId?: string;
|
||||
}
|
||||
export interface RemoveRegistryCmd extends Pick<Registry, 'registryId'> {
|
||||
operatorId?: string;
|
||||
}
|
||||
|
||||
export interface StartSyncCmd {
|
||||
registryId: string;
|
||||
since?: string;
|
||||
operatorId?: string;
|
||||
}
|
||||
|
||||
@SingletonProto({
|
||||
accessLevel: AccessLevel.PUBLIC,
|
||||
})
|
||||
export class RegistryManagerService extends AbstractService {
|
||||
@Inject()
|
||||
private readonly registryRepository: RegistryRepository;
|
||||
@Inject()
|
||||
private readonly scopeManagerService: ScopeManagerService;
|
||||
@Inject()
|
||||
private readonly taskService: TaskService;
|
||||
|
||||
async createSyncChangesStream(startSyncCmd: StartSyncCmd): Promise<void> {
|
||||
const { registryId, operatorId = '-', since } = startSyncCmd;
|
||||
this.logger.info('[RegistryManagerService.startSyncChangesStream:prepare] operatorId: %s, registryId: %s, since: %s', operatorId, registryId, since);
|
||||
const registry = await this.registryRepository.findRegistryByRegistryId(registryId);
|
||||
if (!registry) {
|
||||
throw new NotFoundError(`registry ${registryId} not found`);
|
||||
}
|
||||
|
||||
// to avoid conflicts with GLOBAL_WORKER, there can only be one default global registry
|
||||
const scopesCount = await this.scopeManagerService.countByRegistryId(registryId);
|
||||
if (scopesCount === 0) {
|
||||
throw new E400(`registry ${registryId} has no scopes, please create scopes first`);
|
||||
}
|
||||
|
||||
// start the changeStream
|
||||
const targetName = `${registry.name.toUpperCase()}_WORKER`;
|
||||
await this.taskService.createTask(Task.createChangesStream(targetName, registryId, since), false);
|
||||
}
|
||||
|
||||
async createRegistry(createCmd: CreateRegistryCmd): Promise<Registry> {
|
||||
const { name, changeStream = '', host, userPrefix = '', type, operatorId = '-' } = createCmd;
|
||||
this.logger.info('[RegistryManagerService.createRegistry:prepare] operatorId: %s, createCmd: %j', operatorId, createCmd);
|
||||
const registry = Registry.create({
|
||||
name,
|
||||
changeStream,
|
||||
host,
|
||||
userPrefix,
|
||||
type,
|
||||
});
|
||||
await this.registryRepository.saveRegistry(registry);
|
||||
return registry;
|
||||
}
|
||||
|
||||
// update part of the registry information
// the userPrefix field is not allowed to change
|
||||
async updateRegistry(updateCmd: UpdateRegistryCmd) {
|
||||
const { name, changeStream, host, type, registryId, operatorId = '-' } = updateCmd;
|
||||
this.logger.info('[RegistryManagerService.updateRegistry:prepare] operatorId: %s, updateCmd: %j', operatorId, updateCmd);
|
||||
const registry = await this.registryRepository.findRegistryByRegistryId(registryId);
|
||||
if (!registry) {
|
||||
throw new NotFoundError(`registry ${registryId} not found`);
|
||||
}
|
||||
Object.assign(registry, {
|
||||
name,
|
||||
changeStream,
|
||||
host,
|
||||
type,
|
||||
});
|
||||
await this.registryRepository.saveRegistry(registry);
|
||||
}
|
||||
|
||||
// list all registries with scopes
|
||||
async listRegistries(page: PageOptions): Promise<PageResult<Registry>> {
|
||||
return await this.registryRepository.listRegistries(page);
|
||||
}
|
||||
|
||||
async findByRegistryId(registryId: string): Promise<Registry | null> {
|
||||
return await this.registryRepository.findRegistryByRegistryId(registryId);
|
||||
}
|
||||
|
||||
async findByRegistryName(registryName?: string): Promise<Registry | null> {
|
||||
return await this.registryRepository.findRegistry(registryName);
|
||||
}
|
||||
|
||||
// remove a Registry
// operatorId can optionally be passed in to record the operator
// the corresponding scope data is removed as well
|
||||
async remove(removeCmd: RemoveRegistryCmd): Promise<void> {
|
||||
const { registryId, operatorId = '-' } = removeCmd;
|
||||
this.logger.info('[RegistryManagerService.remove:prepare] operatorId: %s, registryId: %s', operatorId, registryId);
|
||||
await this.registryRepository.removeRegistry(registryId);
|
||||
await this.scopeManagerService.removeByRegistryId({ registryId, operatorId });
|
||||
}
|
||||
|
||||
async ensureSelfRegistry(): Promise<Registry> {
|
||||
const existRegistry = await this.registryRepository.findRegistry(PresetRegistryName.self);
|
||||
if (existRegistry) {
|
||||
return existRegistry;
|
||||
}
|
||||
|
||||
const { registry: registryHost } = this.config.cnpmcore;
|
||||
|
||||
const newRegistry = await this.createRegistry({
|
||||
name: PresetRegistryName.self,
|
||||
host: registryHost,
|
||||
type: RegistryType.Cnpmcore,
|
||||
changeStream: '',
|
||||
userPrefix: '',
|
||||
});
|
||||
|
||||
return newRegistry;
|
||||
|
||||
}
|
||||
|
||||
async ensureDefaultRegistry(): Promise<Registry> {
|
||||
const existRegistry = await this.registryRepository.findRegistry(PresetRegistryName.default);
|
||||
if (existRegistry) {
|
||||
return existRegistry;
|
||||
}
|
||||
|
||||
// generate the default registry from the config file
|
||||
const { changesStreamRegistryMode, changesStreamRegistry: changesStreamHost, sourceRegistry: host } = this.config.cnpmcore;
|
||||
const type = changesStreamRegistryMode === ChangesStreamMode.json ? RegistryType.Cnpmcore : RegistryType.Npm;
|
||||
const registry = await this.createRegistry({
|
||||
name: PresetRegistryName.default,
|
||||
type,
|
||||
userPrefix: 'npm:',
|
||||
host,
|
||||
changeStream: `${changesStreamHost}/_changes`,
|
||||
});
|
||||
|
||||
return registry;
|
||||
|
||||
}
|
||||
|
||||
}
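Not part of the diff — a sketch of how these registry APIs fit together when wiring a third-party upstream; all names and hosts are made up, scope creation goes through the ScopeManagerService in the next file, and createSyncChangesStream requires at least one scope to exist.

// illustrative only
const registry = await registryManagerService.createRegistry({
  name: 'custom',
  type: RegistryType.Cnpmcore,
  host: 'https://registry.example.com',
  changeStream: 'https://registry.example.com/_changes',
  userPrefix: 'custom:',
});
await scopeManagerService.createScope({ name: '@example', registryId: registry.registryId });
// creates a CUSTOM_WORKER changes-stream task; throws E400 when the registry has no scopes
await registryManagerService.createSyncChangesStream({ registryId: registry.registryId });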
|
||||
74
app/core/service/ScopeManagerService.ts
Normal file
@@ -0,0 +1,74 @@
|
||||
import {
|
||||
AccessLevel,
|
||||
SingletonProto,
|
||||
Inject,
|
||||
} from '@eggjs/tegg';
|
||||
import { ScopeRepository } from '../../repository/ScopeRepository';
|
||||
import { AbstractService } from '../../common/AbstractService';
|
||||
import { Scope } from '../entity/Scope';
|
||||
import { PageOptions, PageResult } from '../util/EntityUtil';
|
||||
|
||||
export interface CreateScopeCmd extends Pick<Scope, 'name' | 'registryId'> {
|
||||
operatorId?: string;
|
||||
}
|
||||
export interface UpdateRegistryCmd extends Pick<Scope, 'name' | 'scopeId' | 'registryId'> {
|
||||
operatorId?: string;
|
||||
}
|
||||
|
||||
export interface RemoveScopeCmd {
|
||||
scopeId: string;
|
||||
operatorId?: string;
|
||||
}
|
||||
|
||||
export interface RemoveScopeByRegistryIdCmd {
|
||||
registryId: string;
|
||||
operatorId?: string;
|
||||
}
|
||||
@SingletonProto({
|
||||
accessLevel: AccessLevel.PUBLIC,
|
||||
})
|
||||
export class ScopeManagerService extends AbstractService {
|
||||
@Inject()
|
||||
private readonly scopeRepository: ScopeRepository;
|
||||
|
||||
async findByName(name: string): Promise<Scope | null> {
|
||||
const scope = await this.scopeRepository.findByName(name);
|
||||
return scope;
|
||||
}
|
||||
|
||||
async countByRegistryId(registryId: string): Promise<number> {
|
||||
const count = await this.scopeRepository.countByRegistryId(registryId);
|
||||
return count;
|
||||
}
|
||||
|
||||
async createScope(createCmd: CreateScopeCmd): Promise<Scope> {
|
||||
const { name, registryId, operatorId } = createCmd;
|
||||
this.logger.info('[ScopeManagerService.CreateScope:prepare] operatorId: %s, createCmd: %s', operatorId, createCmd);
|
||||
const scope = Scope.create({
|
||||
name,
|
||||
registryId,
|
||||
});
|
||||
await this.scopeRepository.saveScope(scope);
|
||||
return scope;
|
||||
}
|
||||
|
||||
async listScopes(page: PageOptions): Promise<PageResult<Scope>> {
|
||||
return await this.scopeRepository.listScopes(page);
|
||||
}
|
||||
|
||||
async listScopesByRegistryId(registryId: string, page: PageOptions): Promise<PageResult<Scope>> {
|
||||
return await this.scopeRepository.listScopesByRegistryId(registryId, page);
|
||||
}
|
||||
|
||||
async removeByRegistryId(removeCmd: RemoveScopeByRegistryIdCmd): Promise<void> {
|
||||
const { registryId, operatorId } = removeCmd;
|
||||
this.logger.info('[ScopeManagerService.remove:prepare] operatorId: %s, registryId: %s', operatorId, registryId);
|
||||
return await this.scopeRepository.removeScopeByRegistryId(registryId);
|
||||
}
|
||||
|
||||
async remove(removeCmd: RemoveScopeCmd): Promise<void> {
|
||||
const { scopeId, operatorId } = removeCmd;
|
||||
this.logger.info('[ScopeManagerService.remove:prepare] operatorId: %s, scopeId: %s', operatorId, scopeId);
|
||||
return await this.scopeRepository.removeScope(scopeId);
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
import {
|
||||
AccessLevel,
|
||||
ContextProto,
|
||||
SingletonProto,
|
||||
Inject,
|
||||
} from '@eggjs/tegg';
|
||||
import { NFSAdapter } from '../../common/adapter/NFSAdapter';
|
||||
@@ -8,9 +8,9 @@ import { TaskState, TaskType } from '../../common/enum/Task';
|
||||
import { AbstractService } from '../../common/AbstractService';
|
||||
import { TaskRepository } from '../../repository/TaskRepository';
|
||||
import { Task } from '../entity/Task';
|
||||
import { QueueAdapter } from '../../common/adapter/QueueAdapter';
|
||||
import { QueueAdapter } from '../../common/typing';
|
||||
|
||||
@ContextProto({
|
||||
@SingletonProto({
|
||||
accessLevel: AccessLevel.PUBLIC,
|
||||
})
|
||||
export class TaskService extends AbstractService {
|
||||
@@ -28,21 +28,27 @@ export class TaskService extends AbstractService {
|
||||
public async createTask(task: Task, addTaskQueueOnExists: boolean) {
|
||||
const existsTask = await this.taskRepository.findTaskByTargetName(task.targetName, task.type);
|
||||
if (existsTask) {
|
||||
if (addTaskQueueOnExists && existsTask.state === TaskState.Waiting) {
|
||||
const queueLength = await this.getTaskQueueLength(task.type);
|
||||
if (queueLength < this.config.cnpmcore.taskQueueHighWaterSize) {
|
||||
// make sure waiting task in queue
|
||||
await this.queueAdapter.push<string>(task.type, existsTask.taskId);
|
||||
this.logger.info('[TaskService.createTask:exists-to-queue] taskType: %s, targetName: %s, taskId: %s, queue size: %s',
|
||||
task.type, task.targetName, task.taskId, queueLength);
|
||||
// if the task has not been triggered yet, do not create a duplicate
// if the task is executing, its state may already have been updated; in that case creation should continue
if (existsTask.state === TaskState.Waiting) {
// raise the priority of the task
|
||||
if (addTaskQueueOnExists) {
|
||||
const queueLength = await this.getTaskQueueLength(task.type);
|
||||
if (queueLength < this.config.cnpmcore.taskQueueHighWaterSize) {
|
||||
// make sure waiting task in queue
|
||||
await this.queueAdapter.push<string>(task.type, existsTask.taskId);
|
||||
this.logger.info('[TaskService.createTask:exists-to-queue] taskType: %s, targetName: %s, taskId: %s, queue size: %s',
|
||||
task.type, task.targetName, task.taskId, queueLength);
|
||||
}
|
||||
}
|
||||
}
|
||||
return existsTask;
|
||||
}
|
||||
await this.taskRepository.saveTask(task);
|
||||
const queueSize = await this.queueAdapter.push<string>(task.type, task.taskId);
|
||||
await this.queueAdapter.push<string>(task.type, task.taskId);
|
||||
const queueLength = await this.getTaskQueueLength(task.type);
|
||||
this.logger.info('[TaskService.createTask:new] taskType: %s, targetName: %s, taskId: %s, queue size: %s',
|
||||
task.type, task.targetName, task.taskId, queueSize);
|
||||
task.type, task.targetName, task.taskId, queueLength);
|
||||
return task;
|
||||
}
|
||||
|
||||
@@ -51,34 +57,48 @@ export class TaskService extends AbstractService {
|
||||
await this.appendLogToNFS(task, appendLog);
|
||||
}
|
||||
task.state = TaskState.Waiting;
|
||||
// make sure updatedAt changed
|
||||
task.updatedAt = new Date();
|
||||
await this.taskRepository.saveTask(task);
|
||||
const queueSize = await this.queueAdapter.push<string>(task.type, task.taskId);
|
||||
await this.queueAdapter.push<string>(task.type, task.taskId);
|
||||
const queueLength = await this.getTaskQueueLength(task.type);
|
||||
this.logger.info('[TaskService.retryTask:save] taskType: %s, targetName: %s, taskId: %s, queue size: %s',
|
||||
task.type, task.targetName, task.taskId, queueSize);
|
||||
task.type, task.targetName, task.taskId, queueLength);
|
||||
}
|
||||
|
||||
public async findTask(taskId: string) {
|
||||
return await this.taskRepository.findTask(taskId);
|
||||
}
|
||||
|
||||
public async findTasks(taskIdList: Array<string>) {
|
||||
return await this.taskRepository.findTasks(taskIdList);
|
||||
}
|
||||
|
||||
public async findTaskLog(task: Task) {
|
||||
return await this.nfsAdapter.getDownloadUrlOrStream(task.logPath);
|
||||
}
|
||||
|
||||
public async findExecuteTask(taskType: TaskType) {
|
||||
const taskId = await this.queueAdapter.pop<string>(taskType);
|
||||
if (taskId) {
|
||||
const task = await this.taskRepository.findTask(taskId);
|
||||
if (task) {
|
||||
task.setExecuteWorker();
|
||||
task.state = TaskState.Processing;
|
||||
task.attempts += 1;
|
||||
await this.taskRepository.saveTask(task);
|
||||
return task;
|
||||
let taskId = await this.queueAdapter.pop<string>(taskType);
|
||||
let task: Task | null;
|
||||
|
||||
while (taskId) {
|
||||
task = await this.taskRepository.findTask(taskId);
|
||||
|
||||
// the task has been deleted or has already been executed
// continue and take the next task
|
||||
if (task === null || task?.state !== TaskState.Waiting) {
|
||||
taskId = await this.queueAdapter.pop<string>(taskType);
|
||||
continue;
|
||||
}
|
||||
|
||||
const condition = task.start();
|
||||
const saveSucceed = await this.taskRepository.idempotentSaveTask(task, condition);
|
||||
if (!saveSucceed) {
|
||||
taskId = await this.queueAdapter.pop<string>(taskType);
|
||||
continue;
|
||||
}
|
||||
return task;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -86,30 +106,42 @@ export class TaskService extends AbstractService {
|
||||
// try processing timeout tasks in 10 mins
|
||||
const tasks = await this.taskRepository.findTimeoutTasks(TaskState.Processing, 60000 * 10);
|
||||
for (const task of tasks) {
|
||||
// ignore ChangesStream task, it won't timeout
|
||||
if (task.attempts >= 3 && task.type !== TaskType.ChangesStream) {
|
||||
await this.finishTask(task, TaskState.Timeout);
|
||||
this.logger.warn(
|
||||
'[TaskService.retryExecuteTimeoutTasks:timeout] taskType: %s, targetName: %s, taskId: %s, attempts %s set to fail',
|
||||
try {
|
||||
// ignore ChangesStream task, it won't timeout
|
||||
if (task.attempts >= 3 && task.type !== TaskType.ChangesStream) {
|
||||
await this.finishTask(task, TaskState.Timeout);
|
||||
this.logger.warn(
|
||||
'[TaskService.retryExecuteTimeoutTasks:timeout] taskType: %s, targetName: %s, taskId: %s, attempts %s set to fail',
|
||||
task.type, task.targetName, task.taskId, task.attempts);
|
||||
continue;
|
||||
}
|
||||
if (task.attempts >= 1) {
|
||||
// reset logPath
|
||||
task.resetLogPath();
|
||||
}
|
||||
await this.retryTask(task);
|
||||
this.logger.info(
|
||||
'[TaskService.retryExecuteTimeoutTasks:retry] taskType: %s, targetName: %s, taskId: %s, attempts %s will retry again',
|
||||
task.type, task.targetName, task.taskId, task.attempts);
|
||||
} catch (e) {
|
||||
this.logger.error(
|
||||
'[TaskService.retryExecuteTimeoutTasks:error] processing task, taskType: %s, targetName: %s, taskId: %s, attempts %s will retry again',
|
||||
task.type, task.targetName, task.taskId, task.attempts);
|
||||
continue;
|
||||
}
|
||||
if (task.attempts >= 1) {
|
||||
// reset logPath
|
||||
task.resetLogPath();
|
||||
}
|
||||
await this.retryTask(task);
|
||||
this.logger.warn(
|
||||
'[TaskService.retryExecuteTimeoutTasks:retry] taskType: %s, targetName: %s, taskId: %s, attempts %s will retry again',
|
||||
task.type, task.targetName, task.taskId, task.attempts);
|
||||
}
|
||||
// try waiting timeout tasks in 30 mins
|
||||
const waitingTasks = await this.taskRepository.findTimeoutTasks(TaskState.Waiting, 60000 * 30);
|
||||
for (const task of waitingTasks) {
|
||||
await this.retryTask(task);
|
||||
this.logger.warn(
|
||||
'[TaskService.retryExecuteTimeoutTasks:retryWaiting] taskType: %s, targetName: %s, taskId: %s waiting too long',
|
||||
task.type, task.targetName, task.taskId);
|
||||
try {
|
||||
await this.retryTask(task);
|
||||
this.logger.warn(
|
||||
'[TaskService.retryExecuteTimeoutTasks:retryWaiting] taskType: %s, targetName: %s, taskId: %s waiting too long',
|
||||
task.type, task.targetName, task.taskId);
|
||||
} catch (e) {
|
||||
this.logger.error(
|
||||
'[TaskService.retryExecuteTimeoutTasks:error] waiting task, taskType: %s, targetName: %s, taskId: %s, attempts %s will retry again',
|
||||
task.type, task.targetName, task.taskId, task.attempts);
|
||||
}
|
||||
}
|
||||
return {
|
||||
processing: tasks.length,
|
||||
@@ -119,7 +151,6 @@ export class TaskService extends AbstractService {
|
||||
|
||||
public async appendTaskLog(task: Task, appendLog: string) {
|
||||
await this.appendLogToNFS(task, appendLog);
|
||||
task.updatedAt = new Date();
|
||||
await this.taskRepository.saveTask(task);
|
||||
}
|
||||
|
||||
|
||||
84
app/core/service/TokenService.ts
Normal file
@@ -0,0 +1,84 @@
|
||||
import dayjs from 'dayjs';
|
||||
import {
|
||||
AccessLevel,
|
||||
SingletonProto,
|
||||
Inject,
|
||||
} from '@eggjs/tegg';
|
||||
import { isEmpty } from 'lodash';
|
||||
import { AbstractService } from '../../common/AbstractService';
|
||||
import { Token, isGranularToken } from '../entity/Token';
|
||||
import { TokenPackage as TokenPackageModel } from '../../../app/repository/model/TokenPackage';
|
||||
import { Package as PackageModel } from '../../../app/repository/model/Package';
|
||||
import { ModelConvertor } from '../../../app/repository/util/ModelConvertor';
|
||||
import { Package as PackageEntity } from '../entity/Package';
|
||||
import { ForbiddenError, UnauthorizedError } from 'egg-errors';
|
||||
import { getScopeAndName } from '../../../app/common/PackageUtil';
|
||||
import { sha512 } from '../../../app/common/UserUtil';
|
||||
import { UserRepository } from '../../../app/repository/UserRepository';
|
||||
|
||||
@SingletonProto({
|
||||
accessLevel: AccessLevel.PUBLIC,
|
||||
})
|
||||
export class TokenService extends AbstractService {
|
||||
@Inject()
|
||||
private readonly TokenPackage: typeof TokenPackageModel;
|
||||
@Inject()
|
||||
private readonly Package: typeof PackageModel;
|
||||
@Inject()
|
||||
private readonly userRepository: UserRepository;
|
||||
|
||||
public async listTokenPackages(token: Token) {
|
||||
if (isGranularToken(token)) {
|
||||
const models = await this.TokenPackage.find({ tokenId: token.tokenId });
|
||||
const packages = await this.Package.find({ packageId: models.map(m => m.packageId) });
|
||||
return packages.map(pkg => ModelConvertor.convertModelToEntity(pkg, PackageEntity));
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public async checkGranularTokenAccess(token: Token, fullname: string) {
|
||||
// skip classic token
|
||||
if (!isGranularToken(token)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// check for expires
|
||||
if (dayjs(token.expiredAt).isBefore(new Date())) {
|
||||
throw new UnauthorizedError('Token expired');
|
||||
}
|
||||
|
||||
// check for scope whitelist
|
||||
const [ scope, name ] = getScopeAndName(fullname);
|
||||
// check for packages whitelist
|
||||
const allowedPackages = await this.listTokenPackages(token);
|
||||
|
||||
// check for scope & packages access
|
||||
if (isEmpty(allowedPackages) && isEmpty(token.allowedScopes)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const existPkgConfig = allowedPackages?.find(pkg => pkg.scope === scope && pkg.name === name);
|
||||
if (existPkgConfig) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const existScopeConfig = token.allowedScopes?.find(s => s === scope);
|
||||
if (existScopeConfig) {
|
||||
return true;
|
||||
}
|
||||
|
||||
throw new ForbiddenError(`can't access package "${fullname}"`);
|
||||
|
||||
}
|
||||
|
||||
async getUserAndToken(authorization: string) {
|
||||
if (!authorization) return null;
|
||||
const matchs = /^Bearer ([\w\.]+?)$/.exec(authorization);
|
||||
if (!matchs) return null;
|
||||
const tokenValue = matchs[1];
|
||||
const tokenKey = sha512(tokenValue);
|
||||
const authorizedUserAndToken = await this.userRepository.findUserAndTokenByTokenKey(tokenKey);
|
||||
return authorizedUserAndToken;
|
||||
}
|
||||
|
||||
}
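Not part of the diff — a sketch of how a caller might combine the two token helpers above; the header value and package name are made up, and tokenService is assumed to be the injected TokenService.

// illustrative only
const authorized = await tokenService.getUserAndToken(ctx.get('authorization')); // parses "Bearer cnpm_..."
if (!authorized) throw new UnauthorizedError('invalid token');
// classic tokens pass through; granular tokens are checked for expiry, allowedScopes and allowedPackages
await tokenService.checkGranularTokenAccess(authorized.token, '@example/foo');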
|
||||
@@ -1,21 +1,24 @@
|
||||
import crypto from 'crypto';
|
||||
import {
|
||||
AccessLevel,
|
||||
ContextProto,
|
||||
SingletonProto,
|
||||
Inject,
|
||||
} from '@eggjs/tegg';
|
||||
import { NotFoundError, ForbiddenError } from 'egg-errors';
|
||||
import { UserRepository } from '../../repository/UserRepository';
|
||||
import { User as UserEntity } from '../entity/User';
|
||||
import { Token as TokenEntity } from '../entity/Token';
|
||||
import { Token as TokenEntity, TokenType } from '../entity/Token';
|
||||
import { WebauthnCredential as WebauthnCredentialEntity } from '../entity/WebauthnCredential';
|
||||
import { LoginResultCode } from '../../common/enum/User';
|
||||
import { integrity, checkIntegrity, randomToken, sha512 } from '../../common/UserUtil';
|
||||
import { AbstractService } from '../../common/AbstractService';
|
||||
|
||||
type Optional<T, K extends keyof T> = Omit<T, K> & Partial<T>;
|
||||
|
||||
type CreateUser = {
|
||||
name: string;
|
||||
password: string;
|
||||
email: string;
|
||||
password: string;
|
||||
ip: string;
|
||||
};
|
||||
|
||||
@@ -25,13 +28,32 @@ type LoginResult = {
|
||||
token?: TokenEntity;
|
||||
};
|
||||
|
||||
type CreateTokenOptions = {
|
||||
type CreateTokenOption = CreateClassicTokenOptions | CreateGranularTokenOptions;
|
||||
|
||||
type CreateGranularTokenOptions = {
|
||||
type: TokenType.granular;
|
||||
name: string;
|
||||
description?: string;
|
||||
allowedScopes?: string[];
|
||||
allowedPackages?: string[];
|
||||
isReadonly?: boolean;
|
||||
cidrWhitelist?: string[];
|
||||
expires: number;
|
||||
};
|
||||
|
||||
type CreateClassicTokenOptions = {
|
||||
isReadonly?: boolean;
|
||||
isAutomation?: boolean;
|
||||
cidrWhitelist?: string[];
|
||||
};
|
||||
|
||||
@ContextProto({
|
||||
type CreateWebauthnCredentialOptions = {
|
||||
credentialId: string;
|
||||
publicKey: string;
|
||||
browserType?: string;
|
||||
};
|
||||
|
||||
@SingletonProto({
|
||||
accessLevel: AccessLevel.PUBLIC,
|
||||
})
|
||||
export class UserService extends AbstractService {
|
||||
@@ -43,6 +65,10 @@ export class UserService extends AbstractService {
|
||||
return checkIntegrity(plain, user.passwordIntegrity);
|
||||
}
|
||||
|
||||
async findUserByName(name: string): Promise<UserEntity | null> {
|
||||
return await this.userRepository.findUserByName(name);
|
||||
}
|
||||
|
||||
async login(name: string, password: string): Promise<LoginResult> {
|
||||
const user = await this.userRepository.findUserByName(name);
|
||||
if (!user) return { code: LoginResultCode.UserNotFound };
|
||||
@@ -53,6 +79,23 @@ export class UserService extends AbstractService {
|
||||
return { code: LoginResultCode.Success, user, token };
|
||||
}
|
||||
|
||||
async ensureTokenByUser({ name, email, password = crypto.randomUUID(), ip }: Optional<CreateUser, 'password'>) {
|
||||
let user = await this.userRepository.findUserByName(name);
|
||||
if (!user) {
|
||||
const createRes = await this.create({
|
||||
name,
|
||||
email,
|
||||
// Authentication via sso
|
||||
// should use token instead of password
|
||||
password,
|
||||
ip,
|
||||
});
|
||||
user = createRes.user;
|
||||
}
|
||||
const token = await this.createToken(user.userId);
|
||||
return { user, token };
|
||||
}
|
||||
|
||||
async create(createUser: CreateUser) {
|
||||
const passwordSalt = crypto.randomBytes(30).toString('hex');
|
||||
const plain = `${passwordSalt}${createUser.password}`;
|
||||
@@ -70,8 +113,8 @@ export class UserService extends AbstractService {
|
||||
return { user: userEntity, token };
|
||||
}
|
||||
|
||||
async savePublicUser(name: string, email: string): Promise<{ changed: boolean, user: UserEntity }> {
|
||||
const storeName = name.startsWith('name:') ? name : `npm:${name}`;
|
||||
async saveUser(userPrefix = 'npm:', name: string, email: string): Promise<{ changed: boolean, user: UserEntity }> {
|
||||
const storeName = name.startsWith('name:') ? name : `${userPrefix}${name}`;
|
||||
let user = await this.userRepository.findUserByName(storeName);
|
||||
if (!user) {
|
||||
const passwordSalt = crypto.randomBytes(20).toString('hex');
|
||||
@@ -96,9 +139,10 @@ export class UserService extends AbstractService {
|
||||
return { changed: true, user };
|
||||
}
|
||||
|
||||
async createToken(userId: string, options: CreateTokenOptions = {}) {
|
||||
async createToken(userId: string, options: CreateTokenOption = {}) {
|
||||
// https://github.blog/2021-09-23-announcing-npms-new-access-token-format/
|
||||
// https://github.blog/2021-04-05-behind-githubs-new-authentication-token-formats/
|
||||
// https://github.blog/changelog/2022-12-06-limit-scope-of-npm-tokens-with-the-new-granular-access-tokens/
|
||||
const token = randomToken(this.config.cnpmcore.name);
|
||||
const tokenKey = sha512(token);
|
||||
const tokenMark = token.substring(0, token.indexOf('_') + 4);
|
||||
@@ -106,9 +150,7 @@ export class UserService extends AbstractService {
|
||||
tokenKey,
|
||||
tokenMark,
|
||||
userId,
|
||||
cidrWhitelist: options.cidrWhitelist ?? [],
|
||||
isReadonly: options.isReadonly ?? false,
|
||||
isAutomation: options.isAutomation ?? false,
|
||||
...options,
|
||||
});
|
||||
await this.userRepository.saveToken(tokenEntity);
|
||||
tokenEntity.token = token;
|
||||
@@ -129,4 +171,28 @@ export class UserService extends AbstractService {
|
||||
}
|
||||
await this.userRepository.removeToken(token.tokenId);
|
||||
}
|
||||
|
||||
async findWebauthnCredential(userId: string, browserType?: string) {
|
||||
const credential = await this.userRepository.findCredentialByUserIdAndBrowserType(userId, browserType || null);
|
||||
return credential;
|
||||
}
|
||||
|
||||
async createWebauthnCredential(userId: string, options: CreateWebauthnCredentialOptions) {
|
||||
const credentialEntity = WebauthnCredentialEntity.create({
|
||||
userId,
|
||||
credentialId: options.credentialId,
|
||||
publicKey: options.publicKey,
|
||||
browserType: options.browserType,
|
||||
});
|
||||
await this.userRepository.saveCredential(credentialEntity);
|
||||
return credentialEntity;
|
||||
}
|
||||
|
||||
async removeWebauthnCredential(userId: string, browserType?: string) {
|
||||
const credential = await this.userRepository.findCredentialByUserIdAndBrowserType(userId, browserType || null);
|
||||
if (credential) {
|
||||
await this.userRepository.removeCredential(credential.wancId);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,10 +1,24 @@
|
||||
import { EntityData } from '../entity/Entity';
|
||||
import ObjectID from 'bson-objectid';
|
||||
import { E400 } from 'egg-errors';
|
||||
import { EntityData } from '../entity/Entity';
|
||||
|
||||
type PartialBy<T, K extends keyof T> = Omit<T, K> & Partial<Pick<T, K>>;
|
||||
|
||||
export type EasyData<T extends EntityData, Id extends keyof T> = PartialBy<T, 'createdAt' | 'updatedAt' | Id>;
|
||||
|
||||
const MAX_PAGE_SIZE = 100 as const;
|
||||
export interface PageOptions {
|
||||
pageSize?: number;
|
||||
pageIndex?: number;
|
||||
}
|
||||
export interface PageResult<T> {
|
||||
count: number;
|
||||
data: Array<T>
|
||||
}
|
||||
export interface PageLimitOptions {
|
||||
offset: number;
|
||||
limit: number;
|
||||
}
|
||||
|
||||
export class EntityUtil {
|
||||
static defaultData<T extends EntityData, Id extends keyof T>(data: EasyData<T, Id>, id: Id): T {
|
||||
@@ -17,4 +31,15 @@ export class EntityUtil {
|
||||
static createId(): string {
|
||||
return new ObjectID().toHexString();
|
||||
}
|
||||
|
||||
static convertPageOptionsToLimitOption(page: PageOptions): PageLimitOptions {
|
||||
const { pageIndex = 0, pageSize = 20 } = page;
|
||||
if (pageSize > MAX_PAGE_SIZE) {
|
||||
throw new E400(`max page size is 100, current request is ${pageSize}`);
|
||||
}
|
||||
return {
|
||||
offset: pageIndex * pageSize,
|
||||
limit: pageSize,
|
||||
};
|
||||
}
|
||||
}
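Not part of the diff — example inputs and outputs for the pagination helper above, assuming EntityUtil is imported from app/core/util/EntityUtil (defaults are pageIndex 0 / pageSize 20, hard cap 100).

// illustrative only
EntityUtil.convertPageOptionsToLimitOption({}); // { offset: 0, limit: 20 }
EntityUtil.convertPageOptionsToLimitOption({ pageIndex: 2, pageSize: 50 }); // { offset: 100, limit: 50 }
EntityUtil.convertPageOptionsToLimitOption({ pageSize: 200 }); // throws E400: max page size is 100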
|
||||
|
||||
53
app/infra/AuthAdapter.ts
Normal file
@@ -0,0 +1,53 @@
|
||||
import {
|
||||
AccessLevel,
|
||||
EggContext,
|
||||
Inject,
|
||||
SingletonProto,
|
||||
} from '@eggjs/tegg';
|
||||
import { Redis } from 'ioredis';
|
||||
import { randomUUID } from 'crypto';
|
||||
import { AuthClient, AuthUrlResult, userResult } from '../common/typing';
|
||||
|
||||
const ONE_DAY = 3600 * 24;
|
||||
|
||||
type SSO_USER = {
|
||||
name: string;
|
||||
email: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* Use a sorted set to keep the queue in order and ensure the same value is only inserted once
|
||||
*/
|
||||
@SingletonProto({
|
||||
accessLevel: AccessLevel.PUBLIC,
|
||||
name: 'authAdapter',
|
||||
})
|
||||
export class AuthAdapter implements AuthClient {
|
||||
@Inject()
|
||||
readonly redis: Redis;
|
||||
|
||||
@Inject()
|
||||
readonly user: SSO_USER;
|
||||
|
||||
async getAuthUrl(ctx: EggContext): Promise<AuthUrlResult> {
|
||||
const sessionId = randomUUID();
|
||||
await this.redis.setex(sessionId, ONE_DAY, '');
|
||||
|
||||
// INTEGRATE.md
|
||||
const registry = ctx.app.config.cnpmcore.registry;
|
||||
return {
|
||||
loginUrl: `${registry}/-/v1/login/request/session/${sessionId}`,
|
||||
doneUrl: `${registry}/-/v1/login/done/session/${sessionId}`,
|
||||
};
|
||||
}
|
||||
|
||||
// should be implemented in infra
|
||||
async ensureCurrentUser() {
|
||||
if (this.user) {
|
||||
const { name, email } = this.user;
|
||||
return { name, email } as userResult;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
}
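Not part of the diff — a sketch of the web-login flow the adapter above supports; sessionId is the random UUID stored in redis for one day, and the URLs follow the template in getAuthUrl.

// illustrative only: `npm login` triggers getAuthUrl(ctx) and then polls doneUrl
const { loginUrl, doneUrl } = await authAdapter.getAuthUrl(ctx);
// loginUrl: <registry>/-/v1/login/request/session/<sessionId>
// doneUrl:  <registry>/-/v1/login/done/session/<sessionId>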
|
||||
@@ -1,22 +1,19 @@
|
||||
import {
|
||||
SingletonProto,
|
||||
AccessLevel,
|
||||
LifecycleInit,
|
||||
Inject,
|
||||
EggObjectLifecycle,
|
||||
SingletonProto,
|
||||
} from '@eggjs/tegg';
|
||||
import {
|
||||
EggLogger,
|
||||
EggAppConfig,
|
||||
} from 'egg';
|
||||
import { EggAppConfig, EggLogger } from 'egg';
|
||||
import FSClient from 'fs-cnpm';
|
||||
import { AppendResult, NFSClient, UploadOptions, UploadResult } from '../common/typing';
|
||||
import { AppendResult, NFSClient, UploadOptions, UploadResult, DownloadOptions } from '../common/typing';
|
||||
import { Readable } from 'stream';
|
||||
|
||||
@SingletonProto({
|
||||
name: 'nfsClient',
|
||||
accessLevel: AccessLevel.PUBLIC,
|
||||
})
|
||||
export class NFSClientAdapter implements EggObjectLifecycle, NFSClient {
|
||||
export class NFSClientAdapter implements NFSClient {
|
||||
@Inject()
|
||||
private logger: EggLogger;
|
||||
|
||||
@@ -31,7 +28,8 @@ export class NFSClientAdapter implements EggObjectLifecycle, NFSClient {
|
||||
|
||||
url?(key: string): string;
|
||||
|
||||
async init() {
|
||||
@LifecycleInit()
|
||||
protected async init() {
|
||||
// NFS interface https://github.com/cnpm/cnpmjs.org/wiki/NFS-Guide
|
||||
if (this.config.nfs.client) {
|
||||
this._client = this.config.nfs.client;
|
||||
@@ -79,4 +77,8 @@ export class NFSClientAdapter implements EggObjectLifecycle, NFSClient {
|
||||
}
|
||||
return await this._client.uploadBuffer(bytes, options);
|
||||
}
|
||||
|
||||
async download(key: string, filePath: string, options: DownloadOptions): Promise<void> {
|
||||
return await this._client.download(key, filePath, options);
|
||||
}
|
||||
}
|
||||
|
||||
47
app/infra/QueueAdapter.ts
Normal file
@@ -0,0 +1,47 @@
import {
AccessLevel,
Inject,
SingletonProto,
} from '@eggjs/tegg';
import { Redis } from 'ioredis';
import { QueueAdapter } from '../common/typing';

/**
* Use a sorted set to keep the queue in order and ensure the same value is only inserted once
*/
@SingletonProto({
accessLevel: AccessLevel.PUBLIC,
name: 'queueAdapter',
})
export class RedisQueueAdapter implements QueueAdapter {
@Inject()
private readonly redis: Redis; // injected by the redis plugin

private getQueueName(key: string) {
return `CNPMCORE_Q_V2_${key}`;
}

private getQueueScoreName(key: string) {
return `CNPMCORE_Q_S_V2_${key}`;
}

/**
* If the queue already has the same item, return false
* If the queue does not have the same item, return true
*/
async push<T>(key: string, item: T): Promise<boolean> {
const score = await this.redis.incr(this.getQueueScoreName(key));
const res = await this.redis.zadd(this.getQueueName(key), score, JSON.stringify(item));
return res !== 0;
}

async pop<T>(key: string) {
const [ json ] = await this.redis.zpopmin(this.getQueueName(key));
if (!json) return null;
return JSON.parse(json) as T;
}

async length(key: string) {
return await this.redis.zcount(this.getQueueName(key), '-inf', '+inf');
}
}
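Not part of the diff — what push/pop/length above translate to at the Redis level, using a hypothetical standalone ioredis client and a made-up 'sync_package' key; the real adapter is resolved via tegg injection instead.

import { Redis } from 'ioredis';

const redis = new Redis();
// push: an INCR counter provides a monotonically increasing score (FIFO order),
// while ZADD keeps at most one entry per member (deduplication)
const score = await redis.incr('CNPMCORE_Q_S_V2_sync_package');
await redis.zadd('CNPMCORE_Q_V2_sync_package', score, JSON.stringify('task-1'));
// pop: ZPOPMIN returns the member with the lowest score, i.e. the oldest push
const [ json ] = await redis.zpopmin('CNPMCORE_Q_V2_sync_package');
// length
const size = await redis.zcount('CNPMCORE_Q_V2_sync_package', '-inf', '+inf');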
@@ -1,39 +1,90 @@
|
||||
import {
|
||||
AccessLevel,
|
||||
ContextProto,
|
||||
Inject,
|
||||
EggContext,
|
||||
ContextProto,
|
||||
} from '@eggjs/tegg';
|
||||
import { EggAppConfig, EggLogger } from 'egg';
|
||||
import { UnauthorizedError, ForbiddenError } from 'egg-errors';
|
||||
import { UserRepository } from '../repository/UserRepository';
|
||||
import { PackageRepository } from '../repository/PackageRepository';
|
||||
import { Package as PackageEntity } from '../core/entity/Package';
|
||||
import { User as UserEntity } from '../core/entity/User';
|
||||
import { Token as TokenEntity } from '../core/entity/Token';
|
||||
import { sha512 } from '../common/UserUtil';
|
||||
import { getScopeAndName } from '../common/PackageUtil';
|
||||
import { RegistryManagerService } from '../core/service/RegistryManagerService';
|
||||
import { TokenService } from '../core/service/TokenService';
|
||||
|
||||
// https://docs.npmjs.com/creating-and-viewing-access-tokens#creating-tokens-on-the-website
|
||||
type TokenRole = 'read' | 'publish' | 'setting';
|
||||
export type TokenRole = 'read' | 'publish' | 'setting';
|
||||
|
||||
@ContextProto({
|
||||
// only inject on port module
|
||||
accessLevel: AccessLevel.PRIVATE,
|
||||
})
|
||||
export class UserRoleManager {
|
||||
@Inject()
|
||||
private readonly userRepository: UserRepository;
|
||||
@Inject()
|
||||
private readonly packageRepository: PackageRepository;
|
||||
@Inject()
|
||||
private readonly config: EggAppConfig;
|
||||
@Inject()
|
||||
protected logger: EggLogger;
|
||||
@Inject()
|
||||
private readonly registryManagerService: RegistryManagerService;
|
||||
@Inject()
|
||||
private readonly tokenService: TokenService;
|
||||
|
||||
private handleAuthorized = false;
|
||||
private currentAuthorizedUser: UserEntity;
|
||||
private currentAuthorizedToken: TokenEntity;
|
||||
|
||||
// check publish access
|
||||
// 1. admin has all access
|
||||
// 2. has published in current registry
|
||||
// 3. pkg scope is allowed to publish
|
||||
// use AbstractController#ensurePublishAccess to ensure the pkg exists;
|
||||
public async checkPublishAccess(ctx: EggContext, fullname: string) {
|
||||
|
||||
const user = await this.requiredAuthorizedUser(ctx, 'publish');
|
||||
|
||||
// 1. admin has all access
|
||||
const isAdmin = await this.isAdmin(ctx);
|
||||
if (isAdmin) {
|
||||
return user;
|
||||
}
|
||||
|
||||
// 2. check for checkGranularTokenAccess
|
||||
const authorizedUserAndToken = await this.getAuthorizedUserAndToken(ctx);
|
||||
const { token } = authorizedUserAndToken!;
|
||||
await this.tokenService.checkGranularTokenAccess(token, fullname);
|
||||
|
||||
// 3. has published in current registry
|
||||
const [ scope, name ] = getScopeAndName(fullname);
|
||||
const pkg = await this.packageRepository.findPackage(scope, name);
|
||||
const selfRegistry = await this.registryManagerService.ensureSelfRegistry();
|
||||
const inSelfRegistry = pkg?.registryId === selfRegistry.registryId;
|
||||
if (inSelfRegistry) {
|
||||
// 3.1 check in Maintainers table
|
||||
// Higher priority than scope check
|
||||
await this.requiredPackageMaintainer(pkg, user);
|
||||
return user;
|
||||
}
|
||||
|
||||
if (pkg && !scope && !inSelfRegistry) {
|
||||
// 3.2 a public package can't be published to another registry
// a scoped package can be migrated into the self registry
|
||||
throw new ForbiddenError(`Can\'t modify npm public package "${fullname}"`);
|
||||
}
|
||||
|
||||
// 4 check scope is allowed to publish
|
||||
await this.requiredPackageScope(scope, user);
|
||||
if (pkg) {
|
||||
// published scoped package
|
||||
await this.requiredPackageMaintainer(pkg!, user);
|
||||
}
|
||||
|
||||
return user;
|
||||
}
|
||||
|
||||
// {
|
||||
// 'user-agent': 'npm/8.1.2 node/v16.13.1 darwin arm64 workspaces/false',
|
||||
// 'npm-command': 'adduser',
|
||||
@@ -53,20 +104,16 @@ export class UserRoleManager {
|
||||
user: this.currentAuthorizedUser,
|
||||
};
|
||||
}
|
||||
|
||||
this.handleAuthorized = true;
|
||||
const authorization = ctx.get('authorization');
|
||||
if (!authorization) return null;
|
||||
const matchs = /^Bearer ([\w\.]+?)$/.exec(authorization);
|
||||
if (!matchs) return null;
|
||||
const tokenValue = matchs[1];
|
||||
const tokenKey = sha512(tokenValue);
|
||||
const authorizedUserAndToken = await this.userRepository.findUserAndTokenByTokenKey(tokenKey);
|
||||
if (authorizedUserAndToken) {
|
||||
this.currentAuthorizedToken = authorizedUserAndToken.token;
|
||||
this.currentAuthorizedUser = authorizedUserAndToken.user;
|
||||
ctx.userId = authorizedUserAndToken.user.userId;
|
||||
const authorizedUserAndToken = await this.tokenService.getUserAndToken(authorization);
|
||||
if (!authorizedUserAndToken) {
|
||||
return null;
|
||||
}
|
||||
this.currentAuthorizedToken = authorizedUserAndToken.token;
|
||||
this.currentAuthorizedUser = authorizedUserAndToken.user;
|
||||
ctx.userId = authorizedUserAndToken.user.userId;
|
||||
return authorizedUserAndToken;
|
||||
}
|
||||
|
||||
@@ -106,23 +153,6 @@ export class UserRoleManager {
|
||||
}
|
||||
|
||||
public async requiredPackageMaintainer(pkg: PackageEntity, user: UserEntity) {
|
||||
// should be private package
|
||||
if (!pkg.isPrivate) {
|
||||
// admins can modify public packages
|
||||
if (this.config.cnpmcore.admins[user.name]) {
|
||||
this.logger.warn('[UserRoleManager.requiredPackageMaintainer] admin "%s" modified public package "%s"',
|
||||
user.name, pkg.fullname);
|
||||
return;
|
||||
}
|
||||
throw new ForbiddenError(`Can\'t modify npm public package "${pkg.fullname}"`);
|
||||
}
|
||||
|
||||
// admins can modify private packages (published to cnpmcore)
|
||||
if (pkg.isPrivate && this.config.cnpmcore.admins[user.name] === user.email) {
|
||||
this.logger.warn('[UserRoleManager.requiredPackageMaintainer] admin "%s" modified private package "%s"',
|
||||
user.name, pkg.fullname);
|
||||
return;
|
||||
}
|
||||
|
||||
const maintainers = await this.packageRepository.listPackageMaintainers(pkg.packageId);
|
||||
const maintainer = maintainers.find(m => m.userId === user.userId);
|
||||
@@ -134,14 +164,15 @@ export class UserRoleManager {
|
||||
|
||||
public async requiredPackageScope(scope: string, user: UserEntity) {
|
||||
const cnpmcoreConfig = this.config.cnpmcore;
|
||||
if (!cnpmcoreConfig.allowPublishNonScopePackage) {
|
||||
const allowScopes = user.scopes ?? cnpmcoreConfig.allowScopes;
|
||||
if (!scope) {
|
||||
throw new ForbiddenError(`Package scope required, legal scopes: "${allowScopes.join(', ')}"`);
|
||||
}
|
||||
if (!allowScopes.includes(scope)) {
|
||||
throw new ForbiddenError(`Scope "${scope}" not match legal scopes: "${allowScopes.join(', ')}"`);
|
||||
}
|
||||
if (cnpmcoreConfig.allowPublishNonScopePackage) {
|
||||
return;
|
||||
}
|
||||
const allowScopes = user.scopes ?? cnpmcoreConfig.allowScopes;
|
||||
if (!scope) {
|
||||
throw new ForbiddenError(`Package scope required, legal scopes: "${allowScopes.join(', ')}"`);
|
||||
}
|
||||
if (!allowScopes.includes(scope)) {
|
||||
throw new ForbiddenError(`Scope "${scope}" not match legal scopes: "${allowScopes.join(', ')}"`);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>CNPM Binaries Mirror</title>
|
||||
</head>
|
||||
<body>
|
||||
</head>
|
||||
<body>
|
||||
<script>
|
||||
// Forked from https://chromedriver.storage.googleapis.com/index.html
|
||||
// Split a string in 2 parts. The first is the leading number, if any,
|
||||
@@ -52,11 +52,11 @@
|
||||
// the lowest.
|
||||
if (isNaN(numA) == false) return -1
|
||||
if (isNaN(numB) == false) return 1
|
||||
|
||||
// They are both strings.
|
||||
|
||||
// They are both strings.
|
||||
return (a < b) ? -1 : (a > b ? 1 : 0)
|
||||
}
|
||||
|
||||
|
||||
// Helper function to retrieve the value of a GET query parameter.
|
||||
// Greatly inspired from http://alturl.com/8rj7a
|
||||
function getParameter(parameterName) {
|
||||
@@ -66,26 +66,26 @@
|
||||
if (queryString.length <= 0) {
|
||||
return '';
|
||||
}
|
||||
|
||||
|
||||
// Find the beginning of the string
|
||||
begin = queryString.indexOf(parameterName);
|
||||
|
||||
|
||||
// If the parameter name is not found, skip it, otherwise return the
|
||||
// value.
|
||||
if (begin == -1) {
|
||||
return '';
|
||||
}
|
||||
|
||||
|
||||
// Add the length (integer) to the beginning.
|
||||
begin += parameterName.length;
|
||||
|
||||
|
||||
// Multiple parameters are separated by the '&' sign.
|
||||
end = queryString.indexOf ('&', begin);
|
||||
|
||||
|
||||
if (end == -1) {
|
||||
end = queryString.length;
|
||||
}
|
||||
|
||||
|
||||
// Return the string.
|
||||
return escape(unescape(queryString.substring(begin, end)));
|
||||
}
|
||||
@@ -94,7 +94,7 @@
|
||||
function displayList(items, root, path) {
|
||||
// Display the header
|
||||
document.write('<h1>Index of /' + path + '</h1>');
|
||||
|
||||
|
||||
// Start the table for the results.
|
||||
document.write('<table style="border-spacing:15px 0px;">');
|
||||
|
||||
@@ -103,18 +103,18 @@
|
||||
if (sortOrder != 'desc') {
|
||||
sortLink += '&sort=desc';
|
||||
}
|
||||
|
||||
|
||||
// Display the table header.
|
||||
document.write('<tr><th><img src="https://gw.alipayobjects.com/mdn/rms_fa382b/afts/img/A*v6fRRLopV_0AAAAAAAAAAAAAARQnAQ" alt="[ICO]"></th>');
|
||||
document.write('<th><a href="' + sortLink + '">Name</a></th>');
|
||||
document.write('<th>Last modified</th>');
|
||||
document.write('<th>Size</th>');
|
||||
document.write('<tr><th colspan="5"><hr></th></tr>');
|
||||
|
||||
|
||||
// Display the 'go back' button.
|
||||
if (path != '') {
|
||||
var backpath = location.pathname;
|
||||
|
||||
|
||||
// If there is more than one section delimited by '/' in the current
|
||||
// path we truncate the last section and append the rest to backpath.
|
||||
var delimiter = path.lastIndexOf('/');
|
||||
@@ -125,15 +125,15 @@
|
||||
backpath += path.substr(0, delimiter+1);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
document.write('<tr><td valign="top"><img src="https://gw.alipayobjects.com/mdn/rms_fa382b/afts/img/A*3QmJSqp2zpUAAAAAAAAAAAAAARQnAQ" alt="[DIR]"></td>');
|
||||
document.write('<td><a href="');
|
||||
document.write(backpath);
|
||||
document.write('">Parent Directory</a></td>');
|
||||
document.write('<td> </td>');
|
||||
document.write('<td align="right"> - </td></tr>');
|
||||
document.write('<td align="right"> - </td></tr>');
|
||||
}
|
||||
|
||||
|
||||
// Set up the variables.
|
||||
var directories = new Array();
|
||||
var files = new Array();
|
||||
@@ -146,7 +146,7 @@
|
||||
directories.push(item);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
files.sort(alphanumCompare);
|
||||
directories.sort(alphanumCompare);
|
||||
|
||||
@@ -155,13 +155,18 @@
|
||||
files.reverse();
|
||||
directories.reverse();
|
||||
}
|
||||
|
||||
|
||||
// Display the directories.
|
||||
for (var i = 0; i < directories.length; i++) {
|
||||
var lnk = location.pathname.substr(0, location.pathname.indexOf('?'));
|
||||
var item = directories[i];
|
||||
lnk += '?path=' + path + item.name;
|
||||
|
||||
if (path && !path.endsWith('/')) {
|
||||
lnk += '?path=' + path + '/' + item.name;
|
||||
} else {
|
||||
lnk += '?path=' + path + item.name;
|
||||
}
|
||||
|
||||
|
||||
document.write('<tr>');
|
||||
document.write('<td valign="top"><img src="https://gw.alipayobjects.com/mdn/rms_fa382b/afts/img/A*ct35SJLile8AAAAAAAAAAAAAARQnAQ" alt="[DIR]"></td>');
|
||||
document.write('<td><a href="' + lnk + '">' +
|
||||
@@ -170,7 +175,7 @@
|
||||
document.write('<td align="right">-</td>');
|
||||
document.write('</tr>');
|
||||
}
|
||||
|
||||
|
||||
// Display the files.
|
||||
for (var i = 0; i < files.length; i++) {
|
||||
var item = files[i];
|
||||
@@ -189,16 +194,16 @@
|
||||
if (sizeUnit !== '') {
|
||||
size = size.toFixed(2) + sizeUnit;
|
||||
}
|
||||
var lastModified = item.date;
|
||||
var lastModified = item.date;
|
||||
// Remove the entries we don't want to show.
|
||||
if (filename == '') {
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
if (filename.indexOf('$folder$') >= 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
// Display the row.
|
||||
document.write('<tr>');
|
||||
document.write('<td valign="top"><img src="https://gw.alipayobjects.com/mdn/rms_fa382b/afts/img/A*FKvWRo-vns4AAAAAAAAAAAAAARQnAQ" alt="[DIR]"></td>');
|
||||
@@ -208,13 +213,13 @@
|
||||
document.write('<td align="right">' + size + '</td>');
|
||||
document.write('</tr>');
|
||||
}
|
||||
|
||||
|
||||
// Close the table.
|
||||
document.write('<tr><th colspan="5"><hr></th></tr>');
|
||||
document.write('</table>');
|
||||
document.title = 'CNPM Binaries Mirror';
|
||||
}
|
||||
|
||||
|
||||
function fetchAndDisplay() {
|
||||
var path = getParameter('path');
|
||||
var lastSlash = location.pathname.lastIndexOf("/");
|
||||
@@ -238,6 +243,6 @@
|
||||
}
|
||||
}
|
||||
fetchAndDisplay();
|
||||
</script>
|
||||
</body>
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
148
app/port/config.ts
Normal file
@@ -0,0 +1,148 @@
|
||||
import { SyncDeleteMode, SyncMode, ChangesStreamMode } from '../common/constants';
|
||||
|
||||
export { cnpmcoreConfig } from '../../config/config.default';
|
||||
|
||||
export type CnpmcoreConfig = {
|
||||
name: string,
|
||||
/**
|
||||
* enable hook or not
|
||||
*/
|
||||
hookEnable: boolean,
|
||||
/**
|
||||
* max custom hooks count
|
||||
*/
|
||||
hooksLimit: number,
|
||||
/**
|
||||
* upstream registry url
|
||||
*/
|
||||
sourceRegistry: string,
|
||||
/**
|
||||
* whether the upstream registry is based on `cnpmcore`
|
||||
* if your upstream is the official npm registry, please turn it off
|
||||
*/
|
||||
sourceRegistryIsCNpm: boolean,
|
||||
/**
|
||||
* sync upstream first
|
||||
*/
|
||||
syncUpstreamFirst: boolean,
|
||||
/**
|
||||
* sync upstream timeout, default is 3 minutes
|
||||
*/
|
||||
sourceRegistrySyncTimeout: number,
|
||||
/**
|
||||
* sync task high water size, default is 100
|
||||
*/
|
||||
taskQueueHighWaterSize: number,
|
||||
/**
|
||||
* sync mode
|
||||
* - none: don't sync npm packages
|
||||
* - admin: don't sync npm packages, only admins can create sync tasks via the sync controller.
|
||||
* - all: sync all npm packages
|
||||
* - exist: only sync existing packages, takes effect when `enableCheckRecentlyUpdated` or `enableChangesStream` is enabled
|
||||
*/
|
||||
syncMode: SyncMode,
|
||||
syncDeleteMode: SyncDeleteMode,
|
||||
syncPackageWorkerMaxConcurrentTasks: number,
|
||||
triggerHookWorkerMaxConcurrentTasks: number,
|
||||
createTriggerHookWorkerMaxConcurrentTasks: number,
|
||||
/**
|
||||
* stop syncing these packages in the future
|
||||
*/
|
||||
syncPackageBlockList: string[],
|
||||
/**
|
||||
* check recently updated packages from https://www.npmjs.com/browse/updated; if you set changesStreamRegistry to cnpmcore,
|
||||
* maybe you should disable it
|
||||
*/
|
||||
enableCheckRecentlyUpdated: boolean,
|
||||
/**
|
||||
* mirror binaries, default is false
|
||||
*/
|
||||
enableSyncBinary: boolean,
|
||||
/**
|
||||
* sync binary source api, default is `${sourceRegistry}/-/binary`
|
||||
*/
|
||||
syncBinaryFromAPISource: string,
|
||||
/**
|
||||
* enable syncing download data from the source registry, see https://github.com/cnpm/cnpmcore/issues/108
|
||||
* all three parameters must be configured at the same time to take effect
|
||||
*/
|
||||
enableSyncDownloadData: boolean,
|
||||
syncDownloadDataSourceRegistry: string,
|
||||
/**
|
||||
* should be YYYY-MM-DD format
|
||||
*/
|
||||
syncDownloadDataMaxDate: string,
|
||||
/**
|
||||
* @see https://github.com/npm/registry-follower-tutorial
|
||||
*/
|
||||
enableChangesStream: boolean,
|
||||
checkChangesStreamInterval: number,
|
||||
changesStreamRegistry: string,
|
||||
/**
|
||||
* handle the _changes request mode, default is 'streaming', please set it to 'json' when the upstream is a cnpmcore registry
|
||||
*/
|
||||
changesStreamRegistryMode: ChangesStreamMode,
|
||||
/**
|
||||
* registry url
|
||||
*/
|
||||
registry: string,
|
||||
/**
|
||||
* https://docs.npmjs.com/cli/v6/using-npm/config#always-auth npm <= 6
|
||||
* if `alwaysAuth=true`, all API requests require an access token
|
||||
*/
|
||||
alwaysAuth: boolean,
|
||||
/**
|
||||
* scope allow list
|
||||
*/
|
||||
allowScopes: string [],
|
||||
/**
|
||||
* allow publishing non-scoped packages, disabled by default
|
||||
*/
|
||||
allowPublishNonScopePackage: boolean,
|
||||
/**
|
||||
* whether public registration is allowed, otherwise only admins can log in
|
||||
*/
|
||||
allowPublicRegistration: boolean,
|
||||
/**
|
||||
* default system admins
|
||||
*/
|
||||
admins: Record<string, string>,
|
||||
/**
|
||||
* use webauthn for login, https://webauthn.guide/
|
||||
* only support platform authenticators, browser support: https://webauthn.me/browser-support
|
||||
*/
|
||||
enableWebAuthn: boolean,
|
||||
/**
|
||||
* http response cache control header
|
||||
*/
|
||||
enableCDN: boolean,
|
||||
/**
|
||||
* if you are using a CDN, you can override it
|
||||
* this means caching for 300s on both the CDN server and the client side.
|
||||
*/
|
||||
cdnCacheControlHeader: string,
|
||||
/**
|
||||
* if you are using a CDN, you can set it to 'Accept, Accept-Encoding'
|
||||
*/
|
||||
cdnVaryHeader: string,
|
||||
/**
|
||||
* store full package version manifest data in the database table (package_version_manifests), default is false
|
||||
*/
|
||||
enableStoreFullPackageVersionManifestsToDatabase: boolean,
|
||||
/**
|
||||
* only allow the publish action from the npm client with version >= 7.0.0
|
||||
*/
|
||||
enableNpmClientAndVersionCheck: boolean,
|
||||
/**
|
||||
* sync when a package is not found, only takes effect when syncMode = all/exist
|
||||
*/
|
||||
syncNotFound: boolean,
|
||||
/**
|
||||
* redirect to source registry when package not found
|
||||
*/
|
||||
redirectNotFound: boolean,
|
||||
/**
|
||||
* enable unpkg features, https://github.com/cnpm/cnpmcore/issues/452
|
||||
*/
|
||||
enableUnpkg: boolean,
|
||||
};
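Not part of the diff: a small sketch of a partial config object typed against `CnpmcoreConfig`. All values below are placeholders (the import path and defaults are assumptions, not recommendations); only the type, `SyncMode`, and the `admins` name-to-email shape come from the code above.

```ts
import { CnpmcoreConfig } from '../port/config';
import { SyncMode } from '../common/constants';

// Illustrative partial config (placeholder values only).
const cnpmcore: Partial<CnpmcoreConfig> = {
  sourceRegistry: 'https://registry.npmjs.org',
  syncMode: SyncMode.none,
  redirectNotFound: true,
  allowScopes: [ '@example' ],
  // admins maps user name to email, matching the admins[user.name] === user.email check above
  admins: { admin_user: 'admin@example.com' },
};
```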
|
||||
@@ -21,8 +21,19 @@ import { UserService } from '../../core/service/UserService';
|
||||
import {
|
||||
VersionRule,
|
||||
} from '../typebox';
|
||||
import { SyncMode } from '../../common/constants';
|
||||
|
||||
class PackageNotFoundError extends NotFoundError {}
|
||||
class PackageNotFoundError extends NotFoundError {
|
||||
redirectToSourceRegistry?: string;
|
||||
}
|
||||
|
||||
class ControllerRedirectError extends NotFoundError {
|
||||
location: string;
|
||||
constructor(location: string) {
|
||||
super();
|
||||
this.location = location;
|
||||
}
|
||||
}
|
||||
|
||||
export abstract class AbstractController extends MiddlewareController {
|
||||
@Inject()
|
||||
@@ -42,23 +53,89 @@ export abstract class AbstractController extends MiddlewareController {
|
||||
return this.config.cnpmcore.sourceRegistry;
|
||||
}
|
||||
|
||||
protected get enableSyncAll() {
|
||||
return this.config.cnpmcore.syncMode === 'all';
|
||||
protected get enableSync() {
|
||||
return this.config.cnpmcore.syncMode !== SyncMode.none;
|
||||
}
|
||||
|
||||
protected isPrivateScope(scope: string) {
|
||||
return scope && this.config.cnpmcore.allowScopes.includes(scope);
|
||||
}
|
||||
|
||||
protected async ensurePublishAccess(ctx: EggContext, fullname: string, checkPkgExist = true) {
|
||||
const user = await this.userRoleManager.checkPublishAccess(ctx, fullname);
|
||||
let pkg: PackageEntity | null = null;
|
||||
if (checkPkgExist) {
|
||||
const [ scope, name ] = getScopeAndName(fullname);
|
||||
pkg = await this.packageRepository.findPackage(scope, name);
|
||||
if (!pkg) {
|
||||
throw this.createPackageNotFoundError(fullname, undefined);
|
||||
}
|
||||
}
|
||||
return {
|
||||
pkg,
|
||||
user,
|
||||
};
|
||||
}
|
||||
|
||||
protected get syncNotFound() {
|
||||
return this.config.cnpmcore.syncNotFound;
|
||||
}
|
||||
|
||||
protected get redirectNotFound() {
|
||||
return this.config.cnpmcore.redirectNotFound;
|
||||
}
|
||||
|
||||
protected getAllowSync(ctx: EggContext): boolean {
|
||||
let allowSync = false;
|
||||
|
||||
// request not from a node client, treat it as a web request, don't sync
|
||||
const ua = ctx.get('user-agent');
|
||||
if (!ua || !ua.includes('node')) {
|
||||
return allowSync;
|
||||
}
|
||||
|
||||
// if the request is `/xxx?write=true`, the read request is being used for a write, don't sync
|
||||
if (ctx.query.write) {
|
||||
return allowSync;
|
||||
}
|
||||
|
||||
allowSync = true;
|
||||
return allowSync;
|
||||
}
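A condensed restatement of the rule above, for readability only; the helper name and the example values are illustrative (the npm user-agent string is the one quoted earlier in this diff).

```ts
// Mirrors getAllowSync: only node-client reads without a `write` query may trigger a sync.
function allowSyncFor(userAgent: string | undefined, query: Record<string, string | undefined>): boolean {
  if (!userAgent || !userAgent.includes('node')) return false; // web requests never sync
  if (query.write) return false; // read-before-write requests never sync
  return true;
}

// allowSyncFor('npm/8.1.2 node/v16.13.1 darwin arm64 workspaces/false', {})        // true
// allowSyncFor('Mozilla/5.0 ...', {})                                              // false
// allowSyncFor('npm/8.1.2 node/v16.13.1 darwin arm64 workspaces/false', { write: 'true' }) // false
```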
|
||||
|
||||
protected createControllerRedirectError(location: string) {
|
||||
return new ControllerRedirectError(location);
|
||||
}
|
||||
|
||||
protected createPackageNotFoundError(fullname: string, version?: string) {
|
||||
const message = version ? `${fullname}@${version} not found` : `${fullname} not found`;
|
||||
const err = new PackageNotFoundError(message);
|
||||
return err;
|
||||
}
|
||||
|
||||
protected createPackageNotFoundErrorWithRedirect(fullname: string, version?: string, allowSync = false) {
|
||||
// const err = new PackageNotFoundError(message);
|
||||
const err = this.createPackageNotFoundError(fullname, version);
|
||||
const [ scope ] = getScopeAndName(fullname);
|
||||
// don't sync private-scope packages
|
||||
if (!this.isPrivateScope(scope)) {
|
||||
// syncMode = none, redirect public package to source registry
|
||||
if (!this.enableSyncAll) {
|
||||
err.redirectToSourceRegistry = this.sourceRegistry;
|
||||
// syncMode = none/admin, redirect public package to source registry
|
||||
if (!this.enableSync && this.config.cnpmcore.syncMode !== SyncMode.admin) {
|
||||
if (this.redirectNotFound) {
|
||||
err.redirectToSourceRegistry = this.sourceRegistry;
|
||||
}
|
||||
} else {
|
||||
// syncMode = all/exist
|
||||
if (allowSync && this.syncNotFound) {
|
||||
// ErrorHandler will use syncPackage to create sync task
|
||||
err.syncPackage = {
|
||||
fullname,
|
||||
};
|
||||
}
|
||||
if (allowSync && this.redirectNotFound) {
|
||||
// redirect when package not found
|
||||
err.redirectToSourceRegistry = this.sourceRegistry;
|
||||
}
|
||||
}
|
||||
}
|
||||
return err;
|
||||
@@ -74,23 +151,12 @@ export abstract class AbstractController extends MiddlewareController {
|
||||
return await this.getPackageEntity(scope, name);
|
||||
}
|
||||
|
||||
// 1. get package
|
||||
// 2. check current user is maintainer
|
||||
// 3. make sure current token can publish
|
||||
protected async getPackageEntityAndRequiredMaintainer(ctx: EggContext, fullname: string): Promise<PackageEntity> {
|
||||
const [ scope, name ] = getScopeAndName(fullname);
|
||||
const pkg = await this.getPackageEntity(scope, name);
|
||||
const authorizedUser = await this.userRoleManager.requiredAuthorizedUser(ctx, 'publish');
|
||||
await this.userRoleManager.requiredPackageMaintainer(pkg, authorizedUser);
|
||||
return pkg;
|
||||
}
|
||||
|
||||
// try to get package entity, throw NotFoundError when package not exists
|
||||
protected async getPackageEntity(scope: string, name: string): Promise<PackageEntity> {
|
||||
const packageEntity = await this.packageRepository.findPackage(scope, name);
|
||||
if (!packageEntity) {
|
||||
const fullname = getFullname(scope, name);
|
||||
throw this.createPackageNotFoundError(fullname);
|
||||
throw this.createPackageNotFoundErrorWithRedirect(fullname);
|
||||
}
|
||||
return packageEntity;
|
||||
}
|
||||
|
||||
55
app/port/controller/AccessController.ts
Normal file
@@ -0,0 +1,55 @@
|
||||
import {
|
||||
HTTPController,
|
||||
HTTPMethod,
|
||||
HTTPMethodEnum,
|
||||
HTTPParam,
|
||||
} from '@eggjs/tegg';
|
||||
import { AbstractController } from './AbstractController';
|
||||
import { FULLNAME_REG_STRING, getFullname, getScopeAndName } from '../../common/PackageUtil';
|
||||
import { PackageAccessLevel } from '../../common/constants';
|
||||
import { ForbiddenError, NotFoundError } from 'egg-errors';
|
||||
|
||||
@HTTPController()
|
||||
export class AccessController extends AbstractController {
|
||||
@HTTPMethod({
|
||||
path: `/-/package/:fullname(${FULLNAME_REG_STRING})/collaborators`,
|
||||
method: HTTPMethodEnum.GET,
|
||||
})
|
||||
async listCollaborators(@HTTPParam() fullname: string) {
|
||||
const [ scope, name ] = getScopeAndName(fullname);
|
||||
const pkg = await this.packageRepository.findPackage(scope, name);
|
||||
// return 403 if the pkg does not exist
|
||||
if (!pkg) {
|
||||
throw new ForbiddenError('Forbidden');
|
||||
}
|
||||
|
||||
const maintainers = await this.packageRepository.listPackageMaintainers(pkg!.packageId);
|
||||
const res: Record<string, string> = {};
|
||||
maintainers.forEach(maintainer => {
|
||||
res[maintainer.displayName] = PackageAccessLevel.write;
|
||||
});
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
@HTTPMethod({
|
||||
path: '/-/org/:username/package',
|
||||
method: HTTPMethodEnum.GET,
|
||||
})
|
||||
async listPackagesByUser(@HTTPParam() username: string) {
|
||||
const user = await this.userRepository.findUserByName(username);
|
||||
if (!user) {
|
||||
throw new NotFoundError(`User "${username}" not found`);
|
||||
}
|
||||
|
||||
const pkgs = await this.packageRepository.listPackagesByUserId(user.userId);
|
||||
const res: Record<string, string> = {};
|
||||
pkgs.forEach(pkg => {
|
||||
res[getFullname(pkg.scope, pkg.name)] = PackageAccessLevel.write;
|
||||
});
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
@@ -12,7 +12,8 @@ import { NotFoundError } from 'egg-errors';
|
||||
import { AbstractController } from './AbstractController';
|
||||
import { BinarySyncerService } from '../../core/service/BinarySyncerService';
|
||||
import { Binary } from '../../core/entity/Binary';
|
||||
import binaries from '../../../config/binaries';
|
||||
import binaries, { BinaryName } from '../../../config/binaries';
|
||||
import { BinaryNameRule, BinarySubpathRule } from '../typebox';
|
||||
|
||||
@HTTPController()
|
||||
export class BinarySyncController extends AbstractController {
|
||||
@@ -33,9 +34,10 @@ export class BinarySyncController extends AbstractController {
|
||||
method: HTTPMethodEnum.GET,
|
||||
})
|
||||
async listBinaries() {
|
||||
return Object.values(binaries).map(binaryConfig => {
|
||||
return Object.entries(binaries).map(([ binaryName, binaryConfig ]) => {
|
||||
return {
|
||||
name: `${binaryConfig.category}/`,
|
||||
name: `${binaryName}/`,
|
||||
category: `${binaryConfig.category}/`,
|
||||
description: binaryConfig.description,
|
||||
distUrl: binaryConfig.distUrl,
|
||||
repoUrl: /^https?:\/\//.test(binaryConfig.repo) ? binaryConfig.repo : `https://github.com/${binaryConfig.repo}`,
|
||||
@@ -49,17 +51,39 @@ export class BinarySyncController extends AbstractController {
|
||||
path: '/-/binary/:binaryName(@[^/]{1,220}\/[^/]{1,220}|[^@/]{1,220})/:subpath(.*)',
|
||||
method: HTTPMethodEnum.GET,
|
||||
})
|
||||
async showBinary(@Context() ctx: EggContext, @HTTPParam() binaryName: string, @HTTPParam() subpath: string) {
|
||||
async showBinary(@Context() ctx: EggContext, @HTTPParam() binaryName: BinaryName, @HTTPParam() subpath: string) {
|
||||
// check binaryName valid
|
||||
try {
|
||||
ctx.tValidate(BinaryNameRule, binaryName);
|
||||
} catch {
|
||||
throw new NotFoundError(`Binary "${binaryName}" not found`);
|
||||
}
|
||||
subpath = subpath || '/';
|
||||
if (subpath === '/') {
|
||||
const items = await this.binarySyncerService.listRootBinaries(binaryName);
|
||||
return this.formatItems(items);
|
||||
}
|
||||
try {
|
||||
ctx.tValidate(BinarySubpathRule, subpath);
|
||||
} catch {
|
||||
throw new NotFoundError(`Binary "${binaryName}/${subpath}" not found`);
|
||||
}
|
||||
subpath = `/${subpath}`;
|
||||
const parsed = path.parse(subpath);
|
||||
const parent = parsed.dir === '/' ? '/' : `${parsed.dir}/`;
|
||||
const name = subpath.endsWith('/') ? `${parsed.base}/` : parsed.base;
|
||||
const binary = await this.binarySyncerService.findBinary(binaryName, parent, name);
|
||||
// first look up the case where binary === category
|
||||
let binary = await this.binarySyncerService.findBinary(binaryName, parent, name);
|
||||
if (!binary) {
|
||||
// if nothing is found, fall back to the mergeCategory case
|
||||
const category = binaries?.[binaryName]?.category;
|
||||
if (category) {
|
||||
// canvas/v2.6.1/canvas-v2.6.1-node-v57-linux-glibc-x64.tar.gz
|
||||
// -> node-canvas-prebuilt/v2.6.1/node-canvas-prebuilt-v2.6.1-node-v57-linux-glibc-x64.tar.gz
|
||||
binary = await this.binarySyncerService.findBinary(category, parent, name.replace(new RegExp(`^${binaryName}-`), `${category}-`));
|
||||
}
|
||||
}
|
||||
|
||||
if (!binary) {
|
||||
throw new NotFoundError(`Binary "${binaryName}${subpath}" not found`);
|
||||
}
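A worked example of the fallback rename above; the binary and category names come from the comment in the diff, the snippet itself is only illustrative.

```ts
// When binaryName = 'canvas' is configured with category 'node-canvas-prebuilt',
// a miss on the direct lookup retries with the file name rewritten to the category prefix.
const binaryName = 'canvas';
const category = 'node-canvas-prebuilt';
const name = 'canvas-v2.6.1-node-v57-linux-glibc-x64.tar.gz';
const renamed = name.replace(new RegExp(`^${binaryName}-`), `${category}-`);
// renamed === 'node-canvas-prebuilt-v2.6.1-node-v57-linux-glibc-x64.tar.gz'
```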
|
||||
@@ -85,7 +109,13 @@ export class BinarySyncController extends AbstractController {
|
||||
path: '/-/binary/:binaryName(@[^/]{1,220}\/[^/]{1,220}|[^@/]{1,220})',
|
||||
method: HTTPMethodEnum.GET,
|
||||
})
|
||||
async showBinaryIndex(@Context() ctx: EggContext, @HTTPParam() binaryName: string) {
|
||||
async showBinaryIndex(@Context() ctx: EggContext, @HTTPParam() binaryName: BinaryName) {
|
||||
// check binaryName valid
|
||||
try {
|
||||
ctx.tValidate(BinaryNameRule, binaryName);
|
||||
} catch (e) {
|
||||
throw new NotFoundError(`Binary "${binaryName}" not found`);
|
||||
}
|
||||
return await this.showBinary(ctx, binaryName, '/');
|
||||
}
|
||||
|
||||
|
||||
@@ -8,10 +8,44 @@ import {
|
||||
Inject,
|
||||
} from '@eggjs/tegg';
|
||||
import { AbstractController } from './AbstractController';
|
||||
import { CacheService } from '../../core/service/CacheService';
|
||||
import { CacheService, DownloadInfo, UpstreamRegistryInfo } from '../../core/service/CacheService';
|
||||
|
||||
const startTime = new Date();
|
||||
|
||||
// registry site summary data SiteTotalData
|
||||
// SiteEnvInfo: environment and runtime info, queried in real time
|
||||
// UpstreamInfo: upstream info, queried in real time
|
||||
// TotalInfo: aggregate data, generated by a scheduled task every minute
|
||||
// LegacyInfo: legacy compatibility info
|
||||
type SiteTotalData = LegacyInfo & SiteEnvInfo & TotalInfo;
|
||||
|
||||
type LegacyInfo = {
|
||||
source_registry: string,
|
||||
changes_stream_registry: string,
|
||||
sync_changes_steam: any,
|
||||
};
|
||||
|
||||
type SiteEnvInfo = {
|
||||
sync_model: string;
|
||||
sync_binary: boolean;
|
||||
instance_start_time: Date;
|
||||
node_version: string;
|
||||
app_version: string;
|
||||
engine: string;
|
||||
cache_time: string;
|
||||
};
|
||||
|
||||
type TotalInfo = {
|
||||
last_package: string;
|
||||
last_package_version: string;
|
||||
doc_count: number | bigint;
|
||||
doc_version_count: number | bigint;
|
||||
update_seq: number | bigint;
|
||||
download: DownloadInfo;
|
||||
upstream_registries?: UpstreamRegistryInfo[];
|
||||
};
|
||||
|
||||
|
||||
@HTTPController()
|
||||
export class HomeController extends AbstractController {
|
||||
@Inject()
|
||||
@@ -23,9 +57,12 @@ export class HomeController extends AbstractController {
|
||||
path: '/',
|
||||
method: HTTPMethodEnum.GET,
|
||||
})
|
||||
// 2023-1-20
|
||||
// the original LegacyInfo fields are kept; since ChangesStream info is configured via the registry table, it may become stale
|
||||
// add the upstream_registries field to expose the list of upstream source registries
|
||||
async showTotal() {
|
||||
const totalData = await this.cacheService.getTotalData();
|
||||
const data = {
|
||||
const data: SiteTotalData = {
|
||||
last_package: totalData.lastPackage,
|
||||
last_package_version: totalData.lastPackageVersion,
|
||||
doc_count: totalData.packageCount,
|
||||
@@ -42,6 +79,7 @@ export class HomeController extends AbstractController {
|
||||
source_registry: this.config.cnpmcore.sourceRegistry,
|
||||
changes_stream_registry: this.config.cnpmcore.changesStreamRegistry,
|
||||
cache_time: totalData.cacheTime,
|
||||
upstream_registries: totalData.upstreamRegistries,
|
||||
};
|
||||
return data;
|
||||
}
|
||||
|
||||
128
app/port/controller/HookController.ts
Normal file
@@ -0,0 +1,128 @@
|
||||
import {
|
||||
Context,
|
||||
EggContext,
|
||||
HTTPBody,
|
||||
HTTPController,
|
||||
HTTPMethod,
|
||||
HTTPMethodEnum,
|
||||
HTTPParam,
|
||||
Inject,
|
||||
} from '@eggjs/tegg';
|
||||
import { HookManageService } from '../../core/service/HookManageService';
|
||||
import { TaskService } from '../../core/service/TaskService';
|
||||
import { UserRoleManager } from '../UserRoleManager';
|
||||
import { HookType } from '../../common/enum/Hook';
|
||||
import { TriggerHookTask } from '../../core/entity/Task';
|
||||
import { HookConvertor } from './convertor/HookConvertor';
|
||||
import { CreateHookRequestRule, UpdateHookRequestRule } from '../typebox';
|
||||
|
||||
export interface CreateHookRequest {
|
||||
type: string;
|
||||
name: string;
|
||||
endpoint: string;
|
||||
secret: string;
|
||||
}
|
||||
|
||||
export interface UpdateHookRequest {
|
||||
endpoint: string;
|
||||
secret: string;
|
||||
}
|
||||
|
||||
@HTTPController({
|
||||
path: '/-/npm',
|
||||
})
|
||||
export class HookController {
|
||||
@Inject()
|
||||
private readonly hookManageService: HookManageService;
|
||||
|
||||
@Inject()
|
||||
private readonly taskService: TaskService;
|
||||
|
||||
@Inject()
|
||||
private readonly userRoleManager: UserRoleManager;
|
||||
|
||||
@HTTPMethod({
|
||||
path: '/v1/hooks/hook',
|
||||
method: HTTPMethodEnum.POST,
|
||||
})
|
||||
async createHook(@Context() ctx: EggContext, @HTTPBody() req: CreateHookRequest) {
|
||||
ctx.tValidate(CreateHookRequestRule, req);
|
||||
const user = await this.userRoleManager.requiredAuthorizedUser(ctx, 'setting');
|
||||
const hook = await this.hookManageService.createHook({
|
||||
ownerId: user.userId,
|
||||
type: req.type as HookType,
|
||||
name: req.name,
|
||||
endpoint: req.endpoint,
|
||||
secret: req.secret,
|
||||
});
|
||||
return HookConvertor.convertToHookVo(hook, user);
|
||||
}
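Not part of the diff: an example payload for the hook-creation endpoint, based on the `CreateHookRequest` shape and the `/-/npm` controller prefix above. The endpoint URL and secret are placeholders, and the `type` value is an assumption about `HookType`.

```ts
import { CreateHookRequest } from './HookController';

// Example body for POST /-/npm/v1/hooks/hook (placeholder values).
const req: CreateHookRequest = {
  type: 'package',                               // assumed HookType value
  name: 'example-package',
  endpoint: 'https://example.com/webhooks/npm',  // where trigger events are delivered
  secret: 'change-me',                           // used to sign hook payloads
};
```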
|
||||
|
||||
@HTTPMethod({
|
||||
path: '/v1/hooks/hook/:id',
|
||||
method: HTTPMethodEnum.PUT,
|
||||
})
|
||||
async updateHook(@Context() ctx: EggContext, @HTTPParam() id: string, @HTTPBody() req: UpdateHookRequest) {
|
||||
ctx.tValidate(UpdateHookRequestRule, req);
|
||||
const user = await this.userRoleManager.requiredAuthorizedUser(ctx, 'setting');
|
||||
const hook = await this.hookManageService.updateHook({
|
||||
operatorId: user.userId,
|
||||
hookId: id,
|
||||
endpoint: req.endpoint,
|
||||
secret: req.secret,
|
||||
});
|
||||
let task: TriggerHookTask | null = null;
|
||||
if (hook.latestTaskId) {
|
||||
task = await this.taskService.findTask(hook.latestTaskId) as TriggerHookTask;
|
||||
}
|
||||
return HookConvertor.convertToHookVo(hook, user, task);
|
||||
}
|
||||
|
||||
@HTTPMethod({
|
||||
path: '/v1/hooks/hook/:id',
|
||||
method: HTTPMethodEnum.DELETE,
|
||||
})
|
||||
async deleteHook(@Context() ctx: EggContext, @HTTPParam() id: string) {
|
||||
const user = await this.userRoleManager.requiredAuthorizedUser(ctx, 'setting');
|
||||
const hook = await this.hookManageService.deleteHook({
|
||||
operatorId: user.userId,
|
||||
hookId: id,
|
||||
});
|
||||
let task: TriggerHookTask | null = null;
|
||||
if (hook.latestTaskId) {
|
||||
task = await this.taskService.findTask(hook.latestTaskId) as TriggerHookTask;
|
||||
}
|
||||
return HookConvertor.convertToDeleteHookVo(hook, user, task);
|
||||
}
|
||||
|
||||
@HTTPMethod({
|
||||
path: '/v1/hooks',
|
||||
method: HTTPMethodEnum.GET,
|
||||
})
|
||||
async listHooks(@Context() ctx: EggContext) {
|
||||
const user = await this.userRoleManager.requiredAuthorizedUser(ctx, 'read');
|
||||
const hooks = await this.hookManageService.listHooksByOwnerId(user.userId);
|
||||
const tasks = await this.taskService.findTasks(hooks.map(t => t.latestTaskId).filter((t): t is string => !!t));
|
||||
const res = hooks.map(hook => {
|
||||
const task = tasks.find(t => t.taskId === hook.latestTaskId) as TriggerHookTask;
|
||||
return HookConvertor.convertToHookVo(hook, user, task);
|
||||
});
|
||||
return {
|
||||
objects: res,
|
||||
};
|
||||
}
|
||||
|
||||
@HTTPMethod({
|
||||
path: '/v1/hooks/hook/:id',
|
||||
method: HTTPMethodEnum.GET,
|
||||
})
|
||||
async getHook(@Context() ctx: EggContext, @HTTPParam() id: string) {
|
||||
const user = await this.userRoleManager.requiredAuthorizedUser(ctx, 'read');
|
||||
const hook = await this.hookManageService.getHookByOwnerId(id, user.userId);
|
||||
let task: TriggerHookTask | null = null;
|
||||
if (hook.latestTaskId) {
|
||||
task = await this.taskService.findTask(hook.latestTaskId) as TriggerHookTask;
|
||||
}
|
||||
return HookConvertor.convertToHookVo(hook, user, task);
|
||||
}
|
||||
}
|
||||
@@ -7,6 +7,7 @@ import {
|
||||
Context,
|
||||
EggContext,
|
||||
Inject,
|
||||
Middleware,
|
||||
} from '@eggjs/tegg';
|
||||
import { ForbiddenError } from 'egg-errors';
|
||||
import { AbstractController } from './AbstractController';
|
||||
@@ -14,6 +15,7 @@ import { FULLNAME_REG_STRING } from '../../common/PackageUtil';
|
||||
import { PackageManagerService } from '../../core/service/PackageManagerService';
|
||||
import { PackageVersionBlockRepository } from '../../repository/PackageVersionBlockRepository';
|
||||
import { BlockPackageRule, BlockPackageType } from '../typebox';
|
||||
import { AdminAccess } from '../middleware/AdminAccess';
|
||||
|
||||
@HTTPController()
|
||||
export class PackageBlockController extends AbstractController {
|
||||
@@ -27,11 +29,8 @@ export class PackageBlockController extends AbstractController {
|
||||
path: `/-/package/:fullname(${FULLNAME_REG_STRING})/blocks`,
|
||||
method: HTTPMethodEnum.PUT,
|
||||
})
|
||||
@Middleware(AdminAccess)
|
||||
async blockPackage(@Context() ctx: EggContext, @HTTPParam() fullname: string, @HTTPBody() data: BlockPackageType) {
|
||||
const isAdmin = await this.userRoleManager.isAdmin(ctx);
|
||||
if (!isAdmin) {
|
||||
throw new ForbiddenError('Not allow to block package');
|
||||
}
|
||||
const params = { fullname, reason: data.reason };
|
||||
ctx.tValidate(BlockPackageRule, params);
|
||||
const packageEntity = await this.getPackageEntityByFullname(params.fullname);
|
||||
@@ -57,11 +56,8 @@ export class PackageBlockController extends AbstractController {
|
||||
path: `/-/package/:fullname(${FULLNAME_REG_STRING})/blocks`,
|
||||
method: HTTPMethodEnum.DELETE,
|
||||
})
|
||||
@Middleware(AdminAccess)
|
||||
async unblockPackage(@Context() ctx: EggContext, @HTTPParam() fullname: string) {
|
||||
const isAdmin = await this.userRoleManager.isAdmin(ctx);
|
||||
if (!isAdmin) {
|
||||
throw new ForbiddenError('Not allow to unblock package');
|
||||
}
|
||||
const packageEntity = await this.getPackageEntityByFullname(fullname);
|
||||
if (packageEntity.isPrivate) {
|
||||
throw new ForbiddenError(`Can\'t unblock private package "${fullname}"`);
|
||||
|
||||
@@ -8,20 +8,29 @@ import {
|
||||
EggContext,
|
||||
Inject,
|
||||
HTTPQuery,
|
||||
BackgroundTaskHelper,
|
||||
} from '@eggjs/tegg';
|
||||
import { ForbiddenError, NotFoundError } from 'egg-errors';
|
||||
import { AbstractController } from './AbstractController';
|
||||
import { FULLNAME_REG_STRING, getScopeAndName } from '../../common/PackageUtil';
|
||||
import { Task } from '../../core/entity/Task';
|
||||
import { PackageSyncerService } from '../../core/service/PackageSyncerService';
|
||||
import { RegistryManagerService } from '../../core/service/RegistryManagerService';
|
||||
import { TaskState } from '../../common/enum/Task';
|
||||
import { SyncPackageTaskRule, SyncPackageTaskType } from '../typebox';
|
||||
import { SyncMode } from '../../common/constants';
|
||||
|
||||
@HTTPController()
|
||||
export class PackageSyncController extends AbstractController {
|
||||
@Inject()
|
||||
private packageSyncerService: PackageSyncerService;
|
||||
|
||||
@Inject()
|
||||
private backgroundTaskHelper: BackgroundTaskHelper;
|
||||
|
||||
@Inject()
|
||||
private registryManagerService: RegistryManagerService;
|
||||
|
||||
private async _executeTaskAsync(task: Task) {
|
||||
const startTime = Date.now();
|
||||
this.logger.info('[PackageSyncController:executeTask:start] taskId: %s, targetName: %s, attempts: %s, params: %j, updatedAt: %s, delay %sms',
|
||||
@@ -46,43 +55,66 @@ export class PackageSyncController extends AbstractController {
|
||||
method: HTTPMethodEnum.PUT,
|
||||
})
|
||||
async createSyncTask(@Context() ctx: EggContext, @HTTPParam() fullname: string, @HTTPBody() data: SyncPackageTaskType) {
|
||||
if (!this.enableSyncAll) {
|
||||
if (!this.enableSync) {
|
||||
throw new ForbiddenError('Not allow to sync package');
|
||||
}
|
||||
const tips = data.tips || `Sync cause by "${ctx.href}", parent traceId: ${ctx.tracer.traceId}`;
|
||||
const isAdmin = await this.userRoleManager.isAdmin(ctx);
|
||||
|
||||
if (this.config.cnpmcore.syncMode === SyncMode.admin && !isAdmin) {
|
||||
throw new ForbiddenError('Only admin allow to sync package');
|
||||
}
|
||||
|
||||
const params = {
|
||||
fullname,
|
||||
remoteAuthToken: data.remoteAuthToken,
|
||||
tips,
|
||||
skipDependencies: !!data.skipDependencies,
|
||||
syncDownloadData: !!data.syncDownloadData,
|
||||
force: !!data.force,
|
||||
// only admins are allowed to sync history versions
|
||||
forceSyncHistory: !!data.forceSyncHistory && isAdmin,
|
||||
};
|
||||
ctx.tValidate(SyncPackageTaskRule, params);
|
||||
const [ scope, name ] = getScopeAndName(params.fullname);
|
||||
const packageEntity = await this.packageRepository.findPackage(scope, name);
|
||||
if (packageEntity?.isPrivate) {
|
||||
const registry = await this.registryManagerService.findByRegistryName(data?.registryName);
|
||||
|
||||
if (!registry && data.registryName) {
|
||||
throw new ForbiddenError(`Can\'t find target registry "${data.registryName}"`);
|
||||
}
|
||||
if (packageEntity?.isPrivate && !registry) {
|
||||
throw new ForbiddenError(`Can\'t sync private package "${params.fullname}"`);
|
||||
}
|
||||
if (params.syncDownloadData && !this.packageSyncerService.allowSyncDownloadData) {
|
||||
throw new ForbiddenError('Not allow to sync package download data');
|
||||
}
|
||||
if (registry && packageEntity?.registryId && packageEntity.registryId !== registry.registryId) {
|
||||
throw new ForbiddenError(`The package is synced from ${packageEntity.registryId}`);
|
||||
}
|
||||
const authorized = await this.userRoleManager.getAuthorizedUserAndToken(ctx);
|
||||
const task = await this.packageSyncerService.createTask(params.fullname, {
|
||||
authorIp: ctx.ip,
|
||||
authorId: authorized?.user.userId,
|
||||
remoteAuthToken: params.remoteAuthToken,
|
||||
tips: params.tips,
|
||||
skipDependencies: params.skipDependencies,
|
||||
syncDownloadData: params.syncDownloadData,
|
||||
forceSyncHistory: params.forceSyncHistory,
|
||||
registryId: registry?.registryId,
|
||||
});
|
||||
ctx.logger.info('[PackageSyncController.createSyncTask:success] taskId: %s, fullname: %s',
|
||||
task.taskId, fullname);
|
||||
if (data.force) {
|
||||
const isAdmin = await this.userRoleManager.isAdmin(ctx);
|
||||
if (isAdmin) {
|
||||
// execute task in background
|
||||
this._executeTaskAsync(task);
|
||||
ctx.logger.info('[PackageSyncController.createSyncTask:execute-immediately] taskId: %s',
|
||||
task.taskId);
|
||||
// set background task timeout to 5min
|
||||
this.backgroundTaskHelper.timeout = 1000 * 60 * 5;
|
||||
this.backgroundTaskHelper.run(async () => {
|
||||
ctx.logger.info('[PackageSyncController.createSyncTask:execute-immediately] taskId: %s',
|
||||
task.taskId);
|
||||
// execute task in background
|
||||
await this._executeTaskAsync(task);
|
||||
});
|
||||
}
|
||||
}
|
||||
ctx.status = 201;
|
||||
@@ -153,6 +185,7 @@ export class PackageSyncController extends AbstractController {
|
||||
skipDependencies: nodeps === 'true',
|
||||
syncDownloadData: false,
|
||||
force: false,
|
||||
forceSyncHistory: false,
|
||||
};
|
||||
const task = await this.createSyncTask(ctx, fullname, options);
|
||||
return {
|
||||
|
||||
@@ -46,7 +46,8 @@ export class PackageTagController extends AbstractController {
|
||||
async saveTag(@Context() ctx: EggContext, @HTTPParam() fullname: string, @HTTPParam() tag: string, @HTTPBody() version: string) {
|
||||
const data = { tag, version };
|
||||
ctx.tValidate(TagWithVersionRule, data);
|
||||
const pkg = await this.getPackageEntityAndRequiredMaintainer(ctx, fullname);
|
||||
const ensureRes = await this.ensurePublishAccess(ctx, fullname, true);
|
||||
const pkg = ensureRes.pkg!;
|
||||
const packageVersion = await this.getPackageVersionEntity(pkg, data.version);
|
||||
await this.packageManagerService.savePackageTag(pkg, data.tag, packageVersion.version);
|
||||
return { ok: true };
|
||||
@@ -64,7 +65,8 @@ export class PackageTagController extends AbstractController {
|
||||
if (tag === 'latest') {
|
||||
throw new ForbiddenError('Can\'t remove the "latest" tag');
|
||||
}
|
||||
const pkg = await this.getPackageEntityAndRequiredMaintainer(ctx, fullname);
|
||||
const ensureRes = await this.ensurePublishAccess(ctx, fullname, true);
|
||||
const pkg = ensureRes.pkg!;
|
||||
await this.packageManagerService.removePackageTag(pkg, data.tag);
|
||||
return { ok: true };
|
||||
}
|
||||
|
||||
207
app/port/controller/PackageVersionFileController.ts
Normal file
@@ -0,0 +1,207 @@
|
||||
import {
|
||||
HTTPController,
|
||||
HTTPMethod,
|
||||
HTTPMethodEnum,
|
||||
HTTPParam,
|
||||
HTTPQuery,
|
||||
Inject,
|
||||
Context,
|
||||
EggContext,
|
||||
Middleware,
|
||||
} from '@eggjs/tegg';
|
||||
import { NotFoundError } from 'egg-errors';
|
||||
import { join } from 'node:path';
|
||||
import { AbstractController } from './AbstractController';
|
||||
import { AdminAccess } from '../middleware/AdminAccess';
|
||||
import { getScopeAndName, FULLNAME_REG_STRING } from '../../common/PackageUtil';
|
||||
import { PackageVersionFileService } from '../../core/service/PackageVersionFileService';
|
||||
import { PackageManagerService } from '../../core/service/PackageManagerService';
|
||||
import { PackageVersionFile } from '../../core/entity/PackageVersionFile';
|
||||
import { PackageVersion } from '../../core/entity/PackageVersion';
|
||||
import { DistRepository } from '../../repository/DistRepository';
|
||||
|
||||
type FileItem = {
|
||||
path: string,
|
||||
type: 'file',
|
||||
contentType: string,
|
||||
integrity: string;
|
||||
lastModified: Date,
|
||||
size: number,
|
||||
};
|
||||
|
||||
type DirectoryItem = {
|
||||
path: string,
|
||||
type: 'directory',
|
||||
files: (DirectoryItem | FileItem)[],
|
||||
};
|
||||
|
||||
function formatFileItem(file: PackageVersionFile): FileItem {
|
||||
return {
|
||||
path: file.path,
|
||||
type: 'file',
|
||||
contentType: file.contentType,
|
||||
integrity: file.dist.integrity,
|
||||
lastModified: file.mtime,
|
||||
size: file.dist.size,
|
||||
};
|
||||
}
|
||||
|
||||
const META_CACHE_CONTROL = 'public, s-maxage=600, max-age=60';
|
||||
const FILE_CACHE_CONTROL = 'public, max-age=31536000';
|
||||
|
||||
@HTTPController()
|
||||
export class PackageVersionFileController extends AbstractController {
|
||||
@Inject()
|
||||
private packageManagerService: PackageManagerService;
|
||||
@Inject()
|
||||
private packageVersionFileService: PackageVersionFileService;
|
||||
@Inject()
|
||||
private distRepository: DistRepository;
|
||||
|
||||
#requireUnpkgEnable() {
|
||||
if (!this.config.cnpmcore.enableUnpkg) {
|
||||
throw new NotFoundError();
|
||||
}
|
||||
}
|
||||
|
||||
@HTTPMethod({
|
||||
// PUT /:fullname/:versionOrTag/files
|
||||
path: `/:fullname(${FULLNAME_REG_STRING})/:versionOrTag/files`,
|
||||
method: HTTPMethodEnum.PUT,
|
||||
})
|
||||
@Middleware(AdminAccess)
|
||||
async sync(@HTTPParam() fullname: string, @HTTPParam() versionOrTag: string) {
|
||||
this.#requireUnpkgEnable();
|
||||
const [ scope, name ] = getScopeAndName(fullname);
|
||||
const { packageVersion } = await this.packageManagerService.showPackageVersionByVersionOrTag(
|
||||
scope, name, versionOrTag);
|
||||
if (!packageVersion) {
|
||||
throw new NotFoundError(`${fullname}@${versionOrTag} not found`);
|
||||
}
|
||||
const files = await this.packageVersionFileService.syncPackageVersionFiles(packageVersion);
|
||||
return files.map(file => formatFileItem(file));
|
||||
}
|
||||
|
||||
@HTTPMethod({
|
||||
// GET /:fullname/:versionOrTag/files => /:fullname/:versionOrTag/files/${pkg.main}
|
||||
// GET /:fullname/:versionOrTag/files?meta
|
||||
// GET /:fullname/:versionOrTag/files/
|
||||
path: `/:fullname(${FULLNAME_REG_STRING})/:versionOrTag/files`,
|
||||
method: HTTPMethodEnum.GET,
|
||||
})
|
||||
async listFiles(@Context() ctx: EggContext,
|
||||
@HTTPParam() fullname: string,
|
||||
@HTTPParam() versionOrTag: string,
|
||||
@HTTPQuery() meta: string) {
|
||||
this.#requireUnpkgEnable();
|
||||
ctx.vary(this.config.cnpmcore.cdnVaryHeader);
|
||||
const [ scope, name ] = getScopeAndName(fullname);
|
||||
const packageVersion = await this.#getPackageVersion(ctx, fullname, scope, name, versionOrTag);
|
||||
ctx.set('cache-control', META_CACHE_CONTROL);
|
||||
const hasMeta = typeof meta === 'string' || ctx.path.endsWith('/files/');
|
||||
// meta request
|
||||
if (hasMeta) {
|
||||
const files = await this.#listFilesByDirectory(packageVersion, '/');
|
||||
if (!files) {
|
||||
throw new NotFoundError(`${fullname}@${versionOrTag}/files not found`);
|
||||
}
|
||||
return files;
|
||||
}
|
||||
const { manifest } = await this.packageManagerService.showPackageVersionManifest(scope, name, versionOrTag);
|
||||
// GET /foo/1.0.0/files => /foo/1.0.0/files/{main}
|
||||
// ignore empty entries, e.g. @types/node@20.2.5/
|
||||
const indexFile = manifest?.main || 'index.js';
|
||||
ctx.redirect(join(ctx.path, indexFile));
|
||||
}
|
||||
|
||||
@HTTPMethod({
|
||||
// GET /:fullname/:versionOrTag/files/:path
|
||||
// GET /:fullname/:versionOrTag/files/:path?meta
|
||||
path: `/:fullname(${FULLNAME_REG_STRING})/:versionOrTag/files/:path(.+)`,
|
||||
method: HTTPMethodEnum.GET,
|
||||
})
|
||||
async raw(@Context() ctx: EggContext,
|
||||
@HTTPParam() fullname: string,
|
||||
@HTTPParam() versionOrTag: string,
|
||||
@HTTPParam() path: string,
|
||||
@HTTPQuery() meta: string) {
|
||||
this.#requireUnpkgEnable();
|
||||
ctx.vary(this.config.cnpmcore.cdnVaryHeader);
|
||||
const [ scope, name ] = getScopeAndName(fullname);
|
||||
path = `/${path}`;
|
||||
const packageVersion = await this.#getPackageVersion(ctx, fullname, scope, name, versionOrTag);
|
||||
if (path.endsWith('/')) {
|
||||
const directory = path.substring(0, path.length - 1);
|
||||
const files = await this.#listFilesByDirectory(packageVersion, directory);
|
||||
if (!files) {
|
||||
throw new NotFoundError(`${fullname}@${versionOrTag}/files${directory} not found`);
|
||||
}
|
||||
ctx.set('cache-control', META_CACHE_CONTROL);
|
||||
return files;
|
||||
}
|
||||
|
||||
const file = await this.packageVersionFileService.showPackageVersionFile(packageVersion, path);
|
||||
if (!file) {
|
||||
throw new NotFoundError(`File ${fullname}@${versionOrTag}${path} not found`);
|
||||
}
|
||||
const hasMeta = typeof meta === 'string';
|
||||
if (hasMeta) {
|
||||
ctx.set('cache-control', META_CACHE_CONTROL);
|
||||
return formatFileItem(file);
|
||||
}
|
||||
ctx.set('cache-control', FILE_CACHE_CONTROL);
|
||||
ctx.type = file.contentType;
|
||||
if (file.contentType === 'text/html' || file.contentType === 'text/xml') {
|
||||
ctx.attachment(file.path);
|
||||
}
|
||||
return await this.distRepository.getDistStream(file.dist);
|
||||
}
|
||||
|
||||
async #getPackageVersion(ctx: EggContext, fullname: string, scope: string, name: string, versionOrTag: string) {
|
||||
const { blockReason, packageVersion } = await this.packageManagerService.showPackageVersionByVersionOrTag(
|
||||
scope, name, versionOrTag);
|
||||
if (blockReason) {
|
||||
this.setCDNHeaders(ctx);
|
||||
throw this.createPackageBlockError(blockReason, fullname, versionOrTag);
|
||||
}
|
||||
if (!packageVersion) {
|
||||
throw new NotFoundError(`${fullname}@${versionOrTag} not found`);
|
||||
}
|
||||
if (packageVersion.version !== versionOrTag) {
|
||||
ctx.set('cache-control', META_CACHE_CONTROL);
|
||||
const location = ctx.url.replace(`/${fullname}/${versionOrTag}/files`, `/${fullname}/${packageVersion.version}/files`);
|
||||
throw this.createControllerRedirectError(location);
|
||||
}
|
||||
return packageVersion;
|
||||
}
|
||||
|
||||
async #listFilesByDirectory(packageVersion: PackageVersion, directory: string) {
|
||||
const files = await this.packageVersionFileService.listPackageVersionFiles(packageVersion, directory);
|
||||
if (!files || files.length === 0) return null;
|
||||
// convert files to directory and file
|
||||
const directories = new Map<string, DirectoryItem>();
|
||||
for (const file of files) {
|
||||
// make sure parent directories exist
|
||||
const splits = file.directory.split('/');
|
||||
for (const [ index, name ] of splits.entries()) {
|
||||
const parentPath = index === 0 ? '' : `/${splits.slice(1, index).join('/')}`;
|
||||
const directoryPath = parentPath !== '/' ? `${parentPath}/${name}` : `/${name}`;
|
||||
let directoryItem = directories.get(directoryPath);
|
||||
if (!directoryItem) {
|
||||
directoryItem = {
|
||||
path: directoryPath,
|
||||
type: 'directory',
|
||||
files: [],
|
||||
};
|
||||
directories.set(directoryPath, directoryItem);
|
||||
if (parentPath) {
|
||||
// only set the first time
|
||||
directories.get(parentPath!)!.files.push(directoryItem);
|
||||
}
|
||||
}
|
||||
}
|
||||
directories.get(file.directory)!.files.push(formatFileItem(file));
|
||||
}
|
||||
return directories.get(directory);
|
||||
}
|
||||
}
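To make the #listFilesByDirectory bookkeeping concrete, a small standalone sketch of the same idea: folding a flat file list into nested directory items. The trimmed types mirror the ones in the controller above; the helper name and sample paths are made up.

```ts
type FileItem = { path: string, type: 'file' };
type DirectoryItem = { path: string, type: 'directory', files: (DirectoryItem | FileItem)[] };

// Build a directory map keyed by path, linking each new directory to its parent once.
function buildTree(paths: string[]): DirectoryItem {
  const directories = new Map<string, DirectoryItem>();
  directories.set('/', { path: '/', type: 'directory', files: [] });
  for (const filePath of paths) {
    const segments = filePath.split('/').slice(1); // drop the leading empty segment
    let parent = '/';
    for (const segment of segments.slice(0, -1)) {
      const dirPath = parent === '/' ? `/${segment}` : `${parent}/${segment}`;
      if (!directories.has(dirPath)) {
        const dir: DirectoryItem = { path: dirPath, type: 'directory', files: [] };
        directories.set(dirPath, dir);
        directories.get(parent)!.files.push(dir); // only linked the first time it is seen
      }
      parent = dirPath;
    }
    directories.get(parent)!.files.push({ path: filePath, type: 'file' });
  }
  return directories.get('/')!;
}

// buildTree(['/package.json', '/lib/index.js']) nests /lib/index.js under the /lib directory item.
```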
|
||||
109
app/port/controller/RegistryController.ts
Normal file
@@ -0,0 +1,109 @@
|
||||
import {
|
||||
Context,
|
||||
EggContext,
|
||||
HTTPBody,
|
||||
HTTPController,
|
||||
HTTPMethod,
|
||||
HTTPMethodEnum,
|
||||
HTTPParam,
|
||||
HTTPQuery,
|
||||
Inject,
|
||||
Middleware,
|
||||
} from '@eggjs/tegg';
|
||||
import { NotFoundError } from 'egg-errors';
|
||||
import { AbstractController } from './AbstractController';
|
||||
import { Static } from 'egg-typebox-validate/typebox';
|
||||
import { RegistryManagerService } from '../../core/service/RegistryManagerService';
|
||||
import { AdminAccess } from '../middleware/AdminAccess';
|
||||
import { ScopeManagerService } from '../../core/service/ScopeManagerService';
|
||||
import { RegistryCreateOptions, QueryPageOptions, RegistryCreateSyncOptions } from '../typebox';
|
||||
|
||||
@HTTPController()
|
||||
export class RegistryController extends AbstractController {
|
||||
@Inject()
|
||||
private readonly registryManagerService: RegistryManagerService;
|
||||
@Inject()
|
||||
private readonly scopeManagerService: ScopeManagerService;
|
||||
|
||||
@HTTPMethod({
|
||||
path: '/-/registry',
|
||||
method: HTTPMethodEnum.GET,
|
||||
})
|
||||
async listRegistries(@HTTPQuery() pageSize: Static<typeof QueryPageOptions>['pageSize'], @HTTPQuery() pageIndex: Static<typeof QueryPageOptions>['pageIndex']) {
|
||||
const registries = await this.registryManagerService.listRegistries({ pageSize, pageIndex });
|
||||
return registries;
|
||||
}
|
||||
|
||||
@HTTPMethod({
|
||||
path: '/-/registry/:id',
|
||||
method: HTTPMethodEnum.GET,
|
||||
})
|
||||
async showRegistry(@HTTPParam() id: string) {
|
||||
const registry = await this.registryManagerService.findByRegistryId(id);
|
||||
if (!registry) {
|
||||
throw new NotFoundError('registry not found');
|
||||
}
|
||||
return registry;
|
||||
}
|
||||
|
||||
@HTTPMethod({
|
||||
path: '/-/registry/:id/scopes',
|
||||
method: HTTPMethodEnum.GET,
|
||||
})
|
||||
async showRegistryScopes(@HTTPParam() id: string, @HTTPQuery() pageSize: Static<typeof QueryPageOptions>['pageSize'], @HTTPQuery() pageIndex: Static<typeof QueryPageOptions>['pageIndex']) {
|
||||
const registry = await this.registryManagerService.findByRegistryId(id);
|
||||
if (!registry) {
|
||||
throw new NotFoundError('registry not found');
|
||||
}
|
||||
const scopes = await this.scopeManagerService.listScopesByRegistryId(id, { pageIndex, pageSize });
|
||||
return scopes;
|
||||
}
|
||||
|
||||
@HTTPMethod({
|
||||
path: '/-/registry',
|
||||
method: HTTPMethodEnum.POST,
|
||||
})
|
||||
@Middleware(AdminAccess)
|
||||
async createRegistry(@Context() ctx: EggContext, @HTTPBody() registryOptions: Static<typeof RegistryCreateOptions>) {
|
||||
ctx.tValidate(RegistryCreateOptions, registryOptions);
|
||||
const authorizedUser = await this.userRoleManager.requiredAuthorizedUser(ctx, 'setting');
|
||||
const { name, changeStream, host, userPrefix = '', type } = registryOptions;
|
||||
await this.registryManagerService.createRegistry({
|
||||
name,
|
||||
changeStream,
|
||||
host,
|
||||
userPrefix,
|
||||
operatorId: authorizedUser.userId,
|
||||
type,
|
||||
});
|
||||
return { ok: true };
|
||||
}
|
||||
|
||||
@HTTPMethod({
|
||||
path: '/-/registry/:id/sync',
|
||||
method: HTTPMethodEnum.POST,
|
||||
})
|
||||
@Middleware(AdminAccess)
|
||||
async createRegistrySyncTask(@Context() ctx: EggContext, @HTTPParam() id: string, @HTTPBody() registryOptions: Static<typeof RegistryCreateSyncOptions>) {
|
||||
ctx.tValidate(RegistryCreateSyncOptions, registryOptions);
|
||||
const { since } = registryOptions;
|
||||
const registry = await this.registryManagerService.findByRegistryId(id);
|
||||
if (!registry) {
|
||||
throw new NotFoundError('registry not found');
|
||||
}
|
||||
const authorizedUser = await this.userRoleManager.requiredAuthorizedUser(ctx, 'setting');
|
||||
await this.registryManagerService.createSyncChangesStream({ registryId: registry.registryId, since, operatorId: authorizedUser.userId });
|
||||
return { ok: true };
|
||||
}
|
||||
|
||||
@HTTPMethod({
|
||||
path: '/-/registry/:id',
|
||||
method: HTTPMethodEnum.DELETE,
|
||||
})
|
||||
@Middleware(AdminAccess)
|
||||
async removeRegistry(@Context() ctx: EggContext, @HTTPParam() id: string) {
|
||||
const authorizedUser = await this.userRoleManager.requiredAuthorizedUser(ctx, 'setting');
|
||||
await this.registryManagerService.remove({ registryId: id, operatorId: authorizedUser.userId });
|
||||
return { ok: true };
|
||||
}
|
||||
}
|
||||
63
app/port/controller/ScopeController.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
import {
|
||||
Context,
|
||||
EggContext,
|
||||
HTTPBody,
|
||||
HTTPController,
|
||||
HTTPMethod,
|
||||
HTTPMethodEnum,
|
||||
HTTPParam,
|
||||
Inject,
|
||||
Middleware,
|
||||
} from '@eggjs/tegg';
|
||||
import { E400 } from 'egg-errors';
|
||||
import { AbstractController } from './AbstractController';
|
||||
import { Static } from 'egg-typebox-validate/typebox';
|
||||
import { AdminAccess } from '../middleware/AdminAccess';
|
||||
import { ScopeManagerService } from '../../core/service/ScopeManagerService';
|
||||
import { RegistryManagerService } from '../../core/service/RegistryManagerService';
|
||||
import { ScopeCreateOptions } from '../typebox';
|
||||
|
||||
|
||||
@HTTPController()
|
||||
export class ScopeController extends AbstractController {
|
||||
@Inject()
|
||||
private readonly scopeManagerService: ScopeManagerService;
|
||||
|
||||
@Inject()
|
||||
private readonly registryManagerService: RegistryManagerService;
|
||||
|
||||
@HTTPMethod({
|
||||
path: '/-/scope',
|
||||
method: HTTPMethodEnum.POST,
|
||||
})
|
||||
@Middleware(AdminAccess)
|
||||
async createScope(@Context() ctx: EggContext, @HTTPBody() scopeOptions: Static<typeof ScopeCreateOptions>) {
|
||||
const authorizedUser = await this.userRoleManager.requiredAuthorizedUser(ctx, 'setting');
|
||||
ctx.tValidate(ScopeCreateOptions, scopeOptions);
|
||||
const { name, registryId } = scopeOptions;
|
||||
|
||||
const registry = await this.registryManagerService.findByRegistryId(registryId);
|
||||
if (!registry) {
|
||||
throw new E400(`registry ${registryId} not found`);
|
||||
}
|
||||
|
||||
await this.scopeManagerService.createScope({
|
||||
name,
|
||||
registryId,
|
||||
operatorId: authorizedUser.userId,
|
||||
});
|
||||
return { ok: true };
|
||||
}
|
||||
|
||||
@HTTPMethod({
|
||||
path: '/-/scope/:id',
|
||||
method: HTTPMethodEnum.DELETE,
|
||||
})
|
||||
@Middleware(AdminAccess)
|
||||
async removeScope(@Context() ctx: EggContext, @HTTPParam() id: string) {
|
||||
const authorizedUser = await this.userRoleManager.requiredAuthorizedUser(ctx, 'setting');
|
||||
await this.scopeManagerService.remove({ scopeId: id, operatorId: authorizedUser.userId });
|
||||
return { ok: true };
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,4 +1,5 @@
import { UnauthorizedError } from 'egg-errors';
import { ForbiddenError, UnauthorizedError } from 'egg-errors';
import { AuthAdapter } from '../../infra/AuthAdapter';
import {
  HTTPController,
  HTTPMethod,
@@ -7,9 +8,13 @@ import {
  HTTPParam,
  Context,
  EggContext,
  Inject,
} from '@eggjs/tegg';
import { Static, Type } from '@sinclair/typebox';
import { AbstractController } from './AbstractController';
import { TokenType, isGranularToken } from '../../core/entity/Token';
import { TokenService } from '../../../app/core/service/TokenService';
import { getFullname } from '../../../app/common/PackageUtil';

// Creating and viewing access tokens
// https://docs.npmjs.com/creating-and-viewing-access-tokens#viewing-access-tokens
@@ -23,8 +28,24 @@ const TokenOptionsRule = Type.Object({
});
type TokenOptions = Static<typeof TokenOptionsRule>;

const GranularTokenOptionsRule = Type.Object({
  automation: Type.Optional(Type.Boolean()),
  readonly: Type.Optional(Type.Boolean()),
  cidr_whitelist: Type.Optional(Type.Array(Type.String({ maxLength: 100 }), { maxItems: 10 })),
  name: Type.String({ maxLength: 255 }),
  description: Type.Optional(Type.String({ maxLength: 255 })),
  allowedScopes: Type.Optional(Type.Array(Type.String({ maxLength: 100 }), { maxItems: 50 })),
  allowedPackages: Type.Optional(Type.Array(Type.String({ maxLength: 100 }), { maxItems: 50 })),
  expires: Type.Number({ minimum: 1, maximum: 365 }),
});
type GranularTokenOptions = Static<typeof GranularTokenOptionsRule>;

@HTTPController()
export class TokenController extends AbstractController {
  @Inject()
  private readonly authAdapter: AuthAdapter;
  @Inject()
  private readonly tokenService: TokenService;
  // https://github.com/npm/npm-profile/blob/main/lib/index.js#L233
  @HTTPMethod({
    path: '/-/npm/v1/tokens',
@@ -97,18 +118,116 @@ export class TokenController extends AbstractController {
    // "total": 2,
    // "urls": {}
    // }
    const objects = tokens.map(token => {
    const objects = tokens.filter(token => !isGranularToken(token))
      .map(token => {
        return {
          token: token.tokenMark,
          key: token.tokenKey,
          cidr_whitelist: token.cidrWhitelist,
          readonly: token.isReadonly,
          automation: token.isAutomation,
          created: token.createdAt,
          updated: token.updatedAt,
        };
      });
    // TODO: paging, urls: { next: string }
    return { objects, total: objects.length, urls: {} };
  }

  private async ensureWebUser() {
    const userRes = await this.authAdapter.ensureCurrentUser();
    if (!userRes?.name || !userRes?.email) {
      throw new ForbiddenError('need login first');
    }
    const user = await this.userService.findUserByName(userRes.name);
    if (!user?.userId) {
      throw new ForbiddenError('invalid user info');
    }
    return user;
  }

  @HTTPMethod({
    path: '/-/npm/v1/tokens/gat',
    method: HTTPMethodEnum.POST,
  })
  // Create a granular access token through the HTTP interface
  // https://docs.npmjs.com/about-access-tokens#about-granular-access-tokens
  // Main constraints:
  // 1. The token name and expires fields are required
  // 2. description, allowedScopes and allowedPackages are optional
  // 3. Requires an ensureCurrentUser implementation in AuthAdapter, or passing in this.user
  async createGranularToken(@Context() ctx: EggContext, @HTTPBody() tokenOptions: GranularTokenOptions) {
    ctx.tValidate(GranularTokenOptionsRule, tokenOptions);
    const user = await this.ensureWebUser();

    // Generate the token
    const { name, description, allowedPackages, allowedScopes, cidr_whitelist, automation, readonly, expires } = tokenOptions;
    const token = await this.userService.createToken(user.userId, {
      name,
      type: TokenType.granular,
      description,
      allowedPackages,
      allowedScopes,
      isAutomation: automation,
      isReadonly: readonly,
      cidrWhitelist: cidr_whitelist,
      expires,
    });

    return {
      name: token.name,
      token: token.token,
      key: token.tokenKey,
      cidr_whitelist: token.cidrWhitelist,
      readonly: token.isReadonly,
      automation: token.isAutomation,
      allowedPackages: token.allowedPackages,
      allowedScopes: token.allowedScopes,
      created: token.createdAt,
      updated: token.updatedAt,
    };
  }

  @HTTPMethod({
    path: '/-/npm/v1/tokens/gat',
    method: HTTPMethodEnum.GET,
  })
  async listGranularTokens() {
    const user = await this.ensureWebUser();
    const tokens = await this.userRepository.listTokens(user.userId);
    const granularTokens = tokens.filter(token => isGranularToken(token));

    for (const token of granularTokens) {
      const packages = await this.tokenService.listTokenPackages(token);
      if (Array.isArray(packages)) {
        token.allowedPackages = packages.map(p => getFullname(p.scope, p.name));
      }
    }
    const objects = granularTokens.map(token => {
      const { name, description, expiredAt, allowedPackages, allowedScopes } = token;
      return {
        name,
        description,
        allowedPackages,
        allowedScopes,
        expiredAt,
        token: token.tokenMark,
        key: token.tokenKey,
        cidr_whitelist: token.cidrWhitelist,
        readonly: token.isReadonly,
        automation: token.isAutomation,
        created: token.createdAt,
        updated: token.updatedAt,
      };
    });
    // TODO: paging, urls: { next: string }
    return { objects, total: objects.length, urls: {} };
    return { objects, total: granularTokens.length, urls: {} };
  }

  @HTTPMethod({
    path: '/-/npm/v1/tokens/gat/:tokenKey',
    method: HTTPMethodEnum.DELETE,
  })
  async removeGranularToken(@HTTPParam() tokenKey: string) {
    const user = await this.ensureWebUser();
    await this.userService.removeToken(user.userId, tokenKey);
  }
}

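The granular-token routes above expect a JSON body that validates against GranularTokenOptionsRule and an already-authenticated web session (ensureWebUser relies on AuthAdapter.ensureCurrentUser). A minimal sketch of creating such a token from a web client follows; the registry host and the assumption that a session cookie carries the login are illustrative, not part of this diff:

  // granular-token-example.ts, illustrative only
  const registry = 'https://registry.example.com'; // assumed registry web host

  // Body shape mirrors GranularTokenOptionsRule: name and expires are required,
  // the remaining fields are optional.
  const body = {
    name: 'ci-readonly-token',
    description: 'read-only token for CI',
    readonly: true,
    allowedScopes: ['@cnpm'],
    expires: 30, // must be between 1 and 365 per GranularTokenOptionsRule
  };

  async function createGranularToken() {
    const res = await fetch(`${registry}/-/npm/v1/tokens/gat`, {
      method: 'POST',
      credentials: 'include', // assumed: session cookie from the registry web login
      headers: { 'content-type': 'application/json' },
      body: JSON.stringify(body),
    });
    if (!res.ok) throw new Error(`create token failed: ${res.status}`);
    return await res.json(); // { name, token, key, ... } as returned by createGranularToken
  }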
@@ -159,7 +159,7 @@ export class UserController extends AbstractController {
  async whoami(@Context() ctx: EggContext) {
    const authorizedUser = await this.userRoleManager.requiredAuthorizedUser(ctx, 'read');
    return {
      username: authorizedUser.name,
      username: authorizedUser.displayName,
    };
  }

61
app/port/controller/convertor/HookConvertor.ts
Normal file
@@ -0,0 +1,61 @@
import { Hook } from '../../../core/entity/Hook';
import { TriggerHookTask } from '../../../core/entity/Task';
import { User } from '../../../core/entity/User';
import { HookType } from '../../../common/enum/Hook';

export interface HookVo {
  id: string;
  username: string;
  name: string;
  endpoint: string;
  secret: string;
  type: HookType;
  created: Date;
  updated: Date;
  delivered: boolean,
  last_delivery: Date | null,
  response_code: number,
  status: 'active',
}

export interface DeleteHookVo {
  id: string;
  username: string;
  name: string;
  endpoint: string;
  secret: string;
  type: HookType;
  created: Date;
  updated: Date;
  delivered: boolean,
  last_delivery: Date | null,
  response_code: number,
  status: 'active',
  deleted: boolean,
}

export class HookConvertor {
  static convertToHookVo(hook: Hook, user: User, task?: TriggerHookTask | null | undefined): HookVo {
    return {
      id: hook.hookId,
      username: user.name,
      name: hook.name,
      endpoint: hook.endpoint,
      secret: hook.secret,
      type: hook.type,
      created: hook.createdAt,
      updated: hook.updatedAt,
      delivered: !!task,
      last_delivery: task?.updatedAt || null,
      response_code: task?.data.responseStatus || 0,
      status: 'active',
    };
  }

  static convertToDeleteHookVo(hook: Hook, user: User, task?: TriggerHookTask | null): DeleteHookVo {
    const vo = HookConvertor.convertToHookVo(hook, user, task);
    return Object.assign(vo, {
      deleted: true,
    });
  }
}
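HookConvertor maps the internal Hook, User and TriggerHookTask entities to the webhook response shape (delivered, last_delivery, response_code, status). A hypothetical usage sketch from a controller that has already loaded those entities; the helper names below are assumptions for illustration and do not exist in the diff:

  // hook-convertor-usage.ts, illustrative only, assumed to live next to HookConvertor.ts
  import { Hook } from '../../../core/entity/Hook';
  import { User } from '../../../core/entity/User';
  import { TriggerHookTask } from '../../../core/entity/Task';
  import { HookConvertor, HookVo, DeleteHookVo } from './HookConvertor';

  // Build the response body for a hook read/create endpoint.
  export function buildHookResponse(hook: Hook, user: User, task?: TriggerHookTask | null): HookVo {
    return HookConvertor.convertToHookVo(hook, user, task);
  }

  // Build the response body after deleting a hook: same fields plus deleted: true.
  export function buildDeleteHookResponse(hook: Hook, user: User, task?: TriggerHookTask | null): DeleteHookVo {
    return HookConvertor.convertToDeleteHookVo(hook, user, task);
  }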
@@ -14,6 +14,8 @@ import {
import { AbstractController } from '../AbstractController';
import { FULLNAME_REG_STRING } from '../../../common/PackageUtil';
import { PackageManagerService } from '../../../core/service/PackageManagerService';
import { Package } from '../../../core/entity/Package';
import { PackageVersion } from '../../../core/entity/PackageVersion';

@HTTPController()
export class RemovePackageVersionController extends AbstractController {
@@ -21,29 +23,75 @@ export class RemovePackageVersionController extends AbstractController {
  private packageManagerService: PackageManagerService;

  // https://github.com/npm/cli/blob/latest/lib/commands/unpublish.js#L101
  // https://github.com/npm/libnpmpublish/blob/main/unpublish.js#L43
  // https://github.com/npm/libnpmpublish/blob/main/unpublish.js#L84
  // await npmFetch(`${tarballUrl}/-rev/${_rev}`, {
  //   ...opts,
  //   method: 'DELETE',
  //   ignoreBody: true,
  // })
  @HTTPMethod({
    // DELETE /@cnpm/foo/-/foo-4.0.0.tgz/-rev/61af62d6295fcbd9f8f1c08f
    // DELETE /:fullname/-/:filenameWithVersion.tgz/-rev/:rev
    path: `/:fullname(${FULLNAME_REG_STRING})/-/:filenameWithVersion.tgz/-rev/:rev`,
    method: HTTPMethodEnum.DELETE,
  })
  async remove(@Context() ctx: EggContext, @HTTPParam() fullname: string, @HTTPParam() filenameWithVersion: string) {
  async removeByTarballUrl(@Context() ctx: EggContext, @HTTPParam() fullname: string, @HTTPParam() filenameWithVersion: string) {
    const npmCommand = ctx.get('npm-command');
    if (npmCommand !== 'unpublish') {
      throw new BadRequestError('Only allow "unpublish" npm-command');
    }
    const pkg = await this.getPackageEntityAndRequiredMaintainer(ctx, fullname);
    const ensureRes = await this.ensurePublishAccess(ctx, fullname, true);
    const pkg = ensureRes.pkg!;
    const version = this.getAndCheckVersionFromFilename(ctx, fullname, filenameWithVersion);
    const packageVersion = await this.getPackageVersionEntity(pkg, version);
    await this.#removePackageVersion(pkg, packageVersion);
    return { ok: true };
  }

  // https://github.com/npm/libnpmpublish/blob/main/unpublish.js#L43
  // npm http fetch DELETE 404 http://localhost:62649/@cnpm%2ffoo/-rev/1-642f6e8b52d7b8eb03aef23f
  // await npmFetch(`${pkgUri}/-rev/${pkg._rev}`, {
  //   ...opts,
  //   method: 'DELETE',
  //   ignoreBody: true,
  // })
  @HTTPMethod({
    // DELETE /@cnpm/foo/-rev/61af62d6295fcbd9f8f1c08f
    // DELETE /:fullname/-rev/:rev
    path: `/:fullname(${FULLNAME_REG_STRING})/-rev/:rev`,
    method: HTTPMethodEnum.DELETE,
  })
  async removeByPkgUri(@Context() ctx: EggContext, @HTTPParam() fullname: string) {
    const npmCommand = ctx.get('npm-command');
    if (npmCommand !== 'unpublish') {
      throw new BadRequestError('Only allow "unpublish" npm-command');
    }
    const ensureRes = await this.ensurePublishAccess(ctx, fullname, true);
    const pkg = ensureRes.pkg!;
    // try to remove the latest version first
    const packageTag = await this.packageRepository.findPackageTag(pkg.packageId, 'latest');
    let packageVersion: PackageVersion | null = null;
    if (packageTag) {
      packageVersion = await this.packageRepository.findPackageVersion(pkg.packageId, packageTag.version);
    }
    if (packageVersion) {
      await this.#removePackageVersion(pkg, packageVersion);
    } else {
      this.logger.info('[PackageController:unpublishPackage] %s, packageId: %s',
        pkg.fullname, pkg.packageId);
      await this.packageManagerService.unpublishPackage(pkg);
    }
    return { ok: true };
  }

  async #removePackageVersion(pkg: Package, packageVersion: PackageVersion) {
    // https://docs.npmjs.com/policies/unpublish
    // can unpublish anytime within the first 72 hours after publishing
    if (pkg.isPrivate && Date.now() - packageVersion.publishTime.getTime() >= 3600000 * 72) {
      throw new ForbiddenError(`${pkg.fullname}@${version} unpublish is not allowed after 72 hours of released`);
      throw new ForbiddenError(`${pkg.fullname}@${packageVersion.version} unpublish is not allowed after 72 hours of released`);
    }
    ctx.logger.info('[PackageController:removeVersion] %s@%s, packageVersionId: %s',
      pkg.fullname, version, packageVersion.packageVersionId);
    this.logger.info('[PackageController:removeVersion] %s@%s, packageVersionId: %s',
      pkg.fullname, packageVersion.version, packageVersion.packageVersionId);
    await this.packageManagerService.removePackageVersion(pkg, packageVersion);
    return { ok: true };
  }
}

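For context, the two DELETE routes above mirror how the npm CLI unpublishes: a version-specific request against the tarball URL plus -rev (removeByTarballUrl), and a package-level request against the package URI plus -rev (removeByPkgUri), both accepted only when the npm-command header is "unpublish". A rough client-side sketch follows; the registry host, token, package name, version and rev values are placeholders taken from or modeled on the route comments, not part of this diff:

  // unpublish-example.ts, illustrative only
  const registry = 'https://registry.example.com';   // assumed registry host
  const token = process.env.NPM_TOKEN;                // assumed publish-capable token
  const rev = '61af62d6295fcbd9f8f1c08f';             // example _rev from the route comments

  async function del(path: string) {
    const res = await fetch(`${registry}${path}`, {
      method: 'DELETE',
      headers: {
        authorization: `Bearer ${token}`,
        'npm-command': 'unpublish', // required, otherwise the controller rejects with 400
      },
    });
    return res.ok;
  }

  async function main() {
    // Unpublish a single version (handled by removeByTarballUrl):
    await del(`/@cnpm/foo/-/foo-4.0.0.tgz/-rev/${rev}`);
    // Unpublish the whole package, or its latest version first (handled by removeByPkgUri):
    await del(`/@cnpm/foo/-rev/${rev}`);
  }

  main();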
Some files were not shown because too many files have changed in this diff.