Compare commits
308 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
95543b1f9d | ||
|
|
d7de1cded8 | ||
|
|
0f11e7730a | ||
|
|
0d32146562 | ||
|
|
6c29f084b2 | ||
|
|
01385f4954 | ||
|
|
098c75a4ca | ||
|
|
c3059c7138 | ||
|
|
0c987a7225 | ||
|
|
89f1250927 | ||
|
|
e1848c71ec | ||
|
|
e8a3ee5208 | ||
|
|
3671c52513 | ||
|
|
35a7be34e8 | ||
|
|
957f43dab1 | ||
|
|
4213bd8823 | ||
|
|
f3f3584d62 | ||
|
|
80663505cb | ||
|
|
e5162f20aa | ||
|
|
b6c96defa4 | ||
|
|
3e1dbd819c | ||
|
|
faee3b96f1 | ||
|
|
606c983363 | ||
|
|
cb931417c1 | ||
|
|
324511d159 | ||
|
|
2e51399db1 | ||
|
|
5b1da74746 | ||
|
|
c7df471c0e | ||
|
|
c16ec7bad0 | ||
|
|
17b1b81eea | ||
|
|
bb0bdef070 | ||
|
|
87da4c359c | ||
|
|
8587d27d85 | ||
|
|
13b2da0411 | ||
|
|
3aa4c688b5 | ||
|
|
0a5500bafd | ||
|
|
f94531cf35 | ||
|
|
c3a22fd319 | ||
|
|
b23f3fe22e | ||
|
|
3dbff542ba | ||
|
|
1a4a1c5cf5 | ||
|
|
2cc0f361fb | ||
|
|
722a5d70b9 | ||
|
|
64951041eb | ||
|
|
1001e1ffa2 | ||
|
|
b4cf36e289 | ||
|
|
bed4778cbc | ||
|
|
5b96443cf1 | ||
|
|
e4cd535486 | ||
|
|
5b98c9dd2b | ||
|
|
e932624c6f | ||
|
|
69ef574527 | ||
|
|
5a42764806 | ||
|
|
eaed6fe478 | ||
|
|
8b341d2299 | ||
|
|
87b6cbedef | ||
|
|
3e395405c1 | ||
|
|
b8c7c06536 | ||
|
|
76e3f267c1 | ||
|
|
12aa425c26 | ||
|
|
01f393eb94 | ||
|
|
490dce3ad2 | ||
|
|
cb3768c82f | ||
|
|
54e3335abd | ||
|
|
a1a3859d75 | ||
|
|
1239dc9f49 | ||
|
|
9efd2932e8 | ||
|
|
f6f53149a1 | ||
|
|
2e90486454 | ||
|
|
297bd7a745 | ||
|
|
b8f2ac5f85 | ||
|
|
c5f1642a1f | ||
|
|
2d8bad3ca2 | ||
|
|
81620e3ed5 | ||
|
|
de3a6153b0 | ||
|
|
3203c64c48 | ||
|
|
d6c243cf6b | ||
|
|
0a6eab325e | ||
|
|
a17aed8fb1 | ||
|
|
770fc7fdaf | ||
|
|
de97428ffd | ||
|
|
22da5c7f70 | ||
|
|
f4f09c2d66 | ||
|
|
3dce867d3a | ||
|
|
af3672ebdb | ||
|
|
e89d8528df | ||
|
|
58a1a9c2af | ||
|
|
ce0fd9d9c2 | ||
|
|
080035f7bc | ||
|
|
ffd90473f7 | ||
|
|
facb26d192 | ||
|
|
53060c817a | ||
|
|
9f4b8ebb00 | ||
|
|
5c132b882b | ||
|
|
a1e1072d1a | ||
|
|
5d5f40ba26 | ||
|
|
b322f2c9ec | ||
|
|
dd5ee4ff30 | ||
|
|
0c9a515874 | ||
|
|
884ff50a2f | ||
|
|
1517750513 | ||
|
|
64beee1ba1 | ||
|
|
acb988b553 | ||
|
|
ffe723e65f | ||
|
|
ed4d5d07ad | ||
|
|
b92354d280 | ||
|
|
f51f6028ac | ||
|
|
3f3751b2d5 | ||
|
|
7938919d81 | ||
|
|
52c2494355 | ||
|
|
0d4fec90dd | ||
|
|
b8d055a74e | ||
|
|
5c8b024a0a | ||
|
|
914aee2560 | ||
|
|
bc068d165c | ||
|
|
9ffb09eaa8 | ||
|
|
cbefb5c6d0 | ||
|
|
0a64698ec0 | ||
|
|
1922bf2f76 | ||
|
|
47da2f40cf | ||
|
|
305457777e | ||
|
|
ae88145317 | ||
|
|
069afb98cc | ||
|
|
b6c781ec25 | ||
|
|
05b3b798b6 | ||
|
|
e72e396e3c | ||
|
|
d095d3f48c | ||
|
|
b0f7bf0967 | ||
|
|
02a1ee35d7 | ||
|
|
1e9d710b0f | ||
|
|
d18981e658 | ||
|
|
5103409f40 | ||
|
|
ea4823c017 | ||
|
|
4427a4fca5 | ||
|
|
455fc3a444 | ||
|
|
386974272d | ||
|
|
9f4f1f1e28 | ||
|
|
aba2b36291 | ||
|
|
f1fc2492b6 | ||
|
|
c23a6699f3 | ||
|
|
1850c8b2d4 | ||
|
|
c70fdccc04 | ||
|
|
a580b05004 | ||
|
|
43636bd80b | ||
|
|
3a3aa818a3 | ||
|
|
4012f584ba | ||
|
|
2780c532e1 | ||
|
|
638a3da767 | ||
|
|
acffb14ea0 | ||
|
|
feba680795 | ||
|
|
57226c57a6 | ||
|
|
167e37c241 | ||
|
|
9bb12fde12 | ||
|
|
1d128e280c | ||
|
|
f240799fa2 | ||
|
|
dd15b08fa2 | ||
|
|
6de0876d35 | ||
|
|
63a8473af7 | ||
|
|
9b01383210 | ||
|
|
b808ebcd60 | ||
|
|
71cc3381d7 | ||
|
|
914b59c7ef | ||
|
|
ac4709a7be | ||
|
|
99a5ef1715 | ||
|
|
2edbec6008 | ||
|
|
7158e66c9f | ||
|
|
4facf90ae0 | ||
|
|
12650acf72 | ||
|
|
2b812a161e | ||
|
|
d6f0e1d866 | ||
|
|
efac8a97e2 | ||
|
|
99a86600db | ||
|
|
b0cd0ba387 | ||
|
|
d987bf4a55 | ||
|
|
91aea0f106 | ||
|
|
75d3a66b5c | ||
|
|
44ca113931 | ||
|
|
f7c49e55fa | ||
|
|
e6ed2215a4 | ||
|
|
526b66a93c | ||
|
|
0a0c4e72ae | ||
|
|
9a7994090b | ||
|
|
c1de249445 | ||
|
|
bd49917b86 | ||
|
|
381a10cd6e | ||
|
|
ca6ce4e860 | ||
|
|
571d265065 | ||
|
|
753e519f17 | ||
|
|
038736dd60 | ||
|
|
317e24da55 | ||
|
|
9664504151 | ||
|
|
dcc5509dac | ||
|
|
c8f5ee82f1 | ||
|
|
cacf5e9da3 | ||
|
|
0b6223882e | ||
|
|
9beaf4164c | ||
|
|
96648fddaf | ||
|
|
6f9f8abc16 | ||
|
|
a2470650d5 | ||
|
|
8b1f526966 | ||
|
|
e442580b81 | ||
|
|
8a927fcc2d | ||
|
|
05301166a2 | ||
|
|
c5c6145fda | ||
|
|
3383d7f403 | ||
|
|
ff00e42668 | ||
|
|
ade9305342 | ||
|
|
a51891d3b9 | ||
|
|
65d6f4489f | ||
|
|
8366ee70a4 | ||
|
|
6bfbe35c65 | ||
|
|
cedb959f65 | ||
|
|
101c9b30b5 | ||
|
|
cdca770a0b | ||
|
|
668eed2d50 | ||
|
|
dbf5b5248a | ||
|
|
21cbc1849f | ||
|
|
67f1a2476d | ||
|
|
c0f96d72e5 | ||
|
|
abad15b8e0 | ||
|
|
58d19b17f0 | ||
|
|
b94c8efd6c | ||
|
|
c71d185ee1 | ||
|
|
468f9e4e36 | ||
|
|
ebc212c1c4 | ||
|
|
049b186a0e | ||
|
|
6664189a91 | ||
|
|
64bb78cf8a | ||
|
|
f7d9d49b4c | ||
|
|
4bc0c9ca59 | ||
|
|
ae83136e62 | ||
|
|
6b4f9af947 | ||
|
|
a6737e6150 | ||
|
|
2ec6bd94b2 | ||
|
|
26d2ef2124 | ||
|
|
9004ce7a1c | ||
|
|
6e326790c4 | ||
|
|
4644c1e788 | ||
|
|
838eecff2d | ||
|
|
08678c70db | ||
|
|
0f7aa4a50f | ||
|
|
05445b49c3 | ||
|
|
ad86be312e | ||
|
|
039a56f471 | ||
|
|
3310b0e435 | ||
|
|
a0096685fc | ||
|
|
b0e0a2d464 | ||
|
|
d5bf9ceb1b | ||
|
|
10b97c8697 | ||
|
|
5a8a4eb10c | ||
|
|
7e176f2f42 | ||
|
|
6cc2f2d830 | ||
|
|
dddb10e510 | ||
|
|
0c4a52d220 | ||
|
|
c3e481c5c4 | ||
|
|
6c519f73ce | ||
|
|
87ca86f1db | ||
|
|
fcca3c30ce | ||
|
|
37b50842fd | ||
|
|
e62fa26788 | ||
|
|
64dfcb35a4 | ||
|
|
acfd66748f | ||
|
|
072e146e5b | ||
|
|
8e1f4ca880 | ||
|
|
603bb82b1f | ||
|
|
0179ef364a | ||
|
|
f03d48e511 | ||
|
|
18ef7f49af | ||
|
|
9ea70088fb | ||
|
|
5bedb25f9d | ||
|
|
31946ba10e | ||
|
|
cde4f03c30 | ||
|
|
c3c7b391c0 | ||
|
|
079176926d | ||
|
|
e01d39ef4e | ||
|
|
22d401ee1f | ||
|
|
3cdb7cc9df | ||
|
|
5ad775e411 | ||
|
|
707a1d3809 | ||
|
|
9fcbb00406 | ||
|
|
413ec5685e | ||
|
|
f66057794e | ||
|
|
9a5e8c387a | ||
|
|
d24e3bd235 | ||
|
|
d6d72650dd | ||
|
|
4596b21271 | ||
|
|
c33f10e0ab | ||
|
|
88b6afb66e | ||
|
|
6d156a5c96 | ||
|
|
89f6b989c1 | ||
|
|
5e4d988c2f | ||
|
|
9b2dc41134 | ||
|
|
3f95d0fadd | ||
|
|
6dd241d690 | ||
|
|
868c8d305e | ||
|
|
bcf67c4cea | ||
|
|
941b277244 | ||
|
|
7f858482f7 | ||
|
|
6e45ac5a63 | ||
|
|
10d7a8499e | ||
|
|
2b2e13c01d | ||
|
|
ffe8fa7d19 | ||
|
|
39de1c7df2 | ||
|
|
73b4383f5c | ||
|
|
9916bd9ecf | ||
|
|
0ac275a348 | ||
|
|
3f9c91c430 | ||
|
|
c7106008d9 |
36
.docker/alpine/Dockerfile
Normal file
36
.docker/alpine/Dockerfile
Normal file
@@ -0,0 +1,36 @@
|
||||
FROM node:22-alpine
|
||||
|
||||
# Create app directory
|
||||
WORKDIR /usr/src/app
|
||||
|
||||
# Install app dependencies
|
||||
COPY . .
|
||||
|
||||
RUN .docker/build.sh
|
||||
|
||||
ENV NODE_ENV=production \
|
||||
EGG_SERVER_ENV=prod \
|
||||
CNPMCORE_CONFIG_REGISTRY= \
|
||||
CNPMCORE_CONFIG_SOURCE_REGISTRY=https://registry.npmmirror.com \
|
||||
CNPMCORE_CONFIG_SOURCE_REGISTRY_IS_CNPM=true \
|
||||
CNPMCORE_DATABASE_TYPE= \
|
||||
CNPMCORE_DATABASE_NAME= \
|
||||
CNPMCORE_DATABASE_HOST= \
|
||||
CNPMCORE_DATABASE_PORT=3306 \
|
||||
CNPMCORE_DATABASE_USER= \
|
||||
CNPMCORE_DATABASE_PASSWORD= \
|
||||
CNPMCORE_REDIS_HOST= \
|
||||
CNPMCORE_REDIS_PORT=6379 \
|
||||
CNPMCORE_REDIS_PASSWORD= \
|
||||
CNPMCORE_REDIS_DB= \
|
||||
CNPMCORE_NFS_TYPE=s3 \
|
||||
CNPMCORE_NFS_S3_CLIENT_ENDPOINT= \
|
||||
CNPMCORE_NFS_S3_CLIENT_BUCKET= \
|
||||
CNPMCORE_NFS_S3_CLIENT_ID= \
|
||||
CNPMCORE_NFS_S3_CLIENT_SECRET= \
|
||||
CNPMCORE_NFS_S3_CLIENT_FORCE_PATH_STYLE=true \
|
||||
CNPMCORE_NFS_S3_CLIENT_DISABLE_URL=true \
|
||||
TZ=Asia/Shanghai
|
||||
|
||||
EXPOSE 7001
|
||||
CMD ["npm", "run", "start:foreground"]
|
||||
7
.docker/build.sh
Executable file
7
.docker/build.sh
Executable file
@@ -0,0 +1,7 @@
|
||||
#!/bin/sh
|
||||
|
||||
node -v && npm -v \
|
||||
&& npm install -g npminstall --registry=https://registry.npmmirror.com \
|
||||
&& npminstall -c \
|
||||
&& npm run tsc \
|
||||
&& npmupdate -c --production
|
||||
36
.docker/debian/Dockerfile
Normal file
36
.docker/debian/Dockerfile
Normal file
@@ -0,0 +1,36 @@
|
||||
FROM node:22-bookworm-slim
|
||||
|
||||
# Create app directory
|
||||
WORKDIR /usr/src/app
|
||||
|
||||
# Install app dependencies
|
||||
COPY . .
|
||||
|
||||
RUN .docker/build.sh
|
||||
|
||||
ENV NODE_ENV=production \
|
||||
EGG_SERVER_ENV=prod \
|
||||
CNPMCORE_CONFIG_REGISTRY= \
|
||||
CNPMCORE_CONFIG_SOURCE_REGISTRY=https://registry.npmmirror.com \
|
||||
CNPMCORE_CONFIG_SOURCE_REGISTRY_IS_CNPM=true \
|
||||
CNPMCORE_DATABASE_TYPE= \
|
||||
CNPMCORE_DATABASE_NAME= \
|
||||
CNPMCORE_DATABASE_HOST= \
|
||||
CNPMCORE_DATABASE_PORT=3306 \
|
||||
CNPMCORE_DATABASE_USER= \
|
||||
CNPMCORE_DATABASE_PASSWORD= \
|
||||
CNPMCORE_REDIS_HOST= \
|
||||
CNPMCORE_REDIS_PORT=6379 \
|
||||
CNPMCORE_REDIS_PASSWORD= \
|
||||
CNPMCORE_REDIS_DB= \
|
||||
CNPMCORE_NFS_TYPE=s3 \
|
||||
CNPMCORE_NFS_S3_CLIENT_ENDPOINT= \
|
||||
CNPMCORE_NFS_S3_CLIENT_BUCKET= \
|
||||
CNPMCORE_NFS_S3_CLIENT_ID= \
|
||||
CNPMCORE_NFS_S3_CLIENT_SECRET= \
|
||||
CNPMCORE_NFS_S3_CLIENT_FORCE_PATH_STYLE=true \
|
||||
CNPMCORE_NFS_S3_CLIENT_DISABLE_URL=true \
|
||||
TZ=Asia/Shanghai
|
||||
|
||||
EXPOSE 7001
|
||||
CMD ["npm", "run", "start:foreground"]
|
||||
50
.env.example
Normal file
50
.env.example
Normal file
@@ -0,0 +1,50 @@
|
||||
# CNPMCORE_DATABASE_TYPE=MySQL
|
||||
# CNPMCORE_DATABASE_USER=root
|
||||
# CNPMCORE_DATABASE_PASSWORD=
|
||||
# CNPMCORE_DATABASE_NAME=cnpmcore
|
||||
|
||||
# CNPMCORE_DATABASE_TYPE=PostgreSQL
|
||||
# CNPMCORE_DATABASE_USER=postgres
|
||||
# CNPMCORE_DATABASE_PASSWORD=postgres
|
||||
# CNPMCORE_DATABASE_NAME=cnpmcore
|
||||
|
||||
# CNPMCORE_CONFIG_ENABLE_ES=true
|
||||
# CNPMCORE_CONFIG_ES_CLIENT_NODE=http://localhost:9200
|
||||
# CNPMCORE_CONFIG_ES_CLIENT_AUTH_USERNAME=elastic
|
||||
# CNPMCORE_CONFIG_ES_CLIENT_AUTH_PASSWORD=abcdef
|
||||
|
||||
# https://github.com/cnpm/cnpmcore/blob/next/docs/elasticsearch-setup.md#%E6%96%B0%E5%BB%BA-env-%E6%96%87%E4%BB%B6
|
||||
# Password for the 'elastic' user (at least 6 characters)
|
||||
ELASTIC_PASSWORD="abcdef"
|
||||
|
||||
# Password for the 'kibana_system' user (at least 6 characters)
|
||||
KIBANA_PASSWORD="abcdef"
|
||||
|
||||
# Version of Elastic products
|
||||
STACK_VERSION=8.7.1
|
||||
# enable for arm64
|
||||
# STACK_VERSION_ARM64=-arm64
|
||||
# STACK_PLATFORM=linux/arm64
|
||||
|
||||
# Set the cluster name
|
||||
CLUSTER_NAME=docker-cluster
|
||||
|
||||
# Set to 'basic' or 'trial' to automatically start the 30-day trial
|
||||
LICENSE=basic
|
||||
#LICENSE=trial
|
||||
|
||||
# Port to expose Elasticsearch HTTP API to the host
|
||||
ES_PORT=9200
|
||||
#ES_PORT=127.0.0.1:9200
|
||||
|
||||
# Port to expose Kibana to the host
|
||||
KIBANA_PORT=5601
|
||||
#KIBANA_PORT=80
|
||||
|
||||
# Increase or decrease based on the available host memory (in bytes)
|
||||
ES_MEM_LIMIT=1073741824
|
||||
KB_MEM_LIMIT=1073741824
|
||||
LS_MEM_LIMIT=1073741824
|
||||
|
||||
# SAMPLE Predefined Key only to be used in POC environments
|
||||
ENCRYPTION_KEY=c34d38b3a14956121ff2170e5030b471551370178f43e5626eec58b04a30fae2
|
||||
@@ -1,7 +0,0 @@
|
||||
app/proxy*
|
||||
**/*.d.ts
|
||||
node_modules/
|
||||
dist/
|
||||
coverage/
|
||||
mocks/
|
||||
.react_entries/
|
||||
564
.github/copilot-instructions.md
vendored
Normal file
564
.github/copilot-instructions.md
vendored
Normal file
@@ -0,0 +1,564 @@
|
||||
# cnpmcore - Private NPM Registry for Enterprise
|
||||
|
||||
cnpmcore is a TypeScript-based private NPM registry implementation built with Egg.js framework. It provides enterprise-grade package management with support for MySQL/PostgreSQL databases, Redis caching, and optional Elasticsearch.
|
||||
|
||||
**ALWAYS reference these instructions first** and fallback to search or bash commands only when you encounter unexpected information that does not match the information here.
|
||||
|
||||
## Code Style and Conventions
|
||||
|
||||
### Linting and Formatting
|
||||
- **Linter**: Oxlint (fast Rust-based linter)
|
||||
- **Formatter**: Prettier with specific configuration
|
||||
- **Pre-commit hooks**: Husky + lint-staged automatically format and lint on commit
|
||||
|
||||
**Code Style Rules:**
|
||||
```javascript
|
||||
// From .prettierrc
|
||||
{
|
||||
"singleQuote": true, // Use single quotes
|
||||
"trailingComma": "es5", // ES5 trailing commas
|
||||
"tabWidth": 2, // 2-space indentation
|
||||
"printWidth": 120, // 120 character line width
|
||||
"arrowParens": "avoid" // Avoid parens when possible
|
||||
}
|
||||
|
||||
// From .oxlintrc.json
|
||||
{
|
||||
"max-params": 6, // Maximum 6 function parameters
|
||||
"no-console": "warn", // Warn on console usage
|
||||
"import/no-anonymous-default-export": "error"
|
||||
}
|
||||
```
|
||||
|
||||
**Linting Commands:**
|
||||
```bash
|
||||
npm run lint # Check for linting errors
|
||||
npm run lint:fix # Auto-fix linting issues
|
||||
npm run typecheck # TypeScript type checking without build
|
||||
```
|
||||
|
||||
### TypeScript Conventions
|
||||
- Use strict TypeScript with comprehensive type definitions
|
||||
- Avoid `any` types - use proper typing or `unknown`
|
||||
- Export types and interfaces for reusability
|
||||
- Use ES modules (`import/export`) syntax throughout
|
||||
|
||||
### Testing Conventions
|
||||
- Test files use `.test.ts` suffix
|
||||
- Use `@eggjs/mock` for mocking and testing
|
||||
- Tests organized to mirror source structure in `test/` directory
|
||||
- Use `assert` from `node:assert/strict` for assertions
|
||||
- Mock external dependencies using `mock()` from `@eggjs/mock`
|
||||
|
||||
**Test Naming Pattern:**
|
||||
```typescript
|
||||
describe('test/path/to/SourceFile.test.ts', () => {
|
||||
describe('[HTTP_METHOD /api/path] functionName()', () => {
|
||||
it('should handle expected behavior', async () => {
|
||||
// Test implementation
|
||||
});
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
## Domain-Driven Design (DDD) Architecture
|
||||
|
||||
cnpmcore follows **Domain-Driven Design** principles with clear separation of concerns:
|
||||
|
||||
### Layer Architecture (Dependency Flow)
|
||||
|
||||
```
|
||||
Controller (HTTP Interface Layer)
|
||||
↓ depends on
|
||||
Service (Business Logic Layer)
|
||||
↓ depends on
|
||||
Repository (Data Access Layer)
|
||||
↓ depends on
|
||||
Model (ORM/Database Layer)
|
||||
|
||||
Entity (Domain Models - no dependencies, pure business logic)
|
||||
Common (Utilities and Adapters - available to all layers)
|
||||
```
|
||||
|
||||
### Layer Responsibilities
|
||||
|
||||
**Controller Layer** (`app/port/controller/`):
|
||||
- HTTP request/response handling
|
||||
- Request validation using `@eggjs/typebox-validate`
|
||||
- User authentication and authorization
|
||||
- **NO business logic** - delegate to Services
|
||||
- Inheritance: `YourController extends AbstractController extends MiddlewareController`
|
||||
|
||||
**Service Layer** (`app/core/service/`):
|
||||
- Core business logic implementation
|
||||
- Orchestration of multiple repositories and entities
|
||||
- Transaction management
|
||||
- Event publishing
|
||||
- NO HTTP concerns, NO direct database access
|
||||
|
||||
**Repository Layer** (`app/repository/`):
|
||||
- Data access and persistence
|
||||
- CRUD operations on Models
|
||||
- Query building and optimization
|
||||
- NO business logic
|
||||
|
||||
**Entity Layer** (`app/core/entity/`):
|
||||
- Domain models with business behavior
|
||||
- Pure business logic (no infrastructure dependencies)
|
||||
- Immutable data structures where possible
|
||||
- Rich domain objects (not anemic models)
|
||||
|
||||
**Model Layer** (`app/repository/model/`):
|
||||
- ORM definitions using Leoric
|
||||
- Database schema mapping
|
||||
- Table and column definitions
|
||||
- NO business logic
|
||||
|
||||
### Repository Method Naming Convention
|
||||
|
||||
**ALWAYS follow these naming patterns:**
|
||||
- `findSomething` - Query a single model/entity
|
||||
- `saveSomething` - Save (create or update) a model
|
||||
- `removeSomething` - Delete a model
|
||||
- `listSomethings` - Query multiple models (use plural)
|
||||
|
||||
### Request Validation Trilogy
|
||||
|
||||
**ALWAYS validate requests in this exact order:**
|
||||
|
||||
1. **Request Parameter Validation** - First line of defense
|
||||
```typescript
|
||||
// Use @eggjs/typebox-validate for type-safe validation
|
||||
// See app/port/typebox.ts for examples
|
||||
```
|
||||
|
||||
2. **User Authentication & Token Permissions**
|
||||
```typescript
|
||||
// Token roles: 'read' | 'publish' | 'setting'
|
||||
const authorizedUser = await this.userRoleManager.requiredAuthorizedUser(ctx, 'publish');
|
||||
```
|
||||
|
||||
3. **Resource Authorization** - Prevent horizontal privilege escalation
|
||||
```typescript
|
||||
// Example: Ensure user is package maintainer
|
||||
await this.userRoleManager.requiredPackageMaintainer(pkg, authorizedUser);
|
||||
// Or use convenience method
|
||||
const { pkg } = await this.ensurePublishAccess(ctx, fullname);
|
||||
```
|
||||
|
||||
### Modifying Database Models
|
||||
|
||||
When changing a Model, update **all 3 locations**:
|
||||
1. SQL migration files: `sql/mysql/*.sql` AND `sql/postgresql/*.sql`
|
||||
2. ORM Model: `app/repository/model/*.ts`
|
||||
3. Domain Entity: `app/core/entity/*.ts`
|
||||
|
||||
**NEVER auto-generate SQL migrations** - manual review is required for safety.
|
||||
|
||||
## Prerequisites and Environment Setup
|
||||
|
||||
- **Node.js**: Version 20.18.0 or higher (required by engines field in package.json)
|
||||
- **Database**: MySQL 5.7+ or PostgreSQL 17+
|
||||
- **Cache**: Redis 6+
|
||||
- **Optional**: Elasticsearch 8.x for enhanced search capabilities
|
||||
|
||||
## Working Effectively
|
||||
|
||||
### Bootstrap and Build
|
||||
```bash
|
||||
# Install dependencies (takes ~2 minutes)
|
||||
npm install
|
||||
|
||||
# Copy environment configuration
|
||||
cp .env.example .env
|
||||
|
||||
# Lint code (very fast, <1 second)
|
||||
npm run lint
|
||||
|
||||
# Fix linting issues
|
||||
npm run lint:fix
|
||||
|
||||
# Build TypeScript (takes ~6 seconds)
|
||||
npm run tsc
|
||||
|
||||
# Production build (takes ~6 seconds)
|
||||
npm run tsc:prod
|
||||
```
|
||||
|
||||
### Database Setup - MySQL (Recommended for Development)
|
||||
```bash
|
||||
# Start MySQL + Redis services via Docker (takes ~1 minute to pull images initially)
|
||||
docker compose -f docker-compose.yml up -d
|
||||
|
||||
# Verify services are running
|
||||
docker compose ps
|
||||
|
||||
# Initialize database (takes <2 seconds)
|
||||
CNPMCORE_DATABASE_NAME=cnpmcore bash ./prepare-database-mysql.sh
|
||||
|
||||
# For tests, create test database
|
||||
mysql -h 127.0.0.1 -P 3306 -u root -e "CREATE DATABASE cnpmcore_unittest;"
|
||||
```
|
||||
|
||||
### Database Setup - PostgreSQL (Alternative)
|
||||
```bash
|
||||
# Start PostgreSQL + Redis services via Docker
|
||||
docker compose -f docker-compose-postgres.yml up -d
|
||||
|
||||
# Initialize database (takes <1 second)
|
||||
CNPMCORE_DATABASE_NAME=cnpmcore bash ./prepare-database-postgresql.sh
|
||||
```
|
||||
|
||||
### Development Server
|
||||
```bash
|
||||
# MySQL development server (starts in ~20 seconds)
|
||||
npm run dev
|
||||
# Server runs on http://127.0.0.1:7001
|
||||
|
||||
# PostgreSQL development server
|
||||
npm run dev:postgresql
|
||||
# Server runs on http://127.0.0.1:7001
|
||||
```
|
||||
|
||||
### Testing
|
||||
```bash
|
||||
# Run full test suite with MySQL - NEVER CANCEL: Takes 4+ minutes. Set timeout to 10+ minutes.
|
||||
npm run test
|
||||
|
||||
# Run full test suite with PostgreSQL - NEVER CANCEL: Takes 4+ minutes. Set timeout to 10+ minutes.
|
||||
npm run test:postgresql
|
||||
|
||||
# Run single test file (for faster iteration, takes ~12 seconds)
|
||||
npm run test:local test/common/CryptoUtil.test.ts
|
||||
|
||||
# Test coverage with MySQL - NEVER CANCEL: Takes 5+ minutes. Set timeout to 15+ minutes.
|
||||
npm run cov
|
||||
|
||||
# Test coverage with PostgreSQL - NEVER CANCEL: Takes 5+ minutes. Set timeout to 15+ minutes.
|
||||
npm run cov:postgresql
|
||||
```
|
||||
|
||||
**CRITICAL TESTING NOTES:**
|
||||
- **NEVER CANCEL** build or test commands - they may take 4-15 minutes to complete
|
||||
- Individual test files run much faster (~12 seconds) for development iteration
|
||||
- Full test suite processes 100+ test files and requires database initialization
|
||||
- Test failures may occur in CI environment; use individual test files for validation
|
||||
|
||||
**Testing Philosophy:**
|
||||
- **Write tests for all new features** - No feature is complete without tests
|
||||
- **Test at the right layer** - Controller tests for HTTP, Service tests for business logic
|
||||
- **Mock external dependencies** - Use `mock()` from `@eggjs/mock`
|
||||
- **Use realistic test data** - Create through `TestUtil` helper methods
|
||||
- **Clean up after tests** - Database is reset between test files
|
||||
- **Test both success and failure cases** - Error paths are equally important
|
||||
|
||||
**Common Test Patterns:**
|
||||
```typescript
|
||||
import { app, mock } from '@eggjs/mock/bootstrap';
|
||||
import { TestUtil } from '../../../test/TestUtil';
|
||||
|
||||
describe('test/path/to/YourController.test.ts', () => {
|
||||
describe('[GET /api/endpoint] methodName()', () => {
|
||||
it('should return expected result', async () => {
|
||||
// Setup
|
||||
const { authorization } = await TestUtil.createUser();
|
||||
|
||||
// Execute
|
||||
const res = await app
|
||||
.httpRequest()
|
||||
.get('/api/endpoint')
|
||||
.set('authorization', authorization)
|
||||
.expect(200);
|
||||
|
||||
// Assert
|
||||
assert.equal(res.body.someField, expectedValue);
|
||||
});
|
||||
|
||||
it('should handle unauthorized access', async () => {
|
||||
const res = await app
|
||||
.httpRequest()
|
||||
.get('/api/endpoint')
|
||||
.expect(401);
|
||||
|
||||
assert.equal(res.body.error, '[UNAUTHORIZED] Login first');
|
||||
});
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
### Production Commands
|
||||
```bash
|
||||
# CI pipeline commands - NEVER CANCEL: Takes 5+ minutes. Set timeout to 15+ minutes.
|
||||
npm run ci # MySQL CI (includes lint, test, coverage, build)
|
||||
npm run ci:postgresql # PostgreSQL CI
|
||||
|
||||
# Production start/stop
|
||||
npm run start # Start as daemon
|
||||
npm run stop # Stop daemon
|
||||
npm run start:foreground # Start in foreground for debugging
|
||||
```
|
||||
|
||||
## Validation Scenarios
|
||||
|
||||
**ALWAYS manually validate changes** by running through these scenarios:
|
||||
|
||||
### Basic API Validation
|
||||
```bash
|
||||
# Start development server
|
||||
npm run dev
|
||||
|
||||
# Test registry root endpoint
|
||||
curl http://127.0.0.1:7001
|
||||
# Should return JSON with app metadata and stats
|
||||
|
||||
# Test authentication endpoint
|
||||
curl http://127.0.0.1:7001/-/whoami
|
||||
# Should return authentication error (expected when not logged in)
|
||||
|
||||
# Test package listing (initially empty)
|
||||
curl http://127.0.0.1:7001/-/all
|
||||
```
|
||||
|
||||
### Admin User Setup and Package Publishing
|
||||
```bash
|
||||
# Register admin user (cnpmcore_admin) - requires allowPublicRegistration=true in config
|
||||
npm login --registry=http://127.0.0.1:7001
|
||||
|
||||
# Verify login
|
||||
npm whoami --registry=http://127.0.0.1:7001
|
||||
|
||||
# Test package publishing
|
||||
npm publish --registry=http://127.0.0.1:7001
|
||||
```
|
||||
|
||||
## Architecture and Navigation
|
||||
|
||||
### Project Structure
|
||||
```
|
||||
app/
|
||||
├── common/ # Global utilities and adapters
|
||||
│ ├── adapter/ # External service adapters (NpmRegistry, Binary, etc.)
|
||||
│ └── enum/ # Shared enumerations
|
||||
├── core/ # Business logic layer
|
||||
│ ├── entity/ # Core domain models
|
||||
│ ├── event/ # Event handlers and async processing
|
||||
│ ├── service/ # Core business services
|
||||
│ └── util/ # Internal utilities
|
||||
├── port/ # Interface layer
|
||||
│ ├── controller/ # HTTP controllers
|
||||
│ ├── middleware/ # Express middleware
|
||||
│ ├── schedule/ # Background job schedulers
|
||||
│ └── webauth/ # WebAuth integration
|
||||
├── repository/ # Data access layer
|
||||
│ ├── model/ # ORM models
|
||||
│ └── util/ # Repository utilities
|
||||
└── infra/ # Infrastructure adapters
|
||||
```
|
||||
|
||||
### Key Services and Controllers
|
||||
- **PackageController**: Main package CRUD operations
|
||||
- **PackageManagerService**: Core package management business logic
|
||||
- **BinarySyncerService**: Binary package synchronization
|
||||
- **ChangesStreamService**: NPM registry change stream processing
|
||||
- **UserController**: User authentication and profile management
|
||||
|
||||
### Infrastructure Adapters (`app/infra/`)
|
||||
Enterprise customization layer for PaaS integration. cnpmcore provides default implementations, but enterprises should implement their own based on their infrastructure:
|
||||
|
||||
- **NFSClientAdapter**: File storage abstraction (local/S3/OSS)
|
||||
- **QueueAdapter**: Message queue integration
|
||||
- **AuthAdapter**: Authentication system integration
|
||||
- **BinaryAdapter**: Binary package storage adapter
|
||||
|
||||
These adapters allow cnpmcore to integrate with different cloud providers and enterprise systems without modifying core business logic.
|
||||
|
||||
### Configuration Files
|
||||
- `config/config.default.ts`: Main application configuration
|
||||
- `config/database.ts`: Database connection settings
|
||||
- `config/binaries.ts`: Binary package mirror configurations
|
||||
- `.env`: Environment-specific variables
|
||||
- `tsconfig.json`: TypeScript compilation settings
|
||||
- `tsconfig.prod.json`: Production build settings
|
||||
|
||||
## Common Development Tasks
|
||||
|
||||
### Adding New Features
|
||||
|
||||
**ALWAYS follow this workflow:**
|
||||
|
||||
1. **Plan the change** - Identify which layers need modification
|
||||
2. **Run linter** - `npm run lint:fix` to establish clean baseline
|
||||
3. **Bottom-up implementation** - Build from data layer up to controller:
|
||||
|
||||
a. **Model Layer** (if new data structure needed):
|
||||
- Add SQL migrations: `sql/mysql/*.sql` AND `sql/postgresql/*.sql`
|
||||
- Create Model: `app/repository/model/YourModel.ts`
|
||||
- Run database migration scripts
|
||||
|
||||
b. **Entity Layer** (domain models):
|
||||
- Create Entity: `app/core/entity/YourEntity.ts`
|
||||
- Implement business logic and behavior
|
||||
- Keep entities pure (no infrastructure dependencies)
|
||||
|
||||
c. **Repository Layer** (data access):
|
||||
- Create Repository: `app/repository/YourRepository.ts`
|
||||
- Follow naming: `findX`, `saveX`, `removeX`, `listXs`
|
||||
- Inject dependencies using `@Inject()`
|
||||
|
||||
d. **Service Layer** (business logic):
|
||||
- Create Service: `app/core/service/YourService.ts`
|
||||
- Orchestrate repositories and entities
|
||||
- Use `@SingletonProto()` for service lifecycle
|
||||
|
||||
e. **Controller Layer** (HTTP endpoints):
|
||||
- Create Controller: `app/port/controller/YourController.ts`
|
||||
- Extend `AbstractController`
|
||||
- Add HTTP method decorators: `@HTTPMethod()`, `@HTTPBody()`, etc.
|
||||
- Implement 3-step validation (params → auth → authorization)
|
||||
|
||||
4. **Add tests** - Create test file: `test/path/matching/source/YourFile.test.ts`
|
||||
5. **Lint and test** - `npm run lint:fix && npm run test:local test/your/test.test.ts`
|
||||
6. **Type check** - `npm run typecheck`
|
||||
7. **Commit** - Use semantic commit messages (feat/fix/chore/docs/test)
|
||||
|
||||
**Example Controller Implementation:**
|
||||
```typescript
|
||||
import { AbstractController } from './AbstractController';
|
||||
import { HTTPController, HTTPMethod, HTTPQuery, Inject } from 'egg';
|
||||
|
||||
@HTTPController()
|
||||
export class YourController extends AbstractController {
|
||||
@Inject()
|
||||
private readonly yourService: YourService;
|
||||
|
||||
@HTTPMethod({ path: '/api/path', method: 'GET' })
|
||||
async yourMethod(@HTTPQuery() params: YourQueryType) {
|
||||
// 1. Validate params (done by @HTTPQuery with typebox)
|
||||
// 2. Authenticate user
|
||||
const user = await this.userRoleManager.requiredAuthorizedUser(this.ctx, 'read');
|
||||
// 3. Authorize resource access (if needed)
|
||||
// 4. Delegate to service
|
||||
return await this.yourService.doSomething(params);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Database Migrations
|
||||
- SQL files are in `sql/mysql/` and `sql/postgresql/`
|
||||
- Migration scripts automatically run during database preparation
|
||||
- **NEVER** modify existing migration files - only add new ones
|
||||
|
||||
### Background Jobs
|
||||
- Schedulers are in `app/port/schedule/`
|
||||
- Include sync workers, cleanup tasks, and stream processors
|
||||
- Jobs run automatically when development server starts
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Database Connection Issues
|
||||
```bash
|
||||
# Check if services are running
|
||||
docker compose ps
|
||||
|
||||
# Reset MySQL environment
|
||||
docker compose -f docker-compose.yml down
|
||||
docker compose -f docker-compose.yml up -d
|
||||
CNPMCORE_DATABASE_NAME=cnpmcore bash ./prepare-database-mysql.sh
|
||||
|
||||
# Reset PostgreSQL environment
|
||||
docker compose -f docker-compose-postgres.yml down
|
||||
docker compose -f docker-compose-postgres.yml up -d
|
||||
CNPMCORE_DATABASE_NAME=cnpmcore bash ./prepare-database-postgresql.sh
|
||||
```
|
||||
|
||||
### Build Issues
|
||||
```bash
|
||||
# Clean and rebuild
|
||||
npm run clean
|
||||
npm run tsc
|
||||
|
||||
# Check TypeScript configuration
|
||||
npx tsc --noEmit
|
||||
```
|
||||
|
||||
### Test Issues
|
||||
```bash
|
||||
# Create missing test database
|
||||
mysql -h 127.0.0.1 -P 3306 -u root -e "CREATE DATABASE cnpmcore_unittest;"
|
||||
|
||||
# Run single test for debugging
|
||||
npm run test:local test/common/CryptoUtil.test.ts
|
||||
```
|
||||
|
||||
## CI/CD Integration
|
||||
|
||||
The project uses GitHub Actions with workflows in `.github/workflows/`:
|
||||
- `nodejs.yml`: Main CI pipeline with MySQL, PostgreSQL, and Elasticsearch testing
|
||||
- Multiple Node.js versions tested: 20, 22, 24
|
||||
- **CRITICAL**: CI jobs include long-running tests that can take 15+ minutes per database type
|
||||
|
||||
### Pre-commit Validation
|
||||
**ALWAYS run before committing:**
|
||||
```bash
|
||||
npm run lint:fix # Fix linting issues
|
||||
npm run tsc # Verify TypeScript compilation
|
||||
npm run test:local test/path/to/relevant.test.ts # Run relevant tests
|
||||
```
|
||||
|
||||
## Docker Support
|
||||
|
||||
### Development Environments
|
||||
- `docker-compose.yml`: MySQL + Redis + phpMyAdmin
|
||||
- `docker-compose-postgres.yml`: PostgreSQL + Redis + pgAdmin
|
||||
- `docker-compose-es.yml`: Elasticsearch integration
|
||||
|
||||
### Production Images
|
||||
```bash
|
||||
# Build Alpine image
|
||||
npm run images:alpine
|
||||
|
||||
# Build Debian image
|
||||
npm run images:debian
|
||||
```
|
||||
|
||||
## External Dependencies
|
||||
|
||||
- **Database**: MySQL 9.x or PostgreSQL 17+
|
||||
- **Cache**: Redis 6+
|
||||
- **Search**: Elasticsearch 8.x (optional)
|
||||
- **Storage**: Local filesystem or S3-compatible storage
|
||||
- **Framework**: Egg.js with extensive TypeScript integration
|
||||
|
||||
## Performance Notes
|
||||
|
||||
Command execution times (for timeout planning):
|
||||
|
||||
- **Startup Time**: ~20 seconds for development server
|
||||
- **Build Time**: ~6 seconds for TypeScript compilation
|
||||
- **Test Time**: 4-15 minutes for full suite (database dependent)
|
||||
- **Individual Test**: ~12 seconds for single test file
|
||||
- **Package Installation**: ~2 minutes for npm install
|
||||
- **Database Init**: <2 seconds for either MySQL or PostgreSQL
|
||||
- **Linting**: <1 second (oxlint is very fast)
|
||||
|
||||
Always account for these timings when setting timeouts for automated processes.
|
||||
|
||||
## Semantic Commit Messages
|
||||
|
||||
Use conventional commit format for all commits:
|
||||
|
||||
- `feat:` - New features
|
||||
- `fix:` - Bug fixes
|
||||
- `docs:` - Documentation changes
|
||||
- `chore:` - Maintenance tasks
|
||||
- `test:` - Test additions or modifications
|
||||
- `refactor:` - Code refactoring
|
||||
- `perf:` - Performance improvements
|
||||
|
||||
Examples:
|
||||
```bash
|
||||
feat: add support for GitHub binary mirroring
|
||||
fix: resolve authentication token expiration issue
|
||||
docs: update API documentation for sync endpoints
|
||||
test: add tests for package publication workflow
|
||||
```
|
||||
70
.github/workflows/codeql-analysis.yml
vendored
70
.github/workflows/codeql-analysis.yml
vendored
@@ -1,70 +0,0 @@
|
||||
# For most projects, this workflow file will not need changing; you simply need
|
||||
# to commit it to your repository.
|
||||
#
|
||||
# You may wish to alter this file to override the set of languages analyzed,
|
||||
# or to provide custom queries or build logic.
|
||||
#
|
||||
# ******** NOTE ********
|
||||
# We have attempted to detect the languages in your repository. Please check
|
||||
# the `language` matrix defined below to confirm you have the correct set of
|
||||
# supported CodeQL languages.
|
||||
#
|
||||
name: "CodeQL"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ main ]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [ main ]
|
||||
schedule:
|
||||
- cron: '41 13 * * 3'
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
actions: read
|
||||
contents: read
|
||||
security-events: write
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
language: [ 'javascript', 'typescript' ]
|
||||
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
|
||||
# Learn more about CodeQL language support at https://git.io/codeql-language-support
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v1
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
# By default, queries listed here will override any specified in a config file.
|
||||
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||
# queries: ./path/to/local/query, your-org/your-repo/queries@main
|
||||
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v1
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 https://git.io/JvXDl
|
||||
|
||||
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
|
||||
# and modify them (or add more) to build your code if your project
|
||||
# uses a compiled language
|
||||
|
||||
#- run: |
|
||||
# make bootstrap
|
||||
# make release
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v1
|
||||
2
.github/workflows/greetings.yml
vendored
2
.github/workflows/greetings.yml
vendored
@@ -9,7 +9,7 @@ jobs:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: actions/first-interaction@v1
|
||||
- uses: actions/first-interaction@v3
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-message: '我们已经看到你的反馈,如果是功能缺陷,可以提供一下重现该问题的方式;如果是新功能需求,我们会尽快加入讨论。同时我们非常期待你可以加入我们的贡献者行列,让项目可以长期可持续发展。'
|
||||
|
||||
335
.github/workflows/nodejs.yml
vendored
335
.github/workflows/nodejs.yml
vendored
@@ -3,10 +3,226 @@
|
||||
|
||||
name: Node.js CI
|
||||
|
||||
on: [push, pull_request]
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
pull_request:
|
||||
branches: [master]
|
||||
merge_group:
|
||||
|
||||
jobs:
|
||||
typecheck:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
concurrency:
|
||||
group: typecheck-${{ github.workflow }}-#${{ github.event.pull_request.number || github.head_ref || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
steps:
|
||||
- name: Checkout Git Source
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Use Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: 22
|
||||
|
||||
- name: Install Dependencies
|
||||
run: npm i
|
||||
|
||||
- name: Lint
|
||||
run: npm run lint
|
||||
|
||||
- name: Typecheck
|
||||
run: npm run typecheck
|
||||
|
||||
- name: Build
|
||||
run: npm run tsc && npm run tsc:prod
|
||||
|
||||
test-deployment:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
concurrency:
|
||||
group: test-deployment-${{ github.workflow }}-#${{ github.event.pull_request.number || github.head_ref || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
services:
|
||||
mysql:
|
||||
image: mysql:5.7
|
||||
env:
|
||||
MYSQL_ALLOW_EMPTY_PASSWORD: true
|
||||
MYSQL_DATABASE: cnpmcore
|
||||
ports:
|
||||
- 3306:3306
|
||||
options: --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=5
|
||||
redis:
|
||||
# https://docs.github.com/en/actions/using-containerized-services/about-service-containers#example-mapping-redis-ports
|
||||
image: redis
|
||||
ports:
|
||||
# Opens tcp port 6379 on the host and service container
|
||||
- 6379:6379
|
||||
|
||||
steps:
|
||||
- name: Checkout Git Source
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Use Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: 22
|
||||
|
||||
- name: Install Dependencies
|
||||
run: npm i
|
||||
|
||||
- name: Test Deployment
|
||||
run: |
|
||||
npm run build
|
||||
echo "Preparing database..."
|
||||
CNPMCORE_DATABASE_NAME=cnpmcore bash ./prepare-database-mysql.sh
|
||||
echo "Starting cnpmcore..."
|
||||
CNPMCORE_FORCE_LOCAL_FS=true npm run start:foreground &
|
||||
sleep 5
|
||||
echo "Checking cnpmcore is ready..."
|
||||
|
||||
set -Eeuo pipefail
|
||||
URL="http://127.0.0.1:7001"
|
||||
PATTERN="instance_start_time"
|
||||
TIMEOUT=60
|
||||
TMP="$(mktemp)"
|
||||
echo "🔎 Health check $URL, expect 200 & body contains: $PATTERN"
|
||||
deadline=$((SECONDS + TIMEOUT))
|
||||
last_status=""
|
||||
|
||||
while (( SECONDS < deadline )); do
|
||||
last_status="$(curl -sS -o "$TMP" -w '%{http_code}' "$URL" || true)"
|
||||
echo "last_status=$last_status"
|
||||
echo "body=$(cat $TMP)"
|
||||
if [[ "$last_status" == "200" ]] && grep -q "$PATTERN" "$TMP"; then
|
||||
echo "✅ OK"
|
||||
rm -f "$TMP"
|
||||
npx eggctl stop
|
||||
exit 0
|
||||
fi
|
||||
sleep 1
|
||||
done
|
||||
|
||||
echo "::error::❌ Health check failed: status=$last_status"
|
||||
echo "---- Response body (last try) ----"
|
||||
cat "$TMP" || true
|
||||
rm -f "$TMP"
|
||||
exit 1
|
||||
|
||||
test-postgresql-fs-nfs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
node-version: [20, 22, 24]
|
||||
os: [ubuntu-latest]
|
||||
# 0-based index
|
||||
shardIndex: [0, 1, 2]
|
||||
shardTotal: [3]
|
||||
|
||||
name: test on postgresql (node@${{ matrix.node-version }}, shard@${{ matrix.shardIndex }}/${{ matrix.shardTotal }})
|
||||
concurrency:
|
||||
group: test-postgresql-fs-nfs-${{ github.workflow }}-#${{ github.event.pull_request.number || github.head_ref || github.ref }}-${{ matrix.node-version }}-${{ matrix.shardIndex }}/${{ matrix.shardTotal }}
|
||||
cancel-in-progress: true
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
services:
|
||||
# https://docs.github.com/en/actions/use-cases-and-examples/using-containerized-services/creating-postgresql-service-containers
|
||||
# Label used to access the service container
|
||||
postgres:
|
||||
# Docker Hub image
|
||||
image: postgres
|
||||
# Provide the password for postgres
|
||||
env:
|
||||
POSTGRES_PASSWORD: postgres
|
||||
# Set health checks to wait until postgres has started
|
||||
options: >-
|
||||
--health-cmd pg_isready
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
ports:
|
||||
# Maps tcp port 5432 on service container to the host
|
||||
- 5432:5432
|
||||
redis:
|
||||
# https://docs.github.com/en/actions/using-containerized-services/about-service-containers#example-mapping-redis-ports
|
||||
image: redis
|
||||
ports:
|
||||
# Opens tcp port 6379 on the host and service container
|
||||
- 6379:6379
|
||||
|
||||
steps:
|
||||
- name: Checkout Git Source
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Use Node.js ${{ matrix.node-version }}
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
|
||||
- name: Install Dependencies
|
||||
run: npm i
|
||||
|
||||
# https://github.com/elastic/elastic-github-actions/blob/master/elasticsearch/README.md
|
||||
- name: Configure sysctl limits
|
||||
run: |
|
||||
sudo swapoff -a
|
||||
sudo sysctl -w vm.swappiness=1
|
||||
sudo sysctl -w fs.file-max=262144
|
||||
sudo sysctl -w vm.max_map_count=262144
|
||||
|
||||
- name: Runs Elasticsearch
|
||||
uses: elastic/elastic-github-actions/elasticsearch@master
|
||||
with:
|
||||
stack-version: 8.18.0
|
||||
security-enabled: false
|
||||
|
||||
- name: Wait for Elasticsearch to be ready
|
||||
run: |
|
||||
curl -v http://localhost:9200
|
||||
while ! curl -s http://localhost:9200 | grep -q "elasticsearch"; do
|
||||
echo "Waiting for Elasticsearch to be ready..."
|
||||
sleep 1
|
||||
done
|
||||
|
||||
- name: Continuous Integration
|
||||
run: npm run ci:postgresql
|
||||
env:
|
||||
# The hostname used to communicate with the PostgreSQL service container
|
||||
POSTGRES_HOST: localhost
|
||||
POSTGRES_USER: postgres
|
||||
POSTGRES_PASSWORD: postgres
|
||||
# The default PostgreSQL port
|
||||
POSTGRES_PORT: 5432
|
||||
CNPMCORE_CONFIG_ENABLE_ES: true
|
||||
CNPMCORE_CONFIG_ES_CLIENT_NODES: http://localhost:9200
|
||||
# https://github.com/jamiebuilds/ci-parallel-vars
|
||||
CI_NODE_INDEX: ${{ matrix.shardIndex }}
|
||||
CI_NODE_TOTAL: ${{ matrix.shardTotal }}
|
||||
|
||||
- name: Code Coverage
|
||||
uses: codecov/codecov-action@v5
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
test-mysql57-fs-nfs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
node-version: [20, 22, 24]
|
||||
os: [ubuntu-latest]
|
||||
# 0-based index
|
||||
shardIndex: [0, 1, 2]
|
||||
shardTotal: [3]
|
||||
|
||||
name: test on mysql (node@${{ matrix.node-version }}, shard@${{ matrix.shardIndex }}/${{ matrix.shardTotal }})
|
||||
concurrency:
|
||||
group: test-mysql57-fs-nfs-${{ github.workflow }}-#${{ github.event.pull_request.number || github.head_ref || github.ref }}-${{ matrix.node-version }}-${{ matrix.shardIndex }}/${{ matrix.shardTotal }}
|
||||
cancel-in-progress: true
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
services:
|
||||
@@ -25,39 +241,43 @@ jobs:
|
||||
# Opens tcp port 6379 on the host and service container
|
||||
- 6379:6379
|
||||
|
||||
steps:
|
||||
- name: Checkout Git Source
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Use Node.js ${{ matrix.node-version }}
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
|
||||
- name: Install Dependencies
|
||||
run: npm i
|
||||
|
||||
- name: Continuous Integration
|
||||
run: npm run ci
|
||||
env:
|
||||
# https://github.com/jamiebuilds/ci-parallel-vars
|
||||
CI_NODE_INDEX: ${{ matrix.shardIndex }}
|
||||
CI_NODE_TOTAL: ${{ matrix.shardTotal }}
|
||||
|
||||
- name: Code Coverage
|
||||
uses: codecov/codecov-action@v5
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
test-mysql57-s3-nfs:
|
||||
if: ${{ github.ref_name == 'master' }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
node-version: [16, 18, 20]
|
||||
node-version: [20, 22]
|
||||
os: [ubuntu-latest]
|
||||
|
||||
steps:
|
||||
- name: Checkout Git Source
|
||||
uses: actions/checkout@v3
|
||||
concurrency:
|
||||
group: test-mysql57-s3-nfs-${{ github.workflow }}-#${{ github.event.pull_request.number || github.head_ref || github.ref }}-${{ matrix.node-version }}
|
||||
cancel-in-progress: true
|
||||
|
||||
- name: Use Node.js ${{ matrix.node-version }}
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
|
||||
- name: Install Dependencies
|
||||
run: npm i -g npminstall && npminstall
|
||||
|
||||
- name: Continuous Integration
|
||||
run: npm run ci
|
||||
|
||||
- name: Code Coverage
|
||||
uses: codecov/codecov-action@v3
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
test-mysql57-oss-nfs:
|
||||
runs-on: ${{ matrix.os }}
|
||||
if: |
|
||||
contains('
|
||||
refs/heads/master
|
||||
refs/heads/dev
|
||||
', github.ref)
|
||||
|
||||
services:
|
||||
mysql:
|
||||
@@ -70,40 +290,45 @@ jobs:
|
||||
options: --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=5
|
||||
|
||||
redis:
|
||||
# https://docs.github.com/en/actions/using-containerized-services/about-service-containers#example-mapping-redis-ports
|
||||
image: redis
|
||||
ports:
|
||||
# Opens tcp port 6379 on the host and service container
|
||||
- 6379:6379
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
node-version: [16, 18, 20]
|
||||
os: [ubuntu-latest]
|
||||
|
||||
steps:
|
||||
- name: Checkout Git Source
|
||||
uses: actions/checkout@v3
|
||||
- name: Checkout Git Source
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Use Node.js ${{ matrix.node-version }}
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
- name: Use Node.js ${{ matrix.node-version }}
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
|
||||
- name: Install Dependencies
|
||||
run: npm i
|
||||
- name: Install Dependencies
|
||||
run: npm i
|
||||
|
||||
- name: Continuous Integration
|
||||
run: npm run ci
|
||||
env:
|
||||
CNPMCORE_NFS_TYPE: oss
|
||||
CNPMCORE_NFS_OSS_BUCKET: cnpmcore-unittest-github-nodejs-${{ matrix.node-version }}
|
||||
CNPMCORE_NFS_OSS_ENDPOINT: https://oss-us-west-1.aliyuncs.com
|
||||
CNPMCORE_NFS_OSS_ID: ${{ secrets.CNPMCORE_NFS_OSS_ID }}
|
||||
CNPMCORE_NFS_OSS_SECRET: ${{ secrets.CNPMCORE_NFS_OSS_SECRET }}
|
||||
- name: Continuous Integration
|
||||
run: npm run ci "test/cli/npm/install.test.ts"
|
||||
env:
|
||||
CNPMCORE_NFS_TYPE: s3
|
||||
CNPMCORE_NFS_REMOVE_BEFORE_UPLOAD: true
|
||||
CNPMCORE_NFS_S3_CLIENT_BUCKET: cnpmcore-unittest-github-nodejs-${{ matrix.node-version }}
|
||||
CNPMCORE_NFS_S3_CLIENT_ENDPOINT: ${{ secrets.CNPMCORE_NFS_S3_ENDPOINT }}
|
||||
CNPMCORE_NFS_S3_CLIENT_ID: ${{ secrets.CNPMCORE_NFS_S3_ID }}
|
||||
CNPMCORE_NFS_S3_CLIENT_SECRET: ${{ secrets.CNPMCORE_NFS_S3_SECRET }}
|
||||
CNPMCORE_NFS_S3_CLIENT_FORCE_PATH_STYLE: true
|
||||
# CNPMCORE_NFS_S3_CLIENT_DISABLE_URL: true
|
||||
|
||||
- name: Code Coverage
|
||||
uses: codecov/codecov-action@v3
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
- name: Code Coverage
|
||||
uses: codecov/codecov-action@v5
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
done:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- test-postgresql-fs-nfs
|
||||
- test-mysql57-fs-nfs
|
||||
- typecheck
|
||||
steps:
|
||||
- run: exit 1
|
||||
if: ${{ always() && (contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled')) }}
|
||||
|
||||
75
.github/workflows/release-image.yml
vendored
Normal file
75
.github/workflows/release-image.yml
vendored
Normal file
@@ -0,0 +1,75 @@
|
||||
# https://docs.github.com/en/actions/tutorials/publish-packages/publish-docker-images#publishing-images-to-github-packages
|
||||
name: Create and publish a Docker image
|
||||
|
||||
# Configures this workflow to run manually
|
||||
on:
|
||||
workflow_dispatch:
|
||||
|
||||
pull_request:
|
||||
branches: [master]
|
||||
|
||||
# Defines two custom environment variables for the workflow. These are used for the Container registry domain, and a name for the Docker image that this workflow builds.
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
|
||||
# There is a single job in this workflow. It's configured to run on the latest available version of Ubuntu.
|
||||
jobs:
|
||||
build-and-push-image:
|
||||
runs-on: ubuntu-latest
|
||||
concurrency:
|
||||
group: build-and-push-image-${{ github.workflow }}-#${{ github.event.pull_request.number || github.head_ref || github.ref }}
|
||||
cancel-in-progress: true
|
||||
# Sets the permissions granted to the `GITHUB_TOKEN` for the actions in this job.
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
attestations: write
|
||||
id-token: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
# Uses the `docker/login-action` action to log in to the Container registry registry using the account and password that will publish the packages. Once published, the packages are scoped to the account defined here.
|
||||
- name: Log in to the Container registry
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
# This step uses [docker/metadata-action](https://github.com/docker/metadata-action#about) to extract tags and labels that will be applied to the specified image. The `id` "meta" allows the output of this step to be referenced in a subsequent step. The `images` value provides the base name for the tags and labels.
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
# This step uses the `docker/build-push-action` action to build the image, based on your repository's `Dockerfile`. If the build succeeds, it pushes the image to GitHub Packages.
|
||||
# It uses the `context` parameter to define the build's context as the set of files located in the specified path. For more information, see [Usage](https://github.com/docker/build-push-action#usage) in the README of the `docker/build-push-action` repository.
|
||||
# It uses the `tags` and `labels` parameters to tag and label the image with the output from the "meta" step.
|
||||
- name: Build and push Docker image
|
||||
id: push
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
file: .docker/debian/Dockerfile
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
|
||||
# This step generates an artifact attestation for the image, which is a tamper-proof statement about where and how it was built. It increases supply chain security for people who consume the image. For more information, see [Using artifact attestations to establish provenance for builds](/actions/security-guides/using-artifact-attestations-to-establish-provenance-for-builds).
|
||||
- name: Generate artifact attestation
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: actions/attest-build-provenance@v3
|
||||
with:
|
||||
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME}}
|
||||
subject-digest: ${{ steps.push.outputs.digest }}
|
||||
push-to-registry: true
|
||||
16
.github/workflows/release.yml
vendored
16
.github/workflows/release.yml
vendored
@@ -1,14 +1,18 @@
|
||||
name: Release
|
||||
on:
|
||||
push:
|
||||
branches: [ master ]
|
||||
branches: [master]
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
deployments: write
|
||||
issues: write
|
||||
pull-requests: write
|
||||
id-token: write
|
||||
|
||||
jobs:
|
||||
release:
|
||||
name: Node.js
|
||||
uses: node-modules/github-actions/.github/workflows/node-release.yml@master
|
||||
name: NPM
|
||||
uses: cnpm/github-actions/.github/workflows/npm-release.yml@master
|
||||
secrets:
|
||||
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
GIT_TOKEN: ${{ secrets.GIT_TOKEN }}
|
||||
with:
|
||||
checkTest: false
|
||||
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -120,3 +120,5 @@ dist
|
||||
.DS_Store
|
||||
run
|
||||
!test/ctx_register.js
|
||||
|
||||
.egg/
|
||||
|
||||
1
.husky/pre-commit
Normal file
1
.husky/pre-commit
Normal file
@@ -0,0 +1 @@
|
||||
npx lint-staged
|
||||
24
.oxlintrc.json
Normal file
24
.oxlintrc.json
Normal file
@@ -0,0 +1,24 @@
|
||||
{
|
||||
"$schema": "./node_modules/oxlint/configuration_schema.json",
|
||||
// FIXME: @eggjs/oxlint-config too strict, disable it for now, will fix it later
|
||||
// "extends": ["./node_modules/@eggjs/oxlint-config/.oxlintrc.json"],
|
||||
"env": {
|
||||
"node": true,
|
||||
"mocha": true
|
||||
},
|
||||
"rules": {
|
||||
// Project-specific overrides
|
||||
"max-params": ["error", 6],
|
||||
"no-console": "warn",
|
||||
"import/no-anonymous-default-export": "error",
|
||||
"no-unassigned-import": "allow",
|
||||
"new-cap": "allow",
|
||||
"class-methods-use-this": "allow",
|
||||
"import/no-named-export": "allow",
|
||||
"unicorn/no-array-sort": "allow",
|
||||
"no-param-reassign": "allow",
|
||||
"unicorn/prefer-at": "allow",
|
||||
"no-process-env": "allow"
|
||||
},
|
||||
"ignorePatterns": ["index.d.ts"]
|
||||
}
|
||||
4
.prettierignore
Normal file
4
.prettierignore
Normal file
@@ -0,0 +1,4 @@
|
||||
CHANGELOG.md
|
||||
__snapshots__
|
||||
pnpm-lock.yaml
|
||||
node_modules
|
||||
7
.prettierrc
Normal file
7
.prettierrc
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"singleQuote": true,
|
||||
"trailingComma": "es5",
|
||||
"tabWidth": 2,
|
||||
"printWidth": 120,
|
||||
"arrowParens": "avoid"
|
||||
}
|
||||
18
.vscode/launch.json
vendored
18
.vscode/launch.json
vendored
@@ -9,16 +9,9 @@
|
||||
"request": "launch",
|
||||
"name": "Egg Debug",
|
||||
"runtimeExecutable": "npm",
|
||||
"runtimeArgs": [
|
||||
"run",
|
||||
"dev",
|
||||
"--",
|
||||
"--inspect-brk"
|
||||
],
|
||||
"runtimeArgs": ["run", "dev", "--", "--inspect-brk"],
|
||||
"console": "integratedTerminal",
|
||||
"restart": true,
|
||||
"protocol": "auto",
|
||||
"port": 9229,
|
||||
"autoAttachChildProcesses": true
|
||||
},
|
||||
{
|
||||
@@ -26,14 +19,7 @@
|
||||
"request": "launch",
|
||||
"name": "Egg Test",
|
||||
"runtimeExecutable": "npm",
|
||||
"runtimeArgs": [
|
||||
"run",
|
||||
"test-local",
|
||||
"--",
|
||||
"--inspect-brk"
|
||||
],
|
||||
"protocol": "auto",
|
||||
"port": 9229,
|
||||
"runtimeArgs": ["run", "test:local", "--", "--inspect-brk"],
|
||||
"autoAttachChildProcesses": true
|
||||
}
|
||||
]
|
||||
|
||||
2174
CHANGELOG.md
2174
CHANGELOG.md
File diff suppressed because it is too large
Load Diff
270
CLAUDE.md
Normal file
270
CLAUDE.md
Normal file
@@ -0,0 +1,270 @@
|
||||
# CLAUDE.md
|
||||
|
||||
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
|
||||
|
||||
## Project Overview
|
||||
|
||||
cnpmcore is a TypeScript-based private NPM registry implementation for enterprise use. It's built on the Egg.js framework using Domain-Driven Design (DDD) architecture principles and supports both MySQL and PostgreSQL databases.
|
||||
|
||||
## Essential Commands
|
||||
|
||||
### Development
|
||||
```bash
|
||||
# Start development server (MySQL)
|
||||
npm run dev
|
||||
|
||||
# Start development server (PostgreSQL)
|
||||
npm run dev:postgresql
|
||||
|
||||
# Lint code
|
||||
npm run lint
|
||||
|
||||
# Fix linting issues
|
||||
npm run lint:fix
|
||||
|
||||
# TypeScript type checking
|
||||
npm run typecheck
|
||||
```
|
||||
|
||||
### Testing
|
||||
```bash
|
||||
# Run all tests with MySQL (takes 4+ minutes)
|
||||
npm run test
|
||||
|
||||
# Run all tests with PostgreSQL (takes 4+ minutes)
|
||||
npm run test:postgresql
|
||||
|
||||
# Run single test file (faster iteration, ~12 seconds)
|
||||
npm run test:local test/path/to/file.test.ts
|
||||
|
||||
# Generate coverage report
|
||||
npm run cov
|
||||
```
|
||||
|
||||
### Database Setup
|
||||
```bash
|
||||
# MySQL setup
|
||||
docker compose -f docker-compose.yml up -d
|
||||
CNPMCORE_DATABASE_NAME=cnpmcore bash ./prepare-database-mysql.sh
|
||||
|
||||
# PostgreSQL setup
|
||||
docker compose -f docker-compose-postgres.yml up -d
|
||||
CNPMCORE_DATABASE_NAME=cnpmcore bash ./prepare-database-postgresql.sh
|
||||
```
|
||||
|
||||
### Build
|
||||
```bash
|
||||
# Clean build artifacts
|
||||
npm run clean
|
||||
|
||||
# Development build
|
||||
npm run tsc
|
||||
|
||||
# Production build
|
||||
npm run tsc:prod
|
||||
```
|
||||
|
||||
## Architecture - Domain-Driven Design (DDD)
|
||||
|
||||
The codebase follows strict DDD layering with clear separation of concerns:
|
||||
|
||||
```
|
||||
Controller (app/port/controller/) ← HTTP interface, validation, auth
|
||||
↓ depends on
|
||||
Service (app/core/service/) ← Business logic orchestration
|
||||
↓ depends on
|
||||
Repository (app/repository/) ← Data access layer
|
||||
↓ depends on
|
||||
Model (app/repository/model/) ← ORM/Database mapping
|
||||
|
||||
Entity (app/core/entity/) ← Pure domain models (no dependencies)
|
||||
Common (app/common/) ← Utilities and adapters (all layers)
|
||||
```
|
||||
|
||||
### Layer Responsibilities
|
||||
|
||||
**Controller Layer** (`app/port/controller/`):
|
||||
- Handle HTTP requests/responses
|
||||
- Validate inputs using `@eggjs/typebox-validate`
|
||||
- Authenticate users and verify authorization
|
||||
- Delegate business logic to Services
|
||||
- All controllers extend `AbstractController`
|
||||
|
||||
**Service Layer** (`app/core/service/`):
|
||||
- Implement core business logic
|
||||
- Orchestrate multiple repositories
|
||||
- Publish domain events
|
||||
- Manage transactions
|
||||
|
||||
**Repository Layer** (`app/repository/`):
|
||||
- CRUD operations on Models
|
||||
- Data access and persistence
|
||||
- Query building and optimization
|
||||
- Methods named: `findX`, `saveX`, `removeX`, `listXs`
|
||||
|
||||
**Entity Layer** (`app/core/entity/`):
|
||||
- Pure domain models with business behavior
|
||||
- No infrastructure dependencies
|
||||
- Immutable data structures preferred
|
||||
|
||||
**Model Layer** (`app/repository/model/`):
|
||||
- ORM definitions using Leoric
|
||||
- Database schema mapping
|
||||
- No business logic
|
||||
|
||||
### Infrastructure Adapters (`app/infra/`)
|
||||
Enterprise customization layer for PaaS integration:
|
||||
- **NFSClientAdapter**: File storage (local/S3/OSS)
|
||||
- **QueueAdapter**: Message queue integration
|
||||
- **AuthAdapter**: Authentication system
|
||||
- **BinaryAdapter**: Binary package storage
|
||||
|
||||
## Key Development Patterns
|
||||
|
||||
### Request Validation Trilogy
|
||||
Always validate requests in this exact order:
|
||||
1. **Parameter Validation** - Use `@eggjs/typebox-validate` for type-safe validation
|
||||
2. **Authentication** - Get authorized user with token role verification
|
||||
3. **Authorization** - Check resource-level permissions to prevent privilege escalation
|
||||
|
||||
```typescript
|
||||
// Example controller method
|
||||
async someMethod(@HTTPQuery() params: QueryType) {
|
||||
// 1. Params already validated by @HTTPQuery with typebox
|
||||
// 2. Authenticate
|
||||
const user = await this.userRoleManager.requiredAuthorizedUser(this.ctx, 'publish');
|
||||
// 3. Authorize (if needed)
|
||||
const { pkg } = await this.ensurePublishAccess(this.ctx, fullname);
|
||||
// 4. Execute business logic
|
||||
return await this.service.doSomething(params);
|
||||
}
|
||||
```
|
||||
|
||||
### Repository Method Naming
|
||||
- `findSomething` - Query single entity
|
||||
- `saveSomething` - Create or update entity
|
||||
- `removeSomething` - Delete entity
|
||||
- `listSomethings` - Query multiple entities (plural)
|
||||
|
||||
### Modifying Database Models
|
||||
When changing a Model, update all 3 locations:
|
||||
1. SQL migrations: `sql/mysql/*.sql` AND `sql/postgresql/*.sql`
|
||||
2. ORM Model: `app/repository/model/*.ts`
|
||||
3. Domain Entity: `app/core/entity/*.ts`
|
||||
|
||||
## Code Style
|
||||
|
||||
### Linting
|
||||
- **Linter**: Oxlint (Rust-based, very fast)
|
||||
- **Formatter**: Prettier
|
||||
- **Pre-commit**: Husky + lint-staged (auto-format on commit)
|
||||
|
||||
Style rules:
|
||||
- Single quotes (`'`)
|
||||
- 2-space indentation
|
||||
- 120 character line width
|
||||
- ES5 trailing commas
|
||||
- Max 6 function parameters
|
||||
- No console statements (use logger)
|
||||
|
||||
### TypeScript
|
||||
- Strict TypeScript enabled
|
||||
- Avoid `any` types - use proper typing or `unknown`
|
||||
- ES modules (`import/export`) throughout
|
||||
- Comprehensive type definitions in all files
|
||||
|
||||
### Testing
|
||||
- Test files use `.test.ts` suffix
|
||||
- Tests mirror source structure in `test/` directory
|
||||
- Use `@eggjs/mock` for mocking
|
||||
- Use `assert` from `node:assert/strict`
|
||||
- Test both success and error cases
|
||||
|
||||
Pattern:
|
||||
```typescript
|
||||
describe('test/path/to/SourceFile.test.ts', () => {
|
||||
describe('[HTTP_METHOD /api/path] functionName()', () => {
|
||||
it('should handle expected behavior', async () => {
|
||||
// Test implementation
|
||||
});
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
app/
|
||||
├── common/ # Global utilities and adapters
|
||||
│ ├── adapter/ # External service adapters
|
||||
│ └── enum/ # Shared enumerations
|
||||
├── core/ # Business logic layer
|
||||
│ ├── entity/ # Domain models
|
||||
│ ├── event/ # Event handlers
|
||||
│ ├── service/ # Business services
|
||||
│ └── util/ # Internal utilities
|
||||
├── port/ # Interface layer
|
||||
│ ├── controller/ # HTTP controllers
|
||||
│ ├── middleware/ # Middleware
|
||||
│ └── schedule/ # Background jobs
|
||||
├── repository/ # Data access layer
|
||||
│ └── model/ # ORM models
|
||||
└── infra/ # Infrastructure adapters
|
||||
|
||||
config/ # Configuration files
|
||||
sql/ # Database migrations
|
||||
├── mysql/ # MySQL migrations
|
||||
└── postgresql/ # PostgreSQL migrations
|
||||
test/ # Test files (mirrors app/ structure)
|
||||
```
|
||||
|
||||
## Important Configuration
|
||||
|
||||
- `config/config.default.ts` - Main application configuration
|
||||
- `config/database.ts` - Database connection settings
|
||||
- `config/binaries.ts` - Binary package mirror configurations
|
||||
- `.env` - Environment-specific variables (copy from `.env.example`)
|
||||
- `tsconfig.json` - TypeScript settings (target: ES2021 for Leoric compatibility)
|
||||
|
||||
## Development Workflow
|
||||
|
||||
1. **Setup**: Copy `.env.example` to `.env`, start Docker services, initialize database
|
||||
2. **Feature Development**: Follow bottom-up approach (Model → Entity → Repository → Service → Controller)
|
||||
3. **Testing**: Write tests at appropriate layer, run individual tests for fast iteration
|
||||
4. **Validation**: Run linter, typecheck, relevant tests before committing
|
||||
5. **Commit**: Use semantic commit messages (feat/fix/docs/test/chore)
|
||||
|
||||
## Integration as NPM Package
|
||||
|
||||
cnpmcore can be integrated into Egg.js/Tegg applications as an NPM package, allowing enterprises to:
|
||||
- Customize infrastructure adapters (storage, auth, queue)
|
||||
- Override default behavior while receiving updates
|
||||
- Integrate with existing enterprise systems
|
||||
|
||||
See INTEGRATE.md for detailed integration guide.
|
||||
|
||||
## Performance Notes
|
||||
|
||||
Typical command execution times:
|
||||
- Development server startup: ~20 seconds
|
||||
- TypeScript build: ~6 seconds
|
||||
- Full test suite: 4-15 minutes
|
||||
- Single test file: ~12 seconds
|
||||
- Linting: <1 second
|
||||
- Database initialization: <2 seconds
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- Node.js: 20.18.0+ or 22.18.0+
|
||||
- Database: MySQL 5.7+ or PostgreSQL 17+
|
||||
- Cache: Redis 6+
|
||||
- Optional: Elasticsearch 8.x
|
||||
|
||||
## Key Services & Controllers
|
||||
|
||||
Core components to understand:
|
||||
- **PackageController**: Package CRUD operations
|
||||
- **PackageManagerService**: Core package management logic
|
||||
- **BinarySyncerService**: Binary package synchronization
|
||||
- **ChangesStreamService**: NPM registry change stream processing
|
||||
- **UserController**: User authentication and profiles
|
||||
81
DEVELOPER.md
81
DEVELOPER.md
@@ -2,19 +2,37 @@
|
||||
|
||||
## 环境初始化
|
||||
|
||||
本项目的外部服务依赖有:MySQL 数据服务、Redis 缓存服务。
|
||||
本项目的外部服务依赖有:MySQL 数据库或 PostgreSQL 数据库、Redis 缓存服务。
|
||||
|
||||
生成本地开发环境配置文件:
|
||||
|
||||
```bash
|
||||
cp .env.example .env
|
||||
```
|
||||
|
||||
可以通过 Docker 来快速启动本地开发环境:
|
||||
|
||||
MySQL 开发环境:
|
||||
|
||||
```bash
|
||||
# 启动本地依赖服务
|
||||
docker-compose up -d
|
||||
# 启动本地依赖服务 - MySQL + Redis
|
||||
docker-compose -f docker-compose.yml up -d
|
||||
|
||||
# 关闭本地依赖服务
|
||||
docker-compose down
|
||||
docker-compose -f docker-compose.yml down
|
||||
```
|
||||
|
||||
> 手动初始化依赖服务参见[文档](./docs/setup.md)
|
||||
PostgreSQL 开发环境:
|
||||
|
||||
```bash
|
||||
# 启动本地依赖服务 - PostgreSQL + Redis
|
||||
docker-compose -f docker-compose-postgres.yml up -d
|
||||
|
||||
# 关闭本地依赖服务
|
||||
docker-compose -f docker-compose-postgres.yml down
|
||||
```
|
||||
|
||||
> 手动初始化依赖服务参见[本地开发环境 - MySQL](./docs/setup.md) 或 [本地开发环境 - PostgreSQL](./docs/setup-with-postgresql.md)
|
||||
|
||||
## 本地开发
|
||||
|
||||
@@ -24,11 +42,11 @@ docker-compose down
|
||||
npm install
|
||||
```
|
||||
|
||||
### 开发运行
|
||||
### 开发运行 - MySQL
|
||||
|
||||
```bash
|
||||
# 初始化数据库
|
||||
MYSQL_DATABASE=cnpmcore bash ./prepare-database.sh
|
||||
CNPMCORE_DATABASE_NAME=cnpmcore bash ./prepare-database-mysql.sh
|
||||
|
||||
# 启动 Web 服务
|
||||
npm run dev
|
||||
@@ -37,12 +55,53 @@ npm run dev
|
||||
curl -v http://127.0.0.1:7001
|
||||
```
|
||||
|
||||
### 开发运行 - PostgreSQL
|
||||
|
||||
```bash
|
||||
# 初始化数据库
|
||||
CNPMCORE_DATABASE_NAME=cnpmcore bash ./prepare-database-postgresql.sh
|
||||
|
||||
# 启动 Web 服务
|
||||
npm run dev:postgresql
|
||||
|
||||
# 访问
|
||||
curl -v http://127.0.0.1:7001
|
||||
```
|
||||
|
||||
### 登录和测试发包
|
||||
|
||||
> cnpmcore 默认不开放注册,可以通过 `config.default.ts` 中的 `allowPublicRegistration` 配置开启,否则只有管理员可以登录
|
||||
|
||||
|
||||
注册 cnpmcore_admin 管理员
|
||||
|
||||
```bash
|
||||
npm login --registry=http://127.0.0.1:7001
|
||||
|
||||
# 验证登录
|
||||
npm whoami --registry=http://127.0.0.1:7001
|
||||
```
|
||||
|
||||
发包
|
||||
|
||||
```bash
|
||||
npm publish --registry=http://127.0.0.1:7001
|
||||
```
|
||||
|
||||
### 单元测试
|
||||
|
||||
MySQL
|
||||
|
||||
```bash
|
||||
npm run test
|
||||
```
|
||||
|
||||
PostgreSQL
|
||||
|
||||
```bash
|
||||
npm run test:postgresql
|
||||
```
|
||||
|
||||
## 项目结构
|
||||
|
||||
```txt
|
||||
@@ -177,7 +236,7 @@ private async getPackageEntity(scope: string, name: string) {
|
||||
|
||||
#### 1、请求参数校验
|
||||
|
||||
使用 [egg-typebox-validate](https://github.com/xiekw2010/egg-typebox-validate) 来做请求参数校验,只需要定义一次参数类型和规则,就能同时拥有参数校验和类型定义。
|
||||
使用 [@eggjs/typebox-validate](https://github.com/eggjs/egg/tree/next/plugins/typebox-validate) 来做请求参数校验,只需要定义一次参数类型和规则,就能同时拥有参数校验和类型定义。
|
||||
详细使用方式可以参考 [PR#12](https://github.com/cnpm/cnpmcore/pull/12)。
|
||||
|
||||
使用方式请直接参考 `app/port/typebox.ts` 代码。
|
||||
@@ -268,9 +327,9 @@ Repository 依赖 Model,然后被 Service 和 Controller 依赖
|
||||
|
||||
可能需要涉及3个地方的修改:
|
||||
|
||||
1. sql/*.sql
|
||||
2. repository/model/*.ts
|
||||
3. core/entity/*.ts
|
||||
1. `sql/mysql/*.sql`, `sql/postgresql/*.sql`
|
||||
2. `repository/model/*.ts`
|
||||
3. `core/entity/*.ts`
|
||||
|
||||
目前还不会做 Model 到 SQL 的自动转换生成,核心原因有:
|
||||
|
||||
|
||||
Dockerfile (17 lines changed)
@@ -1,17 +0,0 @@
|
||||
FROM node:18
|
||||
|
||||
# Create app directory
|
||||
WORKDIR /usr/src/app
|
||||
|
||||
# Install app dependencies
|
||||
COPY . .
|
||||
|
||||
RUN npm install -g npminstall --registry=https://registry.npmmirror.com \
|
||||
&& npminstall -c \
|
||||
&& npm run tsc
|
||||
|
||||
ENV NODE_ENV=production \
|
||||
EGG_SERVER_ENV=prod
|
||||
|
||||
EXPOSE 7001
|
||||
CMD ["npm", "run", "start:foreground"]
|
||||
History.md (317 lines changed)
@@ -1,317 +0,0 @@
|
||||
|
||||
2.9.0 / 2022-12-15
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`c562645`](http://github.com/cnpm/cnpmcore/commit/c562645db7c88f9c3c5787fd450b457574d1cce6)] - feat: suspend task before app close (#365) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
|
||||
2.8.1 / 2022-12-05
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`fad30ad`](http://github.com/cnpm/cnpmcore/commit/fad30adc564c931c0bf63828d83bab84105aaef0)] - feat: npm command support npm v6 (#356) (laibao101 <<369632567@qq.com>>)
|
||||
|
||||
**fixes**
|
||||
* [[`f961219`](http://github.com/cnpm/cnpmcore/commit/f961219dbe4676156e1766db82379ee40087bcd8)] - fix: Sync save ignore ER_DUP_ENTRY error (#364) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
|
||||
**others**
|
||||
* [[`7bc0fcc`](http://github.com/cnpm/cnpmcore/commit/7bc0fccaca880efe08228b4109953bd3974d2eb9)] - 🤖 TEST: Fix async function mock (fengmk2 <<fengmk2@gmail.com>>)
|
||||
* [[`84ae9bc`](http://github.com/cnpm/cnpmcore/commit/84ae9bcfa06124255703b926f83fb5e6a6bf9d6b)] - 📖 DOC: Update contributors (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
2.8.0 / 2022-11-29
|
||||
==================
|
||||
|
||||
**others**
|
||||
* [[`d55c680`](http://github.com/cnpm/cnpmcore/commit/d55c680ef906ecb27f7967782ad7d25987cef7d4)] - Event cork (#361) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
|
||||
2.7.1 / 2022-11-25
|
||||
==================
|
||||
|
||||
**fixes**
|
||||
* [[`c6b8aec`](http://github.com/cnpm/cnpmcore/commit/c6b8aecfd0c2b0d454389e931747c431dac5742b)] - fix: request binary error (#360) (Ke Wu <<gemwuu@163.com>>)
|
||||
|
||||
2.7.0 / 2022-11-25
|
||||
==================
|
||||
|
||||
**others**
|
||||
* [[`5738d56`](http://github.com/cnpm/cnpmcore/commit/5738d569ea691c05c3f3b0b74a454a33fefb8fc7)] - refactor: binary sync task use binaryName by default (#358) (Ke Wu <<gemwuu@163.com>>)
|
||||
|
||||
2.6.1 / 2022-11-23
|
||||
==================
|
||||
|
||||
**fixes**
|
||||
* [[`0b35ead`](http://github.com/cnpm/cnpmcore/commit/0b35ead2a0cd73b89d2d961bafec13d7250fe805)] - 🐛 FIX: typo for canvas (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
2.6.0 / 2022-11-23
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`be8387d`](http://github.com/cnpm/cnpmcore/commit/be8387dfa48b9487156542000a93081fa823694a)] - feat: Support canvas sync from different binary (#357) (Ke Wu <<gemwuu@163.com>>)
|
||||
|
||||
**fixes**
|
||||
* [[`d6c4cf5`](http://github.com/cnpm/cnpmcore/commit/d6c4cf5029ca6450064fc05696a8624b6c36f0b2)] - fix: duplicate binary task (#354) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
|
||||
2.5.2 / 2022-11-11
|
||||
==================
|
||||
|
||||
**fixes**
|
||||
* [[`7eb209d`](http://github.com/cnpm/cnpmcore/commit/7eb209de1332417db2070846891d78f5afa0cd10)] - fix: create task when waiting (#352) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
|
||||
2.5.1 / 2022-11-07
|
||||
==================
|
||||
|
||||
**others**
|
||||
* [[`e40c502`](http://github.com/cnpm/cnpmcore/commit/e40c5021bb2ba78f8879d19bc477883168560b85)] - 🐛 FIX: Mirror cypress arm64 binary (#351) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
2.5.0 / 2022-11-04
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`43d77ee`](http://github.com/cnpm/cnpmcore/commit/43d77ee91e52bd74594d9d569b839c1a4b7fbac6)] - feat: long description (#349) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
|
||||
2.4.1 / 2022-10-28
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`92350a8`](http://github.com/cnpm/cnpmcore/commit/92350a864313ee42a048d9e83886ef42db3419de)] - 👌 IMPROVE: Show changes stream create task log (#347) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
**fixes**
|
||||
* [[`28eeeaf`](http://github.com/cnpm/cnpmcore/commit/28eeeafd9870c6b1c5b4f4c23916f6ae73ddda12)] - fix: registry host config (#346) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
* [[`cd5bd92`](http://github.com/cnpm/cnpmcore/commit/cd5bd923b8d47bf90b5f077ce04777b38653b850)] - 🐛 FIX: Catch all error on changes stream handler (#344) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
2.4.0 / 2022-10-25
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`6aa302d`](http://github.com/cnpm/cnpmcore/commit/6aa302d074f2c84f39e2065fa20853b007f6fa3b)] - 📦 NEW: Use oss-cnpm v4 (#340) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
* [[`a217fd0`](http://github.com/cnpm/cnpmcore/commit/a217fd07ccad3fe5058881654a13e0c69c758717)] - 👌 IMPROVE: Reduce warning log (#326) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
**fixes**
|
||||
* [[`b19b0a0`](http://github.com/cnpm/cnpmcore/commit/b19b0a0496e35ac1c6b3de746b9221990ba9dc93)] - fix: Lazy set registryId when executeTask (#341) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
|
||||
**others**
|
||||
* [[`305175a`](http://github.com/cnpm/cnpmcore/commit/305175ab5fcdc3ad3b60055d45cfcacb23065a80)] - 🤖 TEST: Use enum define on unittest (#333) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
* [[`07f2eba`](http://github.com/cnpm/cnpmcore/commit/07f2eba137ba625b2d422677a465920617141b87)] - 🤖 TEST: Mock all binary http requests (#328) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
* [[`4b0c7dc`](http://github.com/cnpm/cnpmcore/commit/4b0c7dc6196960d34b2529bfde724e97f1af8444)] - 🤖 TEST: Mock all httpclient request (#327) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
2.3.1 / 2022-10-06
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`bbc08fd`](http://github.com/cnpm/cnpmcore/commit/bbc08fd26887d55b98b70d1ed210caf81f9d5c22)] - 👌 IMPROVE: syncPackageWorkerMaxConcurrentTasks up to 20 (#322) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
* [[`5852f22`](http://github.com/cnpm/cnpmcore/commit/5852f22023525d857ff1ceea205e4315c8079877)] - feat: support sync exist mode (#275) (zhangyuantao <<zhangyuantao@163.com>>)
|
||||
|
||||
**fixes**
|
||||
* [[`d79634e`](http://github.com/cnpm/cnpmcore/commit/d79634eea749fef1a420988a8599f156f28ee85a)] - 🐛 FIX: Should sync package when registry id is null (#324) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
* [[`24f920d`](http://github.com/cnpm/cnpmcore/commit/24f920d65b31f9eb83c1ecda36adf7f9e2c379c3)] - 🐛 FIX: Should run sync package on all worker (#323) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
2.3.0 / 2022-09-24
|
||||
==================
|
||||
|
||||
**others**
|
||||
* [[`bd83a19`](http://github.com/cnpm/cnpmcore/commit/bd83a19eca761c96bcee04e6ae91e68eac3cb6bf)] - 👌 IMPROVE: use urllib3 instead (#302) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
* [[`35e7d3a`](http://github.com/cnpm/cnpmcore/commit/35e7d3ad3c78712b507d522a0b72b5a6a5a4ec1c)] - 👌 IMPROVE: Enable phpmyadmin and DEBUG_LOCAL_SQL by default (#320) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
2.2.0 / 2022-09-22
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`bca0fb3`](http://github.com/cnpm/cnpmcore/commit/bca0fb3c37b9f74f3c41ab181dd3113d9dab4c05)] - feat: only allow pkg sync from registry it belong (#317) (killa <<killa123@126.com>>)
|
||||
|
||||
**fixes**
|
||||
* [[`7e9beea`](http://github.com/cnpm/cnpmcore/commit/7e9beead576a41de3aa042b92b788bde5d55f44a)] - fix: only append / if path is not empty and not ends with / (#316) (killa <<killa123@126.com>>)
|
||||
* [[`4fe68cb`](http://github.com/cnpm/cnpmcore/commit/4fe68cbf38f303e797b80b88407f714ec76bfae0)] - fix: fix directory path (#313) (killa <<killa123@126.com>>)
|
||||
|
||||
**others**
|
||||
* [[`e72ce35`](http://github.com/cnpm/cnpmcore/commit/e72ce3576f9a3cda095e3feac59eeb1d8c1e8033)] - 🤖 TEST: Skip unstable tests (#318) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
* [[`171b11f`](http://github.com/cnpm/cnpmcore/commit/171b11f7bba534c993af4088b00f8545216734a9)] - Revert "fix: fix directory path (#313)" (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
2.1.1 / 2022-09-08
|
||||
==================
|
||||
|
||||
**fixes**
|
||||
* [[`8fb9dd8`](http://github.com/cnpm/cnpmcore/commit/8fb9dd8cf4800afe3f54aba9ee4c0ae05efb4f1d)] - fix: findExecuteTask only return waiting task (#312) (killa <<killa123@126.com>>)
|
||||
|
||||
2.1.0 / 2022-09-05
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`c5d2b49`](http://github.com/cnpm/cnpmcore/commit/c5d2b49ab3a0ce0d67f6e7cc19e0be867c92d04c)] - feat: auto get next valid task (#311) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
|
||||
2.0.0 / 2022-09-05
|
||||
==================
|
||||
|
||||
**others**
|
||||
* [[`fc4baff`](http://github.com/cnpm/cnpmcore/commit/fc4baff226540e7cfee9adc069e17a59f4050a43)] - chore: refactor schedule with @Schedule (#309) (killa <<killa123@126.com>>)
|
||||
|
||||
1.11.6 / 2022-09-04
|
||||
==================
|
||||
|
||||
**fixes**
|
||||
* [[`768f951`](http://github.com/cnpm/cnpmcore/commit/768f951b6f2509f14c30a70d86a6719107d963a4)] - fix: cnpmjsorg changesstream limit (#310) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
|
||||
1.11.5 / 2022-09-02
|
||||
==================
|
||||
|
||||
**fixes**
|
||||
* [[`f673ab8`](http://github.com/cnpm/cnpmcore/commit/f673ab8ba1545909ff6b8e445364646511930891)] - fix: execute state check (#308) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
|
||||
**others**
|
||||
* [[`091420a`](http://github.com/cnpm/cnpmcore/commit/091420ae2677ecedd1a26a238921321c2a191675)] - 🤖 TEST: Add SQL Review Action (#307) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
1.11.4 / 2022-08-30
|
||||
==================
|
||||
|
||||
**fixes**
|
||||
* [[`f9210ca`](http://github.com/cnpm/cnpmcore/commit/f9210ca7e180e19bce08da9ef33e46e990b86ef1)] - fix: changes stream empty (#306) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
|
||||
1.11.3 / 2022-08-29
|
||||
==================
|
||||
|
||||
**fixes**
|
||||
* [[`48f228d`](http://github.com/cnpm/cnpmcore/commit/48f228da447d8cde62849fa52cf43bae7754e2e3)] - fix: changes stream updatedAt (#304) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
* [[`87045ba`](http://github.com/cnpm/cnpmcore/commit/87045ba8b0e14547c93689600eb7e2c1de2a611b)] - fix: task updatedAt save (#305) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
|
||||
1.11.2 / 2022-08-28
|
||||
==================
|
||||
|
||||
**fixes**
|
||||
* [[`4e8700c`](http://github.com/cnpm/cnpmcore/commit/4e8700c4f7c6fb5c4f4d4a2b9a9546096c5d10e2)] - fix: only create createHookTask if hook enable (#299) (killa <<killa123@126.com>>)
|
||||
|
||||
**others**
|
||||
* [[`e06c841`](http://github.com/cnpm/cnpmcore/commit/e06c841537113fdb0c00beb22b0a55378c61ce80)] - 🐛 FIX: Should sync public package when registryName not exists (#303) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
* [[`f139444`](http://github.com/cnpm/cnpmcore/commit/f139444213403494ebe9bf073df62125413892d9)] - 📖 DOC: Update contributors (fengmk2 <<fengmk2@gmail.com>>)
|
||||
* [[`c4a9de5`](http://github.com/cnpm/cnpmcore/commit/c4a9de598dce9a1b82bbcdd91968a15bbc5a4b6b)] - Create SECURITY.md (fengmk2 <<fengmk2@gmail.com>>)
|
||||
* [[`709d65b`](http://github.com/cnpm/cnpmcore/commit/709d65bd0473856c9bfc4416ea2ca375136e354f)] - 🤖 TEST: Use diff bucket on OSS test (#301) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
* [[`9576699`](http://github.com/cnpm/cnpmcore/commit/95766990fa9c4c2c43d462f6b151557425b0c741)] - chore: use AsyncGenerator insteadof Transform stream (#300) (killa <<killa123@126.com>>)
|
||||
* [[`3ed5269`](http://github.com/cnpm/cnpmcore/commit/3ed5269f1d22ca3aaca89a90a4fff90f293e2464)] - 📦 NEW: Mirror better-sqlite3 binary (#296) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
1.11.1 / 2022-08-24
|
||||
==================
|
||||
|
||||
**fixes**
|
||||
* [[`359a150`](http://github.com/cnpm/cnpmcore/commit/359a150eb450d69e6523b20efcc5c7cfe3efab4d)] - fix: changes stream (#297) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
|
||||
1.11.0 / 2022-08-23
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`a91c8ac`](http://github.com/cnpm/cnpmcore/commit/a91c8ac4d05dc903780fda516b09364a05a2b1e6)] - feat: sync package from spec regsitry (#293) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
* [[`de37008`](http://github.com/cnpm/cnpmcore/commit/de37008261b05845f392d66764cdfe14ae324756)] - feat: changesStream adapter & needSync() method (#292) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
* [[`4b506c8`](http://github.com/cnpm/cnpmcore/commit/4b506c8371697ddacdbe99a8ecb330bfc1911ec6)] - feat: init registry & scope (#286) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
|
||||
* [[`41c6e24`](http://github.com/cnpm/cnpmcore/commit/41c6e24c84d546eb9d5515cc0940cc3e4274687b)] - feat: impl trigger Hooks (#289) (killa <<killa123@126.com>>)
|
||||
* [[`79cb826`](http://github.com/cnpm/cnpmcore/commit/79cb82615f04bdb3da3ccbe09bb6a861608b69c5)] - feat: impl migration sql (#290) (killa <<killa123@126.com>>)
|
||||
* [[`4cfa8ed`](http://github.com/cnpm/cnpmcore/commit/4cfa8ed9d687ce7d950d7d20c0ea28221763ba5f)] - feat: impl hooks api (#287) (killa <<killa123@126.com>>)
|
||||
* [[`47d53d2`](http://github.com/cnpm/cnpmcore/commit/47d53d22ad03c02ee9cb9035a38ae205a6d38381)] - feat: add bizId for task (#285) (killa <<killa123@126.com>>)
|
||||
* [[`3b1536b`](http://github.com/cnpm/cnpmcore/commit/3b1536b070b2f9062bc2cc377db96d2f4a160efc)] - feat: add node-webrtc mirror (#274) (Opportunity <<opportunity@live.in>>)
|
||||
|
||||
**others**
|
||||
* [[`7106807`](http://github.com/cnpm/cnpmcore/commit/710680742a078b2faf4cb18c3a39c0397308712e)] - 🐛 FIX: Should show queue size on logging (#280) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
* [[`3a41b21`](http://github.com/cnpm/cnpmcore/commit/3a41b2161cc99bb2f6f6dd7cbaa7abef25ff4393)] - 🐛 FIX: Handle binary configuration value (#278) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
1.10.0 / 2022-08-04
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`c2b7d5a`](http://github.com/cnpm/cnpmcore/commit/c2b7d5aa98b5ba8649ec246c616574a22e9a74b8)] - feat: use sort set to impl queue (#277) (killa <<killa123@126.com>>)
|
||||
|
||||
1.9.1 / 2022-07-29
|
||||
==================
|
||||
|
||||
**fixes**
|
||||
* [[`c54aa21`](http://github.com/cnpm/cnpmcore/commit/c54aa2165c3938dcbb5a2b3b54e66a0d961cc813)] - fix: check executingCount after task is done (#276) (killa <<killa123@126.com>>)
|
||||
|
||||
**others**
|
||||
* [[`3268d03`](http://github.com/cnpm/cnpmcore/commit/3268d030b620825c8c2e6331e1745c1788066c61)] - 🤖 TEST: show package not use cache if isSync (#273) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
1.9.0 / 2022-07-25
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`af6a75a`](http://github.com/cnpm/cnpmcore/commit/af6a75af32ea04c90fda82be3a56c99ec77e5807)] - feat: add forceSyncHistory options (#271) (killa <<killa123@126.com>>)
|
||||
|
||||
1.8.0 / 2022-07-21
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`b49a38c`](http://github.com/cnpm/cnpmcore/commit/b49a38c77e044c978e6de32a9d3e257cc90ea7c1)] - feat: use Model with inject (#269) (killa <<killa123@126.com>>)
|
||||
|
||||
1.7.1 / 2022-07-20
|
||||
==================
|
||||
|
||||
**fixes**
|
||||
* [[`52fca55`](http://github.com/cnpm/cnpmcore/commit/52fca55aa883865f0ae70bfc1ff274c313b8f76a)] - fix: show package not use cache if isSync (#268) (killa <<killa123@126.com>>)
|
||||
|
||||
1.7.0 / 2022-07-12
|
||||
==================
|
||||
|
||||
**others**
|
||||
* [[`4f7ce8b`](http://github.com/cnpm/cnpmcore/commit/4f7ce8b4b2a5806a225ce67228388e14388b7059)] - deps: upgrade leoric to 2.x (#262) (killa <<killa123@126.com>>)
|
||||
|
||||
1.6.0 / 2022-07-11
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`1b9a9c7`](http://github.com/cnpm/cnpmcore/commit/1b9a9c70f66d8393e3b132f18713461a9243db73)] - feat: mirror nydus binaries (#261) (killa <<killa123@126.com>>)
|
||||
|
||||
**others**
|
||||
* [[`c1256bf`](http://github.com/cnpm/cnpmcore/commit/c1256bf3807bcc9a5c8be2ec5bf5ca8a5eef112e)] - 🐛 FIX: Ignore 403 status on s3 download fail (#260) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
* [[`d685772`](http://github.com/cnpm/cnpmcore/commit/d6857724307fb0df0c4c118491784b30d19a9a15)] - 🐛 FIX: skia-canvas should use NodePreGypBinary (#259) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
1.5.0 / 2022-07-09
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`b15b10c`](http://github.com/cnpm/cnpmcore/commit/b15b10c5c6cfb32bcc2b1d94434cdd16871ae565)] - feat(mirror): add skia-canvas mirror (#258) (Beace <<beaceshimin@gmail.com>>)
|
||||
|
||||
**others**
|
||||
* [[`2bd6ed0`](http://github.com/cnpm/cnpmcore/commit/2bd6ed0e5dace1d8840c342ecf4c86e8973dc6b7)] - 👌 IMPROVE: use tegg@1.2.0 (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
1.4.0 / 2022-06-28
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`57da0a3`](http://github.com/cnpm/cnpmcore/commit/57da0a3c7e56d6613b57391948949ffea24ec058)] - feat: add configuration enableNopmClientAndVersionCheck (laibao101 <<369632567@qq.com>>)
|
||||
|
||||
**others**
|
||||
* [[`bf62932`](http://github.com/cnpm/cnpmcore/commit/bf62932f2e5224de6e34b873bf690a6e887b94b0)] - 🤖 TEST: Fix unstable test cases on OSS env (#254) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
1.3.2 / 2022-06-27
|
||||
==================
|
||||
|
||||
**fixes**
|
||||
* [[`c63159d`](http://github.com/cnpm/cnpmcore/commit/c63159d8df804fe711b664606fe42be42010eb38)] - fix: valid npm client with correct pattern (#252) (TZ | 天猪 <<atian25@qq.com>>)
|
||||
|
||||
**others**
|
||||
* [[`d578baf`](http://github.com/cnpm/cnpmcore/commit/d578bafff07a0f9d4dd75393492cffc7f5d2660b)] - 🐛 FIX: Ignore exists seq on changes worker (#253) (fengmk2 <<fengmk2@gmail.com>>)
|
||||
|
||||
1.3.1 / 2022-06-24
|
||||
==================
|
||||
|
||||
**fixes**
|
||||
* [[`4ea0ef6`](http://github.com/cnpm/cnpmcore/commit/4ea0ef63b7af9fd4dcc247c2c2ac8e4d579f941a)] - fix: query changes with order by id asc (#251) (killa <<killa123@126.com>>)
|
||||
|
||||
1.3.0 / 2022-06-24
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`0948a71`](http://github.com/cnpm/cnpmcore/commit/0948a71a40ac4897d129ef56830665dc028f07c7)] - feat: read enableChangesStream when sync changes stream (#250) (killa <<killa123@126.com>>)
|
||||
|
||||
1.2.0 / 2022-06-20
|
||||
==================
|
||||
|
||||
**others**
|
||||
* [[`c0d8b52`](http://github.com/cnpm/cnpmcore/commit/c0d8b52ea09736ac11b0ef780aec781d172fb94c)] - refactor: move CacheAdapter to ContextProto (#249) (killa <<killa123@126.com>>)
|
||||
|
||||
1.1.0 / 2022-06-20
|
||||
==================
|
||||
|
||||
**features**
|
||||
* [[`66b411e`](http://github.com/cnpm/cnpmcore/commit/66b411ea5bf6192dc9509df408525078e7128a27)] - feat: add type for exports (#248) (killa <<killa123@126.com>>)
|
||||
|
||||
1.0.0 / 2022-06-17
|
||||
==================
|
||||
|
||||
**others**
|
||||
* [[`5cadbf4`](http://github.com/cnpm/cnpmcore/commit/5cadbf4b22bee7d85cd14526f5d6c6e2cd3a2e4b)] - refactor: add infra module (#245) (killa <<killa123@126.com>>)
|
||||
|
||||
INTEGRATE.md (203 lines changed)
@@ -1,14 +1,14 @@
|
||||
# 🥚 如何在 [tegg](https://github.com/eggjs/tegg) 中集成 cnpmcore
|
||||
# 🥚 如何在 [tegg](https://github.com/eggjs/egg/blob/next/tegg) 中集成 cnpmcore
|
||||
|
||||
> 文档中的示例项目可以在 [这里](https://github.com/eggjs/examples/commit/bed580fe053ae573f8b63f6788002ff9c6e7a142) 查看,在开始前请确保已阅读 [DEVELOPER.md](DEVELOPER.md) 中的相关文档,完成本地开发环境搭建。
|
||||
|
||||
在生产环境中,我们也可以直接部署 cnpmcore 系统,实现完整的 Registry 镜像功能。
|
||||
但通常,在企业内部会有一些内部的中间件服务或限制,例如文件存储、缓存服务、登录鉴权流程等。
|
||||
|
||||
除了源码部署、二次开发的方式,我们还提供了 npm 包的方式,便于 [tegg](https://github.com/eggjs/tegg) 应用集成。
|
||||
除了源码部署、二次开发的方式,我们还提供了 npm 包的方式,便于 [tegg](https://github.com/eggjs/egg/blob/next/tegg) 应用集成。
|
||||
这样既可以享受到丰富的自定义扩展能力,又可以享受到 cnpmcore 持续迭代的功能演进。
|
||||
|
||||
下面,让我们以 [tegg](https://github.com/eggjs/tegg) 初始化的应用为例,以 npm 包的方式集成 cnpmcore,并扩展登录功能,以支持企业内 [SSO](https://en.wikipedia.org/wiki/Single_sign-on) 登录。
|
||||
下面,让我们以 [tegg](https://github.com/eggjs/egg/blob/next/tegg) 初始化的应用为例,以 npm 包的方式集成 cnpmcore,并扩展登录功能,以支持企业内 [SSO](https://en.wikipedia.org/wiki/Single_sign-on) 登录。
|
||||
|
||||
## 🚀 快速开始
|
||||
|
||||
@@ -34,51 +34,36 @@
|
||||
|
||||
### 📦︎ 安装 cnpmcore 修改对应配置
|
||||
|
||||
```shell
|
||||
npm i cnpmcore -S
|
||||
```
|
||||
```shell
|
||||
npm i cnpmcore
|
||||
```
|
||||
|
||||
1. 修改 `tsconfig.json` 配置,这是因为 cnpmcore 使用了 [subPath](https://nodejs.org/api/packages.html#subpath-exports)
|
||||
|
||||
```json
|
||||
{
|
||||
"extends": "@eggjs/tsconfig",
|
||||
"compilerOptions": {
|
||||
"baseUrl": "./",
|
||||
"moduleResolution": "NodeNext",
|
||||
"target": "ES2020",
|
||||
"module": "Node16"
|
||||
}
|
||||
}
|
||||
```
|
||||
```json
|
||||
{
|
||||
"extends": "@eggjs/tsconfig",
|
||||
"compilerOptions": {
|
||||
"baseUrl": "./",
|
||||
"target": "ES2021"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
2. 修改 `config/plugin.ts` 文件,开启 cnpmcore 依赖的一些插件
|
||||
|
||||
```typescript
|
||||
// 开启如下插件
|
||||
{
|
||||
redis: {
|
||||
enable: true,
|
||||
package: 'egg-redis',
|
||||
},
|
||||
teggOrm: {
|
||||
enable: true,
|
||||
package: '@eggjs/tegg-orm-plugin',
|
||||
},
|
||||
eventbusModule: {
|
||||
enable: true,
|
||||
package: '@eggjs/tegg-eventbus-plugin',
|
||||
},
|
||||
tracer: {
|
||||
enable: true,
|
||||
package: 'egg-tracer',
|
||||
},
|
||||
typeboxValidate: {
|
||||
enable: true,
|
||||
package: 'egg-typebox-validate',
|
||||
},
|
||||
}
|
||||
```
|
||||
```typescript
|
||||
import tracerPlugin from '@eggjs/tracer';
|
||||
import typeboxValidatePlugin from '@eggjs/typebox-validate';
|
||||
import redisPlugin from '@eggjs/redis';
|
||||
|
||||
// 开启如下插件
|
||||
export default {
|
||||
...redisPlugin(),
|
||||
...tracerPlugin(),
|
||||
...typeboxValidatePlugin(),
|
||||
}
|
||||
```
|
||||
|
||||
3. 修改 `config.default.ts` 文件,可以直接覆盖默认配置
|
||||
|
||||
@@ -92,6 +77,8 @@ export default () => {
|
||||
...cnpmcoreConfig,
|
||||
enableChangesStream: false,
|
||||
syncMode: SyncMode.all,
|
||||
allowPublicRegistration: true,
|
||||
// 放开注册配置
|
||||
};
|
||||
return config;
|
||||
}
|
||||
@@ -101,63 +88,63 @@ export default () => {
|
||||
|
||||
1. 创建文件夹,用于存放自定义的 infra module,这里以 app/infra 为例
|
||||
|
||||
```shell
|
||||
├── infra
|
||||
│ ├── AuthAdapter.ts
|
||||
│ ├── NFSAdapter.ts
|
||||
│ ├── QueueAdapter.ts
|
||||
│ └── package.json
|
||||
```
|
||||
```shell
|
||||
├── infra
|
||||
│ ├── AuthAdapter.ts
|
||||
│ ├── NFSAdapter.ts
|
||||
│ ├── QueueAdapter.ts
|
||||
│ └── package.json
|
||||
```
|
||||
|
||||
* 添加 `package.json` ,声明 infra 作为一个 eggModule 单元
|
||||
|
||||
```JSON
|
||||
{
|
||||
"name": "infra",
|
||||
"eggModule": {
|
||||
"name": "infra"
|
||||
}
|
||||
}
|
||||
```
|
||||
```JSON
|
||||
{
|
||||
"name": "infra",
|
||||
"eggModule": {
|
||||
"name": "infra"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
* 添加 `XXXAdapter.ts` 在对应的 Adapter 中继承 cnpmcore 默认的 Adapter,以 AuthAdapter 为例
|
||||
|
||||
```typescript
|
||||
import { AccessLevel, SingletonProto } from '@eggjs/tegg';
|
||||
import { AuthAdapter } from 'cnpmcore/infra/AuthAdapter';
|
||||
```typescript
|
||||
import { AccessLevel, SingletonProto } from 'egg';
|
||||
import { AuthAdapter } from 'cnpmcore/infra/AuthAdapter';
|
||||
|
||||
@SingletonProto({
|
||||
name: 'authAdapter',
|
||||
accessLevel: AccessLevel.PUBLIC,
|
||||
})
|
||||
export class MyAuthAdapter extends AuthAdapter {
|
||||
}
|
||||
```
|
||||
@SingletonProto({
|
||||
name: 'authAdapter',
|
||||
accessLevel: AccessLevel.PUBLIC,
|
||||
})
|
||||
export class MyAuthAdapter extends AuthAdapter {
|
||||
}
|
||||
```
|
||||
|
||||
2. 添加 `config/module.json`,将 cnpmcore 作为一个 module 集成进我们新增的 tegg 应用中
|
||||
|
||||
```json
|
||||
[
|
||||
{
|
||||
"path": "../app/biz"
|
||||
},
|
||||
{
|
||||
"path": "../app/infra"
|
||||
},
|
||||
{
|
||||
"package": "cnpmcore/common"
|
||||
},
|
||||
{
|
||||
"package": "cnpmcore/core"
|
||||
},
|
||||
{
|
||||
"package": "cnpmcore/port"
|
||||
},
|
||||
{
|
||||
"package": "cnpmcore/repository"
|
||||
}
|
||||
]
|
||||
```
|
||||
```json
|
||||
[
|
||||
{
|
||||
"path": "../app/biz"
|
||||
},
|
||||
{
|
||||
"path": "../app/infra"
|
||||
},
|
||||
{
|
||||
"package": "cnpmcore/common"
|
||||
},
|
||||
{
|
||||
"package": "cnpmcore/core"
|
||||
},
|
||||
{
|
||||
"package": "cnpmcore/port"
|
||||
},
|
||||
{
|
||||
"package": "cnpmcore/repository"
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
### ✍🏻 重载 AuthAdapter 实现
|
||||
|
||||
@@ -173,10 +160,10 @@ export default () => {
|
||||
修改 AuthAdapter.ts 文件
|
||||
|
||||
```typescript
|
||||
import { AccessLevel, EggContext, SingletonProto } from '@eggjs/tegg';
|
||||
import { AccessLevel, Context, SingletonProto } from 'egg';
|
||||
import { AuthAdapter } from 'cnpmcore/infra/AuthAdapter';
|
||||
import { randomUUID } from 'crypto';
|
||||
import { AuthUrlResult, userResult } from 'node_modules/cnpmcore/dist/app/common/typing';
|
||||
import { AuthUrlResult, userResult } from 'cnpmcore/dist/app/common/typing';
|
||||
|
||||
const ONE_DAY = 3600 * 24;
|
||||
|
||||
@@ -185,7 +172,7 @@ const ONE_DAY = 3600 * 24;
|
||||
accessLevel: AccessLevel.PUBLIC,
|
||||
})
|
||||
export class MyAuthAdapter extends AuthAdapter {
|
||||
async getAuthUrl(ctx: EggContext): Promise<AuthUrlResult> {
|
||||
async getAuthUrl(ctx: Context): Promise<AuthUrlResult> {
|
||||
const sessionId = randomUUID();
|
||||
await this.redis.setex(sessionId, ONE_DAY, '');
|
||||
return {
|
||||
@@ -208,33 +195,33 @@ export class MyAuthAdapter extends AuthAdapter {
|
||||
修改 HelloController 的实现,实际也可以通过登录中心回调、页面确认等方式实现
|
||||
|
||||
```typescript
|
||||
// 触发回调接口,会自动完成用户创建
|
||||
await this.httpclient.request(`${ctx.origin}/-/v1/login/sso/${name}`, { method: 'POST' });
|
||||
// 触发回调接口,会自动完成用户创建
|
||||
await this.httpclient.request(`${ctx.origin}/-/v1/login/sso/${name}`, { method: 'POST' });
|
||||
```
|
||||
|
||||
## 🎉 功能验证
|
||||
|
||||
1. 在命令行输入 `npm login --registry=http://127.0.0.1:7001`
|
||||
|
||||
```shell
|
||||
npm login --registry=http://127.0.0.1:7001
|
||||
npm notice Log in on http://127.0.0.1:7001/
|
||||
Login at:
|
||||
http://127.0.0.1:7001/hello?name=e44e8c43-211a-4bcd-ae78-c4cbb1a78ae7
|
||||
Press ENTER to open in the browser...
|
||||
```
|
||||
```shell
|
||||
npm login --registry=http://127.0.0.1:7001
|
||||
npm notice Log in on http://127.0.0.1:7001/
|
||||
Login at:
|
||||
http://127.0.0.1:7001/hello?name=e44e8c43-211a-4bcd-ae78-c4cbb1a78ae7
|
||||
Press ENTER to open in the browser...
|
||||
```
|
||||
|
||||
2. 界面提示回车打开浏览器访问登录中心,也就是我们在 getAuthUrl 中返回的 loginUrl 配置
|
||||
|
||||
3. 由于我们 mock 了对应实现,界面会直接显示登录成功
|
||||
|
||||
```shell
|
||||
Logged in on http://127.0.0.1:7001/.
|
||||
```
|
||||
```shell
|
||||
Logged in on http://127.0.0.1:7001/.
|
||||
```
|
||||
|
||||
4. 在命令行输入 `npm whoami --registry=http://127.0.0.1:7001` 验证
|
||||
|
||||
```shell
|
||||
npm whoami --registry=http://127.0.0.1:7001
|
||||
hello
|
||||
```
|
||||
```shell
|
||||
npm whoami --registry=http://127.0.0.1:7001
|
||||
hello
|
||||
```
|
||||
|
||||
README.md (32 lines changed)
@@ -1,16 +1,24 @@
|
||||
# Private NPM Registry for Enterprise
|
||||
|
||||
[](https://github.com/cnpm/cnpmcore/actions/workflows/nodejs.yml)
|
||||
[](https://codecov.io/gh/cnpm/cnpmcore)
|
||||
[](https://github.com/cnpm/cnpmcore/actions/workflows/codeql-analysis.yml)
|
||||
[](https://github.com/ahmadawais/Emoji-Log/)
|
||||
[](https://github.com/cnpm/cnpmcore/actions/workflows/nodejs.yml)
|
||||
[](https://app.codecov.io/gh/cnpm/cnpmcore/tree/master)
|
||||
[](https://app.fossa.com/projects/git%2Bgithub.com%2Fcnpm%2Fcnpmcore?ref=badge_shield)
|
||||
[](https://nodejs.org/en/download/)
|
||||
[](https://makeapullrequest.com)
|
||||

|
||||
[](https://www.npmjs.com/package/cnpmcore)
|
||||
[](https://www.npmjs.com/package/cnpmcore)
|
||||
[](https://github.com/cnpm/cnpmcore/blob/master/LICENSE)
|
||||
|
||||
Reimplementation based on [cnpmjs.org](https://github.com/cnpm/cnpmjs.org) with TypeScript.
|
||||
Reimplemented based on [cnpmjs.org](https://github.com/cnpm/cnpmjs.org) with TypeScript.
|
||||
|
||||
## Registry HTTP API
|
||||
|
||||
See https://github.com/cnpm/cnpmjs.org/blob/master/docs/registry-api.md#npm-registry-api
|
||||
See [registry-api.md](docs/registry-api.md)
|
||||
|
||||
## Internal API for Direct HTTP Requests
|
||||
|
||||
See [internal-api.md](docs/internal-api.md) for comprehensive documentation of cnpmcore's internal APIs that allow direct HTTP requests for package synchronization, administration, and other advanced operations.
|
||||
|
||||
## How to contribute
|
||||
|
||||
@@ -24,18 +32,10 @@ See [INTEGRATE.md](INTEGRATE.md)
|
||||
|
||||
[MIT](LICENSE)
|
||||
|
||||
<!-- GITCONTRIBUTOR_START -->
|
||||
|
||||
## Contributors
|
||||
|
||||
|[<img src="https://avatars.githubusercontent.com/u/156269?v=4" width="100px;"/><br/><sub><b>fengmk2</b></sub>](https://github.com/fengmk2)<br/>|[<img src="https://avatars.githubusercontent.com/u/6897780?v=4" width="100px;"/><br/><sub><b>killagu</b></sub>](https://github.com/killagu)<br/>|[<img src="https://avatars.githubusercontent.com/u/32174276?v=4" width="100px;"/><br/><sub><b>semantic-release-bot</b></sub>](https://github.com/semantic-release-bot)<br/>|[<img src="https://avatars.githubusercontent.com/u/5574625?v=4" width="100px;"/><br/><sub><b>elrrrrrrr</b></sub>](https://github.com/elrrrrrrr)<br/>|[<img src="https://avatars.githubusercontent.com/u/35598090?v=4" width="100px;"/><br/><sub><b>hezhengxu2018</b></sub>](https://github.com/hezhengxu2018)<br/>|[<img src="https://avatars.githubusercontent.com/u/26033663?v=4" width="100px;"/><br/><sub><b>Zian502</b></sub>](https://github.com/Zian502)<br/>|
|
||||
| :---: | :---: | :---: | :---: | :---: | :---: |
|
||||
|[<img src="https://avatars.githubusercontent.com/u/4635838?v=4" width="100px;"/><br/><sub><b>gemwuu</b></sub>](https://github.com/gemwuu)<br/>|[<img src="https://avatars.githubusercontent.com/u/17879221?v=4" width="100px;"/><br/><sub><b>laibao101</b></sub>](https://github.com/laibao101)<br/>|[<img src="https://avatars.githubusercontent.com/u/3478550?v=4" width="100px;"/><br/><sub><b>coolyuantao</b></sub>](https://github.com/coolyuantao)<br/>|[<img src="https://avatars.githubusercontent.com/u/13284978?v=4" width="100px;"/><br/><sub><b>Beace</b></sub>](https://github.com/Beace)<br/>|[<img src="https://avatars.githubusercontent.com/u/10163680?v=4" width="100px;"/><br/><sub><b>Wellaiyo</b></sub>](https://github.com/Wellaiyo)<br/>|[<img src="https://avatars.githubusercontent.com/u/227713?v=4" width="100px;"/><br/><sub><b>atian25</b></sub>](https://github.com/atian25)<br/>|
|
||||
|[<img src="https://avatars.githubusercontent.com/u/8198408?v=4" width="100px;"/><br/><sub><b>BlackHole1</b></sub>](https://github.com/BlackHole1)<br/>|[<img src="https://avatars.githubusercontent.com/u/1814071?v=4" width="100px;"/><br/><sub><b>xiekw2010</b></sub>](https://github.com/xiekw2010)<br/>|[<img src="https://avatars.githubusercontent.com/u/13471233?v=4" width="100px;"/><br/><sub><b>OpportunityLiu</b></sub>](https://github.com/OpportunityLiu)<br/>|[<img src="https://avatars.githubusercontent.com/u/958063?v=4" width="100px;"/><br/><sub><b>thonatos</b></sub>](https://github.com/thonatos)<br/>|[<img src="https://avatars.githubusercontent.com/u/11039003?v=4" width="100px;"/><br/><sub><b>chenpx976</b></sub>](https://github.com/chenpx976)<br/>|[<img src="https://avatars.githubusercontent.com/u/29791463?v=4" width="100px;"/><br/><sub><b>fossabot</b></sub>](https://github.com/fossabot)<br/>|
|
||||
[<img src="https://avatars.githubusercontent.com/u/1119126?v=4" width="100px;"/><br/><sub><b>looksgood</b></sub>](https://github.com/looksgood)<br/>|[<img src="https://avatars.githubusercontent.com/u/23701019?v=4" width="100px;"/><br/><sub><b>laoboxie</b></sub>](https://github.com/laoboxie)<br/>|[<img src="https://avatars.githubusercontent.com/u/5550931?v=4" width="100px;"/><br/><sub><b>shinima</b></sub>](https://github.com/shinima)<br/>
|
||||
[](https://github.com/cnpm/cnpmcore/graphs/contributors)
|
||||
|
||||
This project follows the git-contributor [spec](https://github.com/xudafeng/git-contributor), auto updated at `Sat May 06 2023 12:40:20 GMT+0800`.
|
||||
|
||||
<!-- GITCONTRIBUTOR_END -->
|
||||
Made with [contributors-img](https://contrib.rocks).
|
||||
|
||||
[](https://app.fossa.com/projects/git%2Bgithub.com%2Fcnpm%2Fcnpmcore?ref=badge_large)
|
||||
|
||||
@@ -6,7 +6,7 @@ Currently being supported with security updates.
|
||||
|
||||
| Version | Supported |
|
||||
| -------- | ------------------ |
|
||||
| >= 1.0.0 | :white_check_mark: |
|
||||
| >= 3.0.0 | :white_check_mark: |
|
||||
|
||||
## Reporting a Vulnerability
|
||||
|
||||
@@ -35,7 +35,7 @@ When the security team receives a security bug report, they will assign it
|
||||
to a primary handler. This person will coordinate the fix and release
|
||||
process, involving the following steps:
|
||||
|
||||
* Confirm the problem and determine the affected versions.
|
||||
* Audit code to find any potential similar problems.
|
||||
* Prepare fixes for all releases still under maintenance. These fixes
|
||||
* Confirm the problem and determine the affected versions.
|
||||
* Audit code to find any potential similar problems.
|
||||
* Prepare fixes for all releases still under maintenance. These fixes
|
||||
will be released as fast as possible to NPM.
|
||||
|
||||
26
app.ts
26
app.ts
@@ -1,14 +1,16 @@
|
||||
import path from 'path';
|
||||
import { readFile } from 'fs/promises';
|
||||
import { Application } from 'egg';
|
||||
import { ChangesStreamService } from './app/core/service/ChangesStreamService';
|
||||
import path from 'node:path';
|
||||
import { readFile } from 'node:fs/promises';
|
||||
import type { Application, ILifecycleBoot } from 'egg';
|
||||
|
||||
import { ChangesStreamService } from './app/core/service/ChangesStreamService.ts';
|
||||
|
||||
declare module 'egg' {
|
||||
interface Application {
|
||||
binaryHTML: string;
|
||||
}
|
||||
}
|
||||
|
||||
export default class CnpmcoreAppHook {
|
||||
export default class CnpmcoreAppHook implements ILifecycleBoot {
|
||||
private readonly app: Application;
|
||||
|
||||
constructor(app: Application) {
|
||||
@@ -16,9 +18,9 @@ export default class CnpmcoreAppHook {
|
||||
this.app.binaryHTML = '';
|
||||
}
|
||||
|
||||
async configWillLoad() {
|
||||
configWillLoad() {
|
||||
const app = this.app;
|
||||
// https://github.com/eggjs/tegg/blob/master/plugin/orm/app.ts#L37
|
||||
// https://github.com/eggjs/egg/blob/next/tegg/plugin/orm/src/app.ts#L37
|
||||
// store query sql to log
|
||||
app.config.orm.logger = {
|
||||
...app.config.orm.logger,
|
||||
@@ -32,14 +34,18 @@ export default class CnpmcoreAppHook {
|
||||
async didReady() {
|
||||
// ready binary.html and replace registry
|
||||
const filepath = path.join(this.app.baseDir, 'app/port/binary.html');
|
||||
const text = await readFile(filepath, 'utf-8');
|
||||
this.app.binaryHTML = text.replace('{{registry}}', this.app.config.cnpmcore.registry);
|
||||
const text = await readFile(filepath, 'utf8');
|
||||
this.app.binaryHTML = text.replace(
|
||||
'{{registry}}',
|
||||
this.app.config.cnpmcore.registry
|
||||
);
|
||||
}
|
||||
|
||||
// 应用退出时执行
|
||||
// 需要暂停当前执行的 changesStream task
|
||||
async beforeClose() {
|
||||
const changesStreamService = await this.app.getEggObject(ChangesStreamService);
|
||||
const changesStreamService =
|
||||
await this.app.getEggObject(ChangesStreamService);
|
||||
await changesStreamService.suspendSync(true);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,14 +1,8 @@
|
||||
import {
|
||||
Inject,
|
||||
} from '@eggjs/tegg';
|
||||
import {
|
||||
EggAppConfig,
|
||||
EggLogger,
|
||||
} from 'egg';
|
||||
import { EggAppConfig, Logger, Inject } from 'egg';
|
||||
|
||||
export abstract class AbstractService {
|
||||
@Inject()
|
||||
protected readonly config: EggAppConfig;
|
||||
@Inject()
|
||||
protected readonly logger: EggLogger;
|
||||
protected readonly logger: Logger;
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { generateKeyPairSync, publicEncrypt, privateDecrypt, constants } from 'crypto';
|
||||
import { generateKeyPairSync } from 'node:crypto';
|
||||
import NodeRSA from 'node-rsa';
|
||||
|
||||
// generate rsa key pair
|
||||
export function genRSAKeys(): { publicKey: string, privateKey: string } {
|
||||
@@ -17,17 +18,19 @@ export function genRSAKeys(): { publicKey: string, privateKey: string } {
|
||||
}
|
||||
|
||||
// encrypt rsa private key
|
||||
export function encryptRSA(publicKey: string, data: string): string {
|
||||
return publicEncrypt({
|
||||
key: publicKey,
|
||||
padding: constants.RSA_PKCS1_PADDING,
|
||||
}, Buffer.from(data, 'utf8')).toString('base64');
|
||||
export function encryptRSA(publicKey: string, plainText: string): string {
|
||||
const key = new NodeRSA(publicKey, 'pkcs1-public-pem', {
|
||||
encryptionScheme: 'pkcs1',
|
||||
environment: 'browser',
|
||||
});
|
||||
return key.encrypt(plainText, 'base64');
|
||||
}
|
||||
|
||||
// decrypt rsa private key
|
||||
export function decryptRSA(privateKey: string, data: string) {
|
||||
return privateDecrypt({
|
||||
key: privateKey,
|
||||
padding: constants.RSA_PKCS1_PADDING,
|
||||
}, Buffer.from(data, 'base64')).toString('utf8');
|
||||
export function decryptRSA(privateKey: string, encryptedBase64: string): string {
|
||||
const key = new NodeRSA(privateKey, 'pkcs1-private-pem', {
|
||||
encryptionScheme: 'pkcs1',
|
||||
environment: 'browser',
|
||||
});
|
||||
return key.decrypt(encryptedBase64, 'utf8');
|
||||
}
|
||||
|
||||
29
app/common/ErrorUtil.ts
Normal file
29
app/common/ErrorUtil.ts
Normal file
@@ -0,0 +1,29 @@
|
||||
const TimeoutErrorNames = new Set([
|
||||
'HttpClientRequestTimeoutError',
|
||||
'HttpClientConnectTimeoutError',
|
||||
'ConnectionError',
|
||||
'ConnectTimeoutError',
|
||||
'BodyTimeoutError',
|
||||
'ResponseTimeoutError',
|
||||
]);
|
||||
|
||||
export function isTimeoutError(err: Error) {
|
||||
if (TimeoutErrorNames.has(err.name)) {
|
||||
return true;
|
||||
}
|
||||
if (err instanceof AggregateError && err.errors) {
|
||||
for (const subError of err.errors) {
|
||||
if (TimeoutErrorNames.has(subError.name)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (
|
||||
'cause' in err &&
|
||||
err.cause instanceof Error &&
|
||||
TimeoutErrorNames.has(err.cause.name)
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
@@ -1,78 +1,41 @@
|
||||
// oxlint-disable import/exports-last
|
||||
import { mkdir, rm } from 'node:fs/promises';
|
||||
import { createWriteStream } from 'node:fs';
|
||||
import { setTimeout } from 'node:timers/promises';
|
||||
import path from 'node:path';
|
||||
import url from 'node:url';
|
||||
import { randomBytes } from 'node:crypto';
|
||||
import { EggContextHttpClient, HttpClientResponse } from 'egg';
|
||||
import type { EggContextHttpClient, HttpClientResponse } from 'egg';
|
||||
import mime from 'mime-types';
|
||||
import dayjs from './dayjs';
|
||||
import dayjs from './dayjs.ts';
|
||||
|
||||
interface DownloadToTempfileOptionalConfig {
|
||||
retries?: number,
|
||||
ignoreDownloadStatuses?: number[],
|
||||
remoteAuthToken?: string
|
||||
}
|
||||
|
||||
export async function createTempDir(dataDir: string, dirname?: string) {
|
||||
// will auto clean on CleanTempDir Schedule
|
||||
let tmpdir = path.join(dataDir, 'downloads', dayjs().format('YYYY/MM/DD'));
|
||||
if (dirname) {
|
||||
tmpdir = path.join(tmpdir, dirname);
|
||||
}
|
||||
await mkdir(tmpdir, { recursive: true });
|
||||
return tmpdir;
|
||||
}
|
||||
|
||||
export async function createTempfile(dataDir: string, filename: string) {
|
||||
const tmpdir = await createTempDir(dataDir);
|
||||
// The filename is a URL (from dist.tarball), which needs to be truncated, (`getconf NAME_MAX /` # max filename length: 255 bytes)
|
||||
// https://github.com/cnpm/cnpmjs.org/pull/1345
|
||||
const tmpfile = path.join(tmpdir, `${randomBytes(10).toString('hex')}-${path.basename(url.parse(filename).pathname!)}`);
|
||||
return tmpfile;
|
||||
}
|
||||
|
||||
export async function downloadToTempfile(httpclient: EggContextHttpClient,
|
||||
dataDir: string, url: string, optionalConfig?: DownloadToTempfileOptionalConfig) {
|
||||
let retries = optionalConfig?.retries || 3;
|
||||
let lastError: any;
|
||||
while (retries > 0) {
|
||||
try {
|
||||
return await _downloadToTempfile(httpclient, dataDir, url, optionalConfig);
|
||||
} catch (err: any) {
|
||||
if (err.name === 'DownloadNotFoundError') throw err;
|
||||
lastError = err;
|
||||
}
|
||||
retries--;
|
||||
if (retries > 0) {
|
||||
// sleep 1s ~ 4s in random
|
||||
const delay = process.env.NODE_ENV === 'test' ? 1 : 1000 + Math.random() * 4000;
|
||||
await setTimeout(delay);
|
||||
}
|
||||
}
|
||||
throw lastError;
|
||||
}
|
||||
export interface Tempfile {
|
||||
tmpfile: string;
|
||||
headers: HttpClientResponse['res']['headers'];
|
||||
timing: HttpClientResponse['res']['timing'];
|
||||
}
|
||||
async function _downloadToTempfile(httpclient: EggContextHttpClient,
|
||||
dataDir: string, url: string, optionalConfig?: DownloadToTempfileOptionalConfig): Promise<Tempfile> {
|
||||
async function _downloadToTempfile(
|
||||
httpclient: EggContextHttpClient,
|
||||
dataDir: string,
|
||||
url: string,
|
||||
optionalConfig?: DownloadToTempfileOptionalConfig
|
||||
): Promise<Tempfile> {
|
||||
const tmpfile = await createTempfile(dataDir, url);
|
||||
const writeStream = createWriteStream(tmpfile);
|
||||
try {
|
||||
// max 10 mins to download
|
||||
// FIXME: should show download progress
|
||||
const authorization = optionalConfig?.remoteAuthToken ? `Bearer ${optionalConfig?.remoteAuthToken}` : '';
|
||||
const { status, headers, res } = await httpclient.request(url, {
|
||||
timeout: 60000 * 10,
|
||||
headers: { authorization },
|
||||
const requestHeaders: Record<string, string> = {};
|
||||
if (optionalConfig?.remoteAuthToken) {
|
||||
requestHeaders.authorization = `Bearer ${optionalConfig.remoteAuthToken}`;
|
||||
}
|
||||
const { status, headers, res } = (await httpclient.request(url, {
|
||||
timeout: 60_000 * 10,
|
||||
headers: requestHeaders,
|
||||
writeStream,
|
||||
timing: true,
|
||||
followRedirect: true,
|
||||
}) as HttpClientResponse;
|
||||
if (status === 404 || (optionalConfig?.ignoreDownloadStatuses && optionalConfig.ignoreDownloadStatuses.includes(status))) {
|
||||
})) as HttpClientResponse;
|
||||
if (
|
||||
status === 404 ||
|
||||
(optionalConfig?.ignoreDownloadStatuses &&
|
||||
optionalConfig.ignoreDownloadStatuses.includes(status))
|
||||
) {
|
||||
const err = new Error(`Not found, status(${status})`);
|
||||
err.name = 'DownloadNotFoundError';
|
||||
throw err;
|
||||
@@ -93,6 +56,71 @@ async function _downloadToTempfile(httpclient: EggContextHttpClient,
|
||||
}
|
||||
}
|
||||
|
||||
export interface DownloadToTempfileOptionalConfig {
|
||||
retries?: number;
|
||||
ignoreDownloadStatuses?: number[];
|
||||
remoteAuthToken?: string;
|
||||
}
|
||||
|
||||
export async function createTempDir(dataDir: string, dirname?: string) {
|
||||
// will auto clean on CleanTempDir Schedule
|
||||
let tmpdir = path.join(dataDir, 'downloads', dayjs().format('YYYY/MM/DD'));
|
||||
if (dirname) {
|
||||
tmpdir = path.join(tmpdir, dirname);
|
||||
}
|
||||
await mkdir(tmpdir, { recursive: true });
|
||||
return tmpdir;
|
||||
}
|
||||
|
||||
export async function createTempfile(dataDir: string, filename: string) {
|
||||
const tmpdir = await createTempDir(dataDir);
|
||||
// The filename is a URL (from dist.tarball), which needs to be truncated, (`getconf NAME_MAX /` # max filename length: 255 bytes)
|
||||
// https://github.com/cnpm/cnpmjs.org/pull/1345
|
||||
const tmpfile = path.join(
|
||||
tmpdir,
|
||||
// oxlint-disable-next-line typescript-eslint/no-non-null-assertion
|
||||
`${randomBytes(10).toString('hex')}-${path.basename(url.parse(filename).pathname!)}`
|
||||
);
|
||||
return tmpfile;
|
||||
}
|
||||
|
||||
export async function downloadToTempfile(
|
||||
httpclient: EggContextHttpClient,
|
||||
dataDir: string,
|
||||
url: string,
|
||||
optionalConfig?: DownloadToTempfileOptionalConfig
|
||||
) {
|
||||
let retries = optionalConfig?.retries || 3;
|
||||
let lastError: Error | undefined;
|
||||
while (retries > 0) {
|
||||
try {
|
||||
return await _downloadToTempfile(
|
||||
httpclient,
|
||||
dataDir,
|
||||
url,
|
||||
optionalConfig
|
||||
);
|
||||
} catch (err) {
|
||||
if (err.name === 'DownloadNotFoundError') throw err;
|
||||
lastError = err;
|
||||
}
|
||||
retries--;
|
||||
if (retries > 0) {
|
||||
// sleep 1s ~ 4s in random
|
||||
const delay =
|
||||
process.env.NODE_ENV === 'test' ? 1 : 1000 + Math.random() * 4000;
|
||||
await setTimeout(delay);
|
||||
}
|
||||
}
|
||||
// oxlint-disable-next-line no-throw-literal
|
||||
throw lastError;
|
||||
}
|
||||
export interface Tempfile {
|
||||
tmpfile: string;
|
||||
headers: HttpClientResponse['res']['headers'];
|
||||
timing: HttpClientResponse['res']['timing'];
|
||||
}
|
||||
|
||||
const DEFAULT_CONTENT_TYPE = 'application/octet-stream';
|
||||
const PLAIN_TEXT = 'text/plain';
|
||||
const WHITE_FILENAME_CONTENT_TYPES = {
|
||||
@@ -107,11 +135,27 @@ const WHITE_FILENAME_CONTENT_TYPES = {
|
||||
'.eslintrc': 'application/json',
|
||||
} as const;
|
||||
|
||||
const CONTENT_TYPE_BLACKLIST = new Set(['application/xml', 'text/html']);
|
||||
|
||||
export function ensureContentType(contentType: string) {
|
||||
if (CONTENT_TYPE_BLACKLIST.has(contentType)) {
|
||||
return 'text/plain';
|
||||
}
|
||||
return contentType;
|
||||
}
|
||||
|
||||
export function mimeLookup(filepath: string) {
|
||||
const filename = path.basename(filepath).toLowerCase();
|
||||
if (filename.endsWith('.ts')) return PLAIN_TEXT;
|
||||
if (filename.endsWith('.lock')) return PLAIN_TEXT;
|
||||
return mime.lookup(filename) ||
|
||||
WHITE_FILENAME_CONTENT_TYPES[filename as keyof typeof WHITE_FILENAME_CONTENT_TYPES] ||
|
||||
const defaultContentType = mime.lookup(filename);
|
||||
// https://github.com/cnpm/cnpmcore/issues/693#issuecomment-2955268229
|
||||
const contentType =
|
||||
defaultContentType ||
|
||||
WHITE_FILENAME_CONTENT_TYPES[
|
||||
filename as keyof typeof WHITE_FILENAME_CONTENT_TYPES
|
||||
] ||
|
||||
DEFAULT_CONTENT_TYPE;
|
||||
|
||||
return ensureContentType(contentType);
|
||||
}
|
||||
|
||||
@@ -1,9 +1,15 @@
|
||||
import { createReadStream } from 'node:fs';
|
||||
import { Readable } from 'node:stream';
|
||||
import { pipeline } from 'node:stream/promises';
|
||||
import * as ssri from 'ssri';
|
||||
import tar from 'tar';
|
||||
import { PackageJSONType } from '../repository/PackageRepository';
|
||||
|
||||
import { fromData, fromStream, type HashLike } from 'ssri';
|
||||
// @ts-expect-error no types available
|
||||
import tar from '@fengmk2/tar';
|
||||
|
||||
import type {
|
||||
AuthorType,
|
||||
PackageJSONType,
|
||||
} from '../repository/PackageRepository.ts';
|
||||
|
||||
// /@cnpm%2ffoo
|
||||
// /@cnpm%2Ffoo
|
||||
@@ -11,13 +17,14 @@ import { PackageJSONType } from '../repository/PackageRepository';
|
||||
// /foo
|
||||
// name max length is 214 chars
|
||||
// https://www.npmjs.com/package/path-to-regexp#custom-matching-parameters
|
||||
export const FULLNAME_REG_STRING = '@[^/]{1,220}\/[^/]{1,220}|@[^%]+\%2[fF][^/]{1,220}|[^@/]{1,220}';
|
||||
export const FULLNAME_REG_STRING =
|
||||
'@[^/]{1,220}/[^/]{1,220}|@[^%]+%2[fF][^/]{1,220}|[^@/]{1,220}';
|
||||
|
||||
export function getScopeAndName(fullname: string): string[] {
|
||||
if (fullname.startsWith('@')) {
|
||||
return fullname.split('/', 2);
|
||||
}
|
||||
return [ '', fullname ];
|
||||
return ['', fullname];
|
||||
}
|
||||
|
||||
export function getFullname(scope: string, name: string): string {
|
||||
@@ -32,15 +39,22 @@ export function getPrefixedName(prefix: string, username: string): string {
|
||||
return prefix ? `${prefix}${username}` : username;
|
||||
}
|
||||
|
||||
export async function calculateIntegrity(contentOrFile: Uint8Array | string) {
|
||||
let integrityObj;
|
||||
export interface Integrity {
|
||||
integrity: string;
|
||||
shasum: string;
|
||||
}
|
||||
|
||||
export async function calculateIntegrity(
|
||||
contentOrFile: Uint8Array | string
|
||||
): Promise<Integrity> {
|
||||
let integrityObj: HashLike;
|
||||
if (typeof contentOrFile === 'string') {
|
||||
integrityObj = await ssri.fromStream(createReadStream(contentOrFile), {
|
||||
algorithms: [ 'sha512', 'sha1' ],
|
||||
integrityObj = await fromStream(createReadStream(contentOrFile), {
|
||||
algorithms: ['sha512', 'sha1'],
|
||||
});
|
||||
} else {
|
||||
integrityObj = ssri.fromData(contentOrFile, {
|
||||
algorithms: [ 'sha512', 'sha1' ],
|
||||
integrityObj = fromData(contentOrFile, {
|
||||
algorithms: ['sha512', 'sha1'],
|
||||
});
|
||||
}
|
||||
const integrity = integrityObj.sha512[0].toString() as string;
|
||||
@@ -48,26 +62,33 @@ export async function calculateIntegrity(contentOrFile: Uint8Array | string) {
|
||||
return { integrity, shasum };
|
||||
}
|
||||
|
||||
export function formatTarball(registry: string, scope: string, name: string, version: string) {
|
||||
export function formatTarball(
|
||||
registry: string,
|
||||
scope: string,
|
||||
name: string,
|
||||
version: string
|
||||
) {
|
||||
const fullname = getFullname(scope, name);
|
||||
return `${registry}/${fullname}/-/${name}-${version}.tgz`;
|
||||
}
|
||||
|
||||
export function detectInstallScript(manifest: any) {
|
||||
export function detectInstallScript(manifest: {
|
||||
scripts?: Record<string, string>;
|
||||
}) {
|
||||
// https://github.com/npm/registry/blob/master/docs/responses/package-metadata.md#abbreviated-version-object
|
||||
let hasInstallScript = false;
|
||||
const scripts = manifest.scripts;
|
||||
if (scripts) {
|
||||
// https://www.npmjs.com/package/fix-has-install-script
|
||||
if (scripts.install || scripts.preinstall || scripts.postinstall) {
|
||||
hasInstallScript = true;
|
||||
}
|
||||
// https://www.npmjs.com/package/fix-has-install-script
|
||||
if (scripts?.install || scripts?.preinstall || scripts?.postinstall) {
|
||||
hasInstallScript = true;
|
||||
}
|
||||
return hasInstallScript;
|
||||
}
|
||||
|
||||
/** 判断一个版本压缩包中是否包含 npm-shrinkwrap.json */
|
||||
export async function hasShrinkWrapInTgz(contentOrFile: Uint8Array | string): Promise<boolean> {
|
||||
export async function hasShrinkWrapInTgz(
|
||||
contentOrFile: Uint8Array | string
|
||||
): Promise<boolean> {
|
||||
let readable: Readable;
|
||||
if (typeof contentOrFile === 'string') {
|
||||
readable = createReadStream(contentOrFile);
|
||||
@@ -85,7 +106,8 @@ export async function hasShrinkWrapInTgz(contentOrFile: Uint8Array | string): Pr
|
||||
const parser = tar.t({
|
||||
// options.strict 默认为 false,会忽略 Recoverable errors,例如 tar 解析失败
|
||||
// 详见 https://github.com/isaacs/node-tar#warnings-and-errors
|
||||
onentry(entry) {
|
||||
// oxlint-disable-next-line typescript-eslint/no-explicit-any
|
||||
onentry(entry: any) {
|
||||
if (entry.path === 'package/npm-shrinkwrap.json') {
|
||||
hasShrinkWrap = true;
|
||||
abortController.abort();
|
||||
@@ -100,16 +122,37 @@ export async function hasShrinkWrapInTgz(contentOrFile: Uint8Array | string): Pr
|
||||
if (e.code === 'ABORT_ERR') {
|
||||
return hasShrinkWrap;
|
||||
}
|
||||
throw Object.assign(new Error('[hasShrinkWrapInTgz] Fail to parse input file'), { cause: e });
|
||||
throw Object.assign(
|
||||
new Error('[hasShrinkWrapInTgz] Fail to parse input file'),
|
||||
{ cause: e }
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export async function extractPackageJSON(tarballBytes: Buffer): Promise<PackageJSONType> {
|
||||
/** 写入 ES 时,格式化 author */
|
||||
export function formatAuthor(
|
||||
author: string | AuthorType | undefined
|
||||
): AuthorType | undefined {
|
||||
if (author === undefined) {
|
||||
return author;
|
||||
}
|
||||
|
||||
if (typeof author === 'string') {
|
||||
return { name: author };
|
||||
}
|
||||
|
||||
return author;
|
||||
}
|
||||
|
||||
export async function extractPackageJSON(
|
||||
tarballBytes: Buffer
|
||||
): Promise<PackageJSONType> {
|
||||
// oxlint-disable-next-line promise/avoid-new
|
||||
return new Promise((resolve, reject) => {
|
||||
Readable.from(tarballBytes)
|
||||
.pipe(tar.t({
|
||||
filter: name => name === 'package/package.json',
|
||||
onentry: async entry => {
|
||||
Readable.from(tarballBytes).pipe(
|
||||
tar.t({
|
||||
filter: (name: string) => name === 'package/package.json',
|
||||
onentry: async (entry: Readable) => {
|
||||
const chunks: Buffer[] = [];
|
||||
for await (const chunk of entry) {
|
||||
chunks.push(chunk);
|
||||
@@ -117,10 +160,11 @@ export async function extractPackageJSON(tarballBytes: Buffer): Promise<PackageJ
|
||||
try {
|
||||
const data = Buffer.concat(chunks);
|
||||
return resolve(JSON.parse(data.toString()));
|
||||
} catch (err) {
|
||||
} catch {
|
||||
reject(new Error('Error parsing package.json'));
|
||||
}
|
||||
},
|
||||
}));
|
||||
})
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { EggContext } from '@eggjs/tegg';
|
||||
import type { Context } from 'egg';
|
||||
|
||||
export function isSyncWorkerRequest(ctx: EggContext) {
|
||||
export function isSyncWorkerRequest(ctx: Context) {
|
||||
// sync request will contain this query params
|
||||
let isSyncWorkerRequest = ctx.query.cache === '0';
|
||||
if (!isSyncWorkerRequest) {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import crypto from 'crypto';
|
||||
import crypto from 'node:crypto';
|
||||
import base from 'base-x';
|
||||
import { crc32 } from '@node-rs/crc32';
|
||||
import * as ssri from 'ssri';
|
||||
import { checkData, create } from 'ssri';
|
||||
import UAParser from 'ua-parser-js';
|
||||
|
||||
const base62 = base('0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ');
|
||||
@@ -29,12 +29,12 @@ export function checkToken(token: string, prefix: string): boolean {
|
||||
}
|
||||
|
||||
export function integrity(plain: string): string {
|
||||
return ssri.create().update(plain).digest()
|
||||
return create().update(plain).digest()
|
||||
.toString();
|
||||
}
|
||||
|
||||
export function checkIntegrity(plain: string, expectedIntegrity: string): boolean {
|
||||
return !!ssri.checkData(plain, expectedIntegrity);
|
||||
return !!checkData(plain, expectedIntegrity);
|
||||
}
|
||||
|
||||
export function sha512(plain: string): string {
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { AccessLevel, SingletonProto } from '@eggjs/tegg';
|
||||
import { BugVersion } from '../../core/entity/BugVersion';
|
||||
import { AccessLevel, SingletonProto } from 'egg';
|
||||
|
||||
import type { BugVersion } from '../../core/entity/BugVersion.ts';
|
||||
|
||||
@SingletonProto({
|
||||
accessLevel: AccessLevel.PUBLIC,
|
||||
|
||||
@@ -1,10 +1,6 @@
|
||||
import {
|
||||
SingletonProto,
|
||||
AccessLevel,
|
||||
Inject,
|
||||
} from '@eggjs/tegg';
|
||||
// FIXME: egg-redis should use ioredis v5
|
||||
// https://github.com/eggjs/egg-redis/issues/35
|
||||
import { AccessLevel, Inject, SingletonProto } from 'egg';
|
||||
// FIXME: @eggjs/redis should use ioredis v5
|
||||
// https://github.com/eggjs/redis/issues/35
|
||||
import type { Redis } from 'ioredis';
|
||||
|
||||
const ONE_DAY = 3600 * 24;
|
||||
@@ -40,7 +36,7 @@ export class CacheAdapter {
|
||||
const lockName = this.getLockName(key);
|
||||
const existsTimestamp = await this.redis.get(lockName);
|
||||
if (existsTimestamp) {
|
||||
if (Date.now() - parseInt(existsTimestamp) < seconds * 1000) {
|
||||
if (Date.now() - Number.parseInt(existsTimestamp) < seconds * 1000) {
|
||||
return null;
|
||||
}
|
||||
// lock timeout, delete it
|
||||
|
||||
@@ -1,14 +1,11 @@
|
||||
import { Readable } from 'stream';
|
||||
import {
|
||||
SingletonProto,
|
||||
AccessLevel,
|
||||
Inject,
|
||||
} from '@eggjs/tegg';
|
||||
import { Pointcut } from '@eggjs/tegg/aop';
|
||||
import { EggLogger } from 'egg';
|
||||
import { AsyncTimer } from '../aop/AsyncTimer';
|
||||
import { NFSClient } from '../typing';
|
||||
import { IncomingHttpHeaders } from 'http';
|
||||
import type { Readable } from 'node:stream';
|
||||
import type { IncomingHttpHeaders } from 'node:http';
|
||||
|
||||
import { AccessLevel, Inject, SingletonProto, Logger } from 'egg';
|
||||
import { Pointcut } from 'egg/aop';
|
||||
|
||||
import { AsyncTimer } from '../aop/AsyncTimer.ts';
|
||||
import type { NFSClient } from '../typing.ts';
|
||||
|
||||
const INSTANCE_NAME = 'nfsAdapter';
|
||||
|
||||
@@ -21,17 +18,27 @@ export class NFSAdapter {
|
||||
private readonly nfsClient: NFSClient;
|
||||
|
||||
@Inject()
|
||||
private readonly logger: EggLogger;
|
||||
private readonly logger: Logger;
|
||||
|
||||
@Pointcut(AsyncTimer)
|
||||
async uploadBytes(storeKey: string, bytes: Uint8Array) {
|
||||
this.logger.info('[%s:uploadBytes] key: %s, bytes: %d', INSTANCE_NAME, storeKey, bytes.length);
|
||||
this.logger.info(
|
||||
'[%s:uploadBytes] key: %s, bytes: %d',
|
||||
INSTANCE_NAME,
|
||||
storeKey,
|
||||
bytes.length
|
||||
);
|
||||
await this.nfsClient.uploadBytes(bytes, { key: storeKey });
|
||||
}
|
||||
|
||||
// will return next store position
|
||||
@Pointcut(AsyncTimer)
|
||||
async appendBytes(storeKey: string, bytes: Uint8Array, position?: string, headers?: IncomingHttpHeaders) {
|
||||
async appendBytes(
|
||||
storeKey: string,
|
||||
bytes: Uint8Array,
|
||||
position?: string,
|
||||
headers?: IncomingHttpHeaders
|
||||
) {
|
||||
// make sure position is undefined by the first time
|
||||
if (!position) position = undefined;
|
||||
const options = {
|
||||
@@ -45,14 +52,24 @@ export class NFSAdapter {
|
||||
|
||||
@Pointcut(AsyncTimer)
|
||||
async uploadFile(storeKey: string, file: string) {
|
||||
this.logger.info('[%s:uploadFile] key: %s, file: %s', INSTANCE_NAME, storeKey, file);
|
||||
this.logger.info(
|
||||
'[%s:uploadFile] key: %s, file: %s',
|
||||
INSTANCE_NAME,
|
||||
storeKey,
|
||||
file
|
||||
);
|
||||
await this.nfsClient.upload(file, { key: storeKey });
|
||||
}
|
||||
|
||||
@Pointcut(AsyncTimer)
|
||||
async downloadFile(storeKey: string, file: string, timeout: number) {
|
||||
this.logger.info('[%s:downloadFile] key: %s, file: %s, timeout: %s',
|
||||
INSTANCE_NAME, storeKey, file, timeout);
|
||||
this.logger.info(
|
||||
'[%s:downloadFile] key: %s, file: %s, timeout: %s',
|
||||
INSTANCE_NAME,
|
||||
storeKey,
|
||||
file,
|
||||
timeout
|
||||
);
|
||||
await this.nfsClient.download(storeKey, file, { timeout });
|
||||
}
|
||||
|
||||
@@ -79,7 +96,9 @@ export class NFSAdapter {
|
||||
}
|
||||
}
|
||||
|
||||
async getDownloadUrlOrStream(storeKey: string): Promise<string | Readable | undefined> {
|
||||
async getDownloadUrlOrStream(
|
||||
storeKey: string
|
||||
): Promise<string | Readable | undefined> {
|
||||
const downloadUrl = await this.getDownloadUrl(storeKey);
|
||||
if (downloadUrl) {
|
||||
return downloadUrl;
|
||||
|
||||
@@ -1,17 +1,17 @@
|
||||
import { setTimeout } from 'timers/promises';
|
||||
import { setTimeout } from 'node:timers/promises';
|
||||
|
||||
import {
|
||||
ContextProto,
|
||||
AccessLevel,
|
||||
Inject,
|
||||
} from '@eggjs/tegg';
|
||||
import {
|
||||
EggLogger,
|
||||
EggContextHttpClient,
|
||||
AccessLevel, Inject,
|
||||
EggAppConfig,
|
||||
HttpClient,
|
||||
Logger,
|
||||
HttpClientRequestOptions,
|
||||
HttpClientResponse,
|
||||
} from 'egg';
|
||||
import { PackageManifestType } from '../../repository/PackageRepository';
|
||||
|
||||
import type { PackageManifestType } from '../../repository/PackageRepository.ts';
|
||||
import { isTimeoutError } from '../ErrorUtil.ts';
|
||||
|
||||
type HttpMethod = HttpClientRequestOptions['method'];
|
||||
|
||||
@@ -25,12 +25,12 @@ export type RegistryResponse = { method: HttpMethod } & HttpClientResponse;
|
||||
})
|
||||
export class NPMRegistry {
|
||||
@Inject()
|
||||
private readonly logger: EggLogger;
|
||||
private readonly logger: Logger;
|
||||
@Inject()
|
||||
private readonly httpclient: EggContextHttpClient;
|
||||
private readonly httpClient: HttpClient;
|
||||
@Inject()
|
||||
private config: EggAppConfig;
|
||||
private timeout = 10000;
|
||||
private timeout = 10_000;
|
||||
public registryHost: string;
|
||||
|
||||
get registry(): string {
|
||||
@@ -41,35 +41,52 @@ export class NPMRegistry {
|
||||
this.registryHost = registryHost;
|
||||
}
|
||||
|
||||
public async getFullManifests(fullname: string, optionalConfig?: { retries?: number, remoteAuthToken?: string }): Promise<{ method: HttpMethod } & HttpClientResponse<PackageManifestType>> {
|
||||
public async getFullManifests(
|
||||
fullname: string,
|
||||
optionalConfig?: { retries?: number; remoteAuthToken?: string }
|
||||
): Promise<{ method: HttpMethod } & HttpClientResponse<PackageManifestType>> {
|
||||
let retries = optionalConfig?.retries || 3;
|
||||
// set query t=timestamp, make sure CDN cache disable
|
||||
// cache=0 is sync worker request flag
|
||||
const url = `${this.registry}/${encodeURIComponent(fullname)}?t=${Date.now()}&cache=0`;
|
||||
let lastError: any;
|
||||
let lastError: Error | undefined;
|
||||
while (retries > 0) {
|
||||
try {
|
||||
// large package: https://r.cnpmjs.org/%40procore%2Fcore-icons
|
||||
// https://r.cnpmjs.org/intraactive-sdk-ui 44s
|
||||
const authorization = this.genAuthorizationHeader(optionalConfig?.remoteAuthToken);
|
||||
return await this.request('GET', url, undefined, { timeout: 120000, headers: { authorization } });
|
||||
} catch (err: any) {
|
||||
if (err.name === 'ResponseTimeoutError') throw err;
|
||||
const authorization = this.genAuthorizationHeader(
|
||||
optionalConfig?.remoteAuthToken
|
||||
);
|
||||
return await this.request('GET', url, undefined, {
|
||||
timeout: 120_000,
|
||||
headers: { authorization },
|
||||
});
|
||||
} catch (err) {
|
||||
if (isTimeoutError(err)) {
|
||||
throw err;
|
||||
}
|
||||
lastError = err;
|
||||
}
|
||||
retries--;
|
||||
if (retries > 0) {
|
||||
// sleep 1s ~ 4s in random
|
||||
const delay = process.env.NODE_ENV === 'test' ? 1 : 1000 + Math.random() * 4000;
|
||||
const delay =
|
||||
process.env.NODE_ENV === 'test' ? 1 : 1000 + Math.random() * 4000;
|
||||
await setTimeout(delay);
|
||||
}
|
||||
}
|
||||
// oxlint-disable-next-line no-throw-literal
|
||||
throw lastError;
|
||||
}
|
||||
|
||||
// app.put('/:name/sync', sync.sync);
|
||||
public async createSyncTask(fullname: string, optionalConfig?: { remoteAuthToken?:string}): Promise<RegistryResponse> {
|
||||
const authorization = this.genAuthorizationHeader(optionalConfig?.remoteAuthToken);
|
||||
public async createSyncTask(
|
||||
fullname: string,
|
||||
optionalConfig?: { remoteAuthToken?: string }
|
||||
): Promise<RegistryResponse> {
|
||||
const authorization = this.genAuthorizationHeader(
|
||||
optionalConfig?.remoteAuthToken
|
||||
);
|
||||
const url = `${this.registry}/${encodeURIComponent(fullname)}/sync?sync_upstream=true&nodeps=true`;
|
||||
// {
|
||||
// ok: true,
|
||||
@@ -79,38 +96,64 @@ export class NPMRegistry {
|
||||
}
|
||||
|
||||
// app.get('/:name/sync/log/:id', sync.getSyncLog);
|
||||
public async getSyncTask(fullname: string, id: string, offset: number, optionalConfig?:{ remoteAuthToken?:string }): Promise<RegistryResponse> {
|
||||
const authorization = this.genAuthorizationHeader(optionalConfig?.remoteAuthToken);
|
||||
public async getSyncTask(
|
||||
fullname: string,
|
||||
id: string,
|
||||
offset: number,
|
||||
optionalConfig?: { remoteAuthToken?: string }
|
||||
): Promise<RegistryResponse> {
|
||||
const authorization = this.genAuthorizationHeader(
|
||||
optionalConfig?.remoteAuthToken
|
||||
);
|
||||
const url = `${this.registry}/${encodeURIComponent(fullname)}/sync/log/${id}?offset=${offset}`;
|
||||
// { ok: true, syncDone: syncDone, log: log }
|
||||
return await this.request('GET', url, undefined, { authorization });
|
||||
}
|
||||
|
||||
public async getDownloadRanges(registry: string, fullname: string, start: string, end: string, optionalConfig?:{ remoteAuthToken?:string }): Promise<RegistryResponse> {
|
||||
const authorization = this.genAuthorizationHeader(optionalConfig?.remoteAuthToken);
|
||||
public async getDownloadRanges(
|
||||
registry: string,
|
||||
fullname: string,
|
||||
start: string,
|
||||
end: string,
|
||||
optionalConfig?: { remoteAuthToken?: string }
|
||||
): Promise<RegistryResponse> {
|
||||
const authorization = this.genAuthorizationHeader(
|
||||
optionalConfig?.remoteAuthToken
|
||||
);
|
||||
const url = `${registry}/downloads/range/${start}:${end}/${encodeURIComponent(fullname)}`;
|
||||
return await this.request('GET', url, undefined, { authorization });
|
||||
}
|
||||
|
||||
private async request(method: HttpMethod, url: string, params?: object, options?: object): Promise<RegistryResponse> {
|
||||
const res = await this.httpclient.request(url, {
|
||||
private async request(
|
||||
method: HttpMethod,
|
||||
url: string,
|
||||
params?: object,
|
||||
options?: object
|
||||
): Promise<RegistryResponse> {
|
||||
const res = (await this.httpClient.request(url, {
|
||||
method,
|
||||
data: params,
|
||||
dataType: 'json',
|
||||
timing: true,
|
||||
retry: 3,
|
||||
timeout: this.timeout,
|
||||
followRedirect: true,
|
||||
gzip: true,
|
||||
...options,
|
||||
}) as HttpClientResponse;
|
||||
this.logger.info('[NPMRegistry:request] %s %s, status: %s', method, url, res.status);
|
||||
})) as HttpClientResponse;
|
||||
this.logger.info(
|
||||
'[NPMRegistry:request] %s %s, status: %s',
|
||||
method,
|
||||
url,
|
||||
res.status
|
||||
);
|
||||
return {
|
||||
method,
|
||||
...res,
|
||||
};
|
||||
}
|
||||
|
||||
private genAuthorizationHeader(remoteAuthToken?:string) {
|
||||
public genAuthorizationHeader(remoteAuthToken?: string) {
|
||||
return remoteAuthToken ? `Bearer ${remoteAuthToken}` : '';
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,72 +1,108 @@
|
||||
import { ImplDecorator, Inject, QualifierImplDecoratorUtil } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import { EggHttpClient, EggLogger } from 'egg';
|
||||
import { BinaryName, BinaryTaskConfig } from '../../../../config/binaries';
|
||||
import {
|
||||
Inject,
|
||||
QualifierImplDecoratorUtil,
|
||||
type ImplDecorator,
|
||||
HttpClient,
|
||||
Logger,
|
||||
} from 'egg';
|
||||
|
||||
export type BinaryItem = {
|
||||
import type { BinaryType } from '../../enum/Binary.ts';
|
||||
import type {
|
||||
BinaryName,
|
||||
BinaryTaskConfig,
|
||||
} from '../../../../config/binaries.ts';
|
||||
|
||||
const platforms = ['darwin', 'linux', 'win32'] as const;
|
||||
export interface BinaryItem {
|
||||
name: string;
|
||||
isDir: boolean;
|
||||
url: string;
|
||||
size: string | number;
|
||||
date: string;
|
||||
ignoreDownloadStatuses?: number[];
|
||||
};
|
||||
}
|
||||
|
||||
export type FetchResult = {
|
||||
export interface FetchResult {
|
||||
items: BinaryItem[];
|
||||
// oxlint-disable-next-line typescript-eslint/no-explicit-any
|
||||
nextParams?: any;
|
||||
};
|
||||
|
||||
const platforms = [ 'darwin', 'linux', 'win32' ] as const;
|
||||
}
|
||||
|
||||
export const BINARY_ADAPTER_ATTRIBUTE = Symbol('BINARY_ADAPTER_ATTRIBUTE');
|
||||
|
||||
export abstract class AbstractBinary {
|
||||
@Inject()
|
||||
protected logger: EggLogger;
|
||||
protected logger: Logger;
|
||||
|
||||
@Inject()
|
||||
protected httpclient: EggHttpClient;
|
||||
protected httpclient: HttpClient;
|
||||
|
||||
abstract initFetch(binaryName: BinaryName): Promise<void>;
|
||||
abstract fetch(dir: string, binaryName: BinaryName): Promise<FetchResult | undefined>;
|
||||
abstract fetch(
|
||||
dir: string,
|
||||
binaryName: BinaryName,
|
||||
lastData?: Record<string, unknown>
|
||||
): Promise<FetchResult | undefined>;
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
async finishFetch(_success: boolean, _binaryName: BinaryName): Promise<void> {
|
||||
// do not thing by default
|
||||
}
|
||||
|
||||
protected async requestXml(url: string) {
|
||||
const { status, data, headers } = await this.httpclient.request(url, {
|
||||
timeout: 30000,
|
||||
timeout: 30_000,
|
||||
followRedirect: true,
|
||||
gzip: true,
|
||||
});
|
||||
const xml = data.toString() as string;
|
||||
if (status !== 200) {
|
||||
this.logger.warn('[AbstractBinary.requestXml:non-200-status] url: %s, status: %s, headers: %j, xml: %j', url, status, headers, xml);
|
||||
this.logger.warn(
|
||||
'[AbstractBinary.requestXml:non-200-status] url: %s, status: %s, headers: %j, xml: %j',
|
||||
url,
|
||||
status,
|
||||
headers,
|
||||
xml
|
||||
);
|
||||
return '';
|
||||
}
|
||||
return xml;
|
||||
}
|
||||
|
||||
protected async requestJSON(url: string) {
|
||||
// oxlint-disable-next-line typescript-eslint/no-explicit-any
|
||||
protected async requestJSON<T = any>(
|
||||
url: string,
|
||||
requestHeaders?: Record<string, string>
|
||||
): Promise<T> {
|
||||
const { status, data, headers } = await this.httpclient.request(url, {
|
||||
timeout: 30000,
|
||||
timeout: 30_000,
|
||||
dataType: 'json',
|
||||
followRedirect: true,
|
||||
gzip: true,
|
||||
headers: requestHeaders,
|
||||
});
|
||||
if (status !== 200) {
|
||||
this.logger.warn('[AbstractBinary.requestJSON:non-200-status] url: %s, status: %s, headers: %j', url, status, headers);
|
||||
return data;
|
||||
this.logger.warn(
|
||||
'[AbstractBinary.requestJSON:non-200-status] url: %s, status: %s, headers: %j',
|
||||
url,
|
||||
status,
|
||||
headers
|
||||
);
|
||||
return data as T;
|
||||
}
|
||||
return data;
|
||||
return data as T;
|
||||
}
|
||||
|
||||
// https://nodejs.org/api/n-api.html#n_api_node_api_version_matrix
|
||||
protected async listNodeABIVersions() {
|
||||
const nodeABIVersions: number[] = [];
|
||||
const versions = await this.requestJSON('https://nodejs.org/dist/index.json');
|
||||
const versions = await this.requestJSON(
|
||||
'https://nodejs.org/dist/index.json'
|
||||
);
|
||||
for (const version of versions) {
|
||||
if (!version.modules) continue;
|
||||
const modulesVersion = parseInt(version.modules);
|
||||
// node v6.0.0 moduels 48 min
|
||||
const modulesVersion = Number.parseInt(version.modules);
|
||||
// node v6.0.0 modules 48 min
|
||||
if (modulesVersion >= 48 && !nodeABIVersions.includes(modulesVersion)) {
|
||||
nodeABIVersions.push(modulesVersion);
|
||||
}
|
||||
@@ -83,21 +119,24 @@ export abstract class AbstractBinary {
|
||||
if (binaryConfig?.options?.nodeArchs) return binaryConfig.options.nodeArchs;
|
||||
// https://nodejs.org/api/os.html#osarch
|
||||
return {
|
||||
linux: [ 'arm', 'arm64', 's390x', 'ia32', 'x64' ],
|
||||
darwin: [ 'arm64', 'ia32', 'x64' ],
|
||||
win32: [ 'ia32', 'x64' ],
|
||||
linux: ['arm', 'arm64', 's390x', 'ia32', 'x64'],
|
||||
darwin: ['arm64', 'ia32', 'x64'],
|
||||
win32: ['ia32', 'x64'],
|
||||
};
|
||||
}
|
||||
|
||||
protected listNodeLibcs(): Record<typeof platforms[number], string[]> {
|
||||
protected listNodeLibcs(): Record<(typeof platforms)[number], string[]> {
|
||||
// https://github.com/lovell/detect-libc/blob/master/lib/detect-libc.js#L42
|
||||
return {
|
||||
darwin: [ 'unknown' ],
|
||||
linux: [ 'glibc', 'musl' ],
|
||||
win32: [ 'unknown' ],
|
||||
darwin: ['unknown'],
|
||||
linux: ['glibc', 'musl'],
|
||||
win32: ['unknown'],
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export const BinaryAdapter: ImplDecorator<AbstractBinary, typeof BinaryType> =
|
||||
QualifierImplDecoratorUtil.generatorDecorator(AbstractBinary, BINARY_ADAPTER_ATTRIBUTE);
|
||||
QualifierImplDecoratorUtil.generatorDecorator(
|
||||
AbstractBinary,
|
||||
BINARY_ADAPTER_ATTRIBUTE
|
||||
);
|
||||
|
||||
@@ -1,7 +1,12 @@
|
||||
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
|
||||
import { Inject, SingletonProto } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import { EggAppConfig } from 'egg';
|
||||
import { Inject, SingletonProto, EggAppConfig } from 'egg';
|
||||
|
||||
import { BinaryType } from '../../enum/Binary.ts';
|
||||
import {
|
||||
AbstractBinary,
|
||||
BinaryAdapter,
|
||||
type BinaryItem,
|
||||
type FetchResult,
|
||||
} from './AbstractBinary.ts';
|
||||
|
||||
@SingletonProto()
|
||||
@BinaryAdapter(BinaryType.Api)
|
||||
@@ -14,12 +19,25 @@ export class ApiBinary extends AbstractBinary {
|
||||
return;
|
||||
}
|
||||
|
||||
async fetch(dir: string, binaryName: string): Promise<FetchResult | undefined> {
|
||||
const apiUrl = this.config.cnpmcore.syncBinaryFromAPISource || `${this.config.cnpmcore.sourceRegistry}/-/binary`;
|
||||
const url = `${apiUrl}/${binaryName}${dir}`;
|
||||
async fetch(
|
||||
dir: string,
|
||||
binaryName: string,
|
||||
lastData?: Record<string, unknown>
|
||||
): Promise<FetchResult | undefined> {
|
||||
const apiUrl =
|
||||
this.config.cnpmcore.syncBinaryFromAPISource ||
|
||||
`${this.config.cnpmcore.sourceRegistry}/-/binary`;
|
||||
let url = `${apiUrl}/${binaryName}${dir}`;
|
||||
if (lastData && lastData.lastSyncTime) {
|
||||
url += `?since=${lastData.lastSyncTime}&limit=100`;
|
||||
}
|
||||
|
||||
const data = await this.requestJSON(url);
|
||||
if (!Array.isArray(data)) {
|
||||
this.logger.warn('[ApiBinary.fetch:response-data-not-array] data: %j', data);
|
||||
this.logger.warn(
|
||||
'[ApiBinary.fetch:response-data-not-array] data: %j',
|
||||
data
|
||||
);
|
||||
return;
|
||||
}
|
||||
const items: BinaryItem[] = [];
|
||||
@@ -28,6 +46,7 @@ export class ApiBinary extends AbstractBinary {
|
||||
name: item.name,
|
||||
isDir: item.type === 'dir',
|
||||
url: item.url,
|
||||
// oxlint-disable-next-line unicorn/explicit-length-check
|
||||
size: item.size || '-',
|
||||
date: item.date,
|
||||
});
|
||||
|
||||
@@ -1,8 +1,18 @@
|
||||
import { SingletonProto } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import binaries, { BinaryName, BinaryTaskConfig } from '../../../../config/binaries';
|
||||
import path from 'path';
|
||||
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
|
||||
import path from 'node:path';
|
||||
|
||||
import { SingletonProto } from 'egg';
|
||||
|
||||
import binaries, {
|
||||
type BinaryName,
|
||||
type BinaryTaskConfig,
|
||||
} from '../../../../config/binaries.ts';
|
||||
import { BinaryType } from '../../enum/Binary.ts';
|
||||
import {
|
||||
AbstractBinary,
|
||||
BinaryAdapter,
|
||||
type BinaryItem,
|
||||
type FetchResult,
|
||||
} from './AbstractBinary.ts';
|
||||
|
||||
@SingletonProto()
|
||||
@BinaryAdapter(BinaryType.Bucket)
|
||||
@@ -12,22 +22,30 @@ export class BucketBinary extends AbstractBinary {
|
||||
return;
|
||||
}
|
||||
|
||||
async fetch(dir: string, binaryName: BinaryName): Promise<FetchResult | undefined> {
|
||||
async fetch(
|
||||
dir: string,
|
||||
binaryName: BinaryName
|
||||
): Promise<FetchResult | undefined> {
|
||||
// /foo/ => foo/
|
||||
const binaryConfig = binaries[binaryName];
|
||||
const subDir = dir.substring(1);
|
||||
const subDir = dir.slice(1);
|
||||
const url = `${binaryConfig.distUrl}?delimiter=/&prefix=${encodeURIComponent(subDir)}`;
|
||||
const xml = await this.requestXml(url);
|
||||
return { items: this.parseItems(xml, dir, binaryConfig), nextParams: null };
|
||||
}
|
||||
|
||||
protected parseItems(xml: string, dir: string, binaryConfig: BinaryTaskConfig): BinaryItem[] {
|
||||
protected parseItems(
|
||||
xml: string,
|
||||
dir: string,
|
||||
binaryConfig: BinaryTaskConfig
|
||||
): BinaryItem[] {
|
||||
const items: BinaryItem[] = [];
|
||||
// https://nwjs2.s3.amazonaws.com/?prefix=v0.59.0%2Fx64%2F
|
||||
// https://chromedriver.storage.googleapis.com/?delimiter=/&prefix=
|
||||
// <Contents><Key>2.0/chromedriver_linux32.zip</Key><Generation>1380149859530000</Generation><MetaGeneration>2</MetaGeneration><LastModified>2013-09-25T22:57:39.349Z</LastModified><ETag>"c0d96102715c4916b872f91f5bf9b12c"</ETag><Size>7262134</Size><Owner/></Contents><Contents>
|
||||
// <Contents><Key>v0.59.0/nwjs-v0.59.0-linux-ia32.tar.gz</Key><LastModified>2015-11-02T02:34:18.000Z</LastModified><ETag>"b1b7a52928e9f874bad0cabf7f74ba8e"</ETag><Size>22842</Size><StorageClass>STANDARD</StorageClass></Contents>
|
||||
const fileRe = /<Contents><Key>([^<]+?)<\/Key>(?:<Generation>\d+?<\/Generation>)?(?:<MetaGeneration>\d+?<\/MetaGeneration>)?<LastModified>([^<]+?)<\/LastModified><ETag>[^<]+?<\/ETag><Size>(\d+?)<\/Size>/g;
|
||||
const fileRe =
|
||||
/<Contents><Key>([^<]+?)<\/Key>(?:<Generation>\d+?<\/Generation>)?(?:<MetaGeneration>\d+?<\/MetaGeneration>)?<LastModified>([^<]+?)<\/LastModified><ETag>[^<]+?<\/ETag><Size>(\d+?)<\/Size>/g;
|
||||
let matchs = xml.matchAll(fileRe);
|
||||
for (const m of matchs) {
|
||||
const fullname = m[1].trim();
|
||||
@@ -42,7 +60,7 @@ export class BucketBinary extends AbstractBinary {
|
||||
|
||||
const name = path.basename(fullname);
|
||||
const date = m[2].trim();
|
||||
const size = parseInt(m[3].trim());
|
||||
const size = Number.parseInt(m[3].trim());
|
||||
items.push({
|
||||
name,
|
||||
isDir: false,
|
||||
@@ -52,7 +70,8 @@ export class BucketBinary extends AbstractBinary {
|
||||
});
|
||||
}
|
||||
// <CommonPrefixes><Prefix>v0.59.0/x64/</Prefix></CommonPrefixes>
|
||||
const dirRe = /<CommonPrefixes><Prefix>([^<]+?)<\/Prefix><\/CommonPrefixes>/g;
|
||||
const dirRe =
|
||||
/<CommonPrefixes><Prefix>([^<]+?)<\/Prefix><\/CommonPrefixes>/g;
|
||||
matchs = xml.matchAll(dirRe);
|
||||
for (const m of matchs) {
|
||||
// <Prefix>AWSLogs/</Prefix>
|
||||
@@ -65,7 +84,7 @@ export class BucketBinary extends AbstractBinary {
|
||||
let date = '-';
|
||||
// root dir children, should set date to '2022-04-19T01:00:00Z', sync per hour
|
||||
if (dir === '/') {
|
||||
date = new Date().toISOString().split(':', 1)[0] + ':00:00Z';
|
||||
date = `${new Date().toISOString().split(':', 1)[0]}:00:00Z`;
|
||||
}
|
||||
items.push({
|
||||
name,
|
||||
|
||||
@@ -1,10 +1,21 @@
|
||||
import { SingletonProto } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
|
||||
import { basename } from 'node:path';
|
||||
|
||||
import { SingletonProto } from 'egg';
|
||||
|
||||
import { BinaryType } from '../../enum/Binary.ts';
|
||||
import {
|
||||
AbstractBinary,
|
||||
BinaryAdapter,
|
||||
type BinaryItem,
|
||||
type FetchResult,
|
||||
} from './AbstractBinary.ts';
|
||||
|
||||
@SingletonProto()
|
||||
@BinaryAdapter(BinaryType.ChromeForTesting)
|
||||
export class ChromeForTestingBinary extends AbstractBinary {
|
||||
static lastTimestamp = '';
|
||||
#timestamp = '';
|
||||
|
||||
private dirItems?: {
|
||||
[key: string]: BinaryItem[];
|
||||
};
|
||||
@@ -13,57 +24,157 @@ export class ChromeForTestingBinary extends AbstractBinary {
|
||||
this.dirItems = undefined;
|
||||
}
|
||||
|
||||
async fetch(dir: string): Promise<FetchResult | undefined> {
|
||||
if (!this.dirItems) {
|
||||
this.dirItems = {};
|
||||
this.dirItems['/'] = [];
|
||||
let chromeVersion = '';
|
||||
async finishFetch(success: boolean) {
|
||||
if (
|
||||
success &&
|
||||
this.#timestamp &&
|
||||
ChromeForTestingBinary.lastTimestamp !== this.#timestamp
|
||||
) {
|
||||
ChromeForTestingBinary.lastTimestamp = this.#timestamp;
|
||||
}
|
||||
}
|
||||
|
||||
// exports.PUPPETEER_REVISIONS = Object.freeze({
|
||||
// chrome: '113.0.5672.63',
|
||||
// firefox: 'latest',
|
||||
// });
|
||||
const unpkgURL = 'https://unpkg.com/puppeteer-core@latest/lib/cjs/puppeteer/revisions.js';
|
||||
const text = await this.requestXml(unpkgURL);
|
||||
const m = /chrome:\s+\'([\d\.]+)\'\,/.exec(text);
|
||||
if (m) {
|
||||
chromeVersion = m[1];
|
||||
async #syncDirItems() {
|
||||
this.dirItems = {};
|
||||
this.dirItems['/'] = [];
|
||||
const jsonApiEndpoint =
|
||||
'https://googlechromelabs.github.io/chrome-for-testing/known-good-versions-with-downloads.json';
|
||||
const { data, status, headers } = await this.httpclient.request(
|
||||
jsonApiEndpoint,
|
||||
{
|
||||
dataType: 'json',
|
||||
timeout: 30_000,
|
||||
followRedirect: true,
|
||||
gzip: true,
|
||||
}
|
||||
);
|
||||
if (status !== 200) {
|
||||
this.logger.warn(
|
||||
'[ChromeForTestingBinary.request:non-200-status] url: %s, status: %s, headers: %j, data: %j',
|
||||
jsonApiEndpoint,
|
||||
status,
|
||||
headers,
|
||||
data
|
||||
);
|
||||
return;
|
||||
}
|
||||
this.#timestamp = data.timestamp;
|
||||
const hasNewData = this.#timestamp !== ChromeForTestingBinary.lastTimestamp;
|
||||
this.logger.info(
|
||||
'[ChromeForTestingBinary] remote data timestamp: %j, last timestamp: %j, hasNewData: %s',
|
||||
this.#timestamp,
|
||||
ChromeForTestingBinary.lastTimestamp,
|
||||
hasNewData
|
||||
);
|
||||
if (!hasNewData) {
|
||||
return;
|
||||
}
|
||||
|
||||
const platforms = [ 'linux64', 'mac-arm64', 'mac-x64', 'win32', 'win64' ];
|
||||
const date = new Date().toISOString();
|
||||
this.dirItems['/'].push({
|
||||
name: 'known-good-versions-with-downloads.json',
|
||||
date: data.timestamp,
|
||||
size: '-',
|
||||
isDir: false,
|
||||
url: jsonApiEndpoint,
|
||||
});
|
||||
this.dirItems['/'].push({
|
||||
name: 'latest-patch-versions-per-build.json',
|
||||
date: data.timestamp,
|
||||
size: '-',
|
||||
isDir: false,
|
||||
url: 'https://googlechromelabs.github.io/chrome-for-testing/latest-patch-versions-per-build.json',
|
||||
});
|
||||
this.dirItems['/'].push({
|
||||
name: 'last-known-good-versions.json',
|
||||
date: data.timestamp,
|
||||
size: '-',
|
||||
isDir: false,
|
||||
url: 'https://googlechromelabs.github.io/chrome-for-testing/last-known-good-versions.json',
|
||||
});
|
||||
|
||||
// "timestamp": "2023-09-16T00:21:21.964Z",
|
||||
// "versions": [
|
||||
// {
|
||||
// "version": "113.0.5672.0",
|
||||
// "revision": "1121455",
|
||||
// "downloads": {
|
||||
// "chrome": [
|
||||
// {
|
||||
// "platform": "linux64",
|
||||
// "url": "https://edgedl.me.gvt1.com/edgedl/chrome/chrome-for-testing/113.0.5672.0/linux64/chrome-linux64.zip"
|
||||
// },
|
||||
// {
|
||||
// "platform": "mac-arm64",
|
||||
// "url": "https://edgedl.me.gvt1.com/edgedl/chrome/chrome-for-testing/113.0.5672.0/mac-arm64/chrome-mac-arm64.zip"
|
||||
// },
|
||||
// {
|
||||
// "platform": "mac-x64",
|
||||
// "url": "https://edgedl.me.gvt1.com/edgedl/chrome/chrome-for-testing/113.0.5672.0/mac-x64/chrome-mac-x64.zip"
|
||||
// },
|
||||
// {
|
||||
// "platform": "win32",
|
||||
// "url": "https://edgedl.me.gvt1.com/edgedl/chrome/chrome-for-testing/113.0.5672.0/win32/chrome-win32.zip"
|
||||
// },
|
||||
// {
|
||||
// "platform": "win64",
|
||||
// "url": "https://edgedl.me.gvt1.com/edgedl/chrome/chrome-for-testing/113.0.5672.0/win64/chrome-win64.zip"
|
||||
// }
|
||||
// ]
|
||||
// }
|
||||
// },
|
||||
const versions = data.versions as {
|
||||
version: string;
|
||||
revision: string;
|
||||
downloads: {
|
||||
[key: string]: {
|
||||
platform: string;
|
||||
url: string;
|
||||
}[];
|
||||
};
|
||||
}[];
|
||||
for (const item of versions) {
|
||||
this.dirItems['/'].push({
|
||||
name: `${chromeVersion}/`,
|
||||
date,
|
||||
name: `${item.version}/`,
|
||||
date: item.revision,
|
||||
size: '-',
|
||||
isDir: true,
|
||||
url: '',
|
||||
});
|
||||
this.dirItems[`/${chromeVersion}/`] = [];
|
||||
|
||||
for (const platform of platforms) {
|
||||
this.dirItems[`/${chromeVersion}/`].push({
|
||||
name: `${platform}/`,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: true,
|
||||
url: '',
|
||||
});
|
||||
|
||||
// https://edgedl.me.gvt1.com/edgedl/chrome/chrome-for-testing/113.0.5672.63/mac-arm64/chrome-mac-arm64.zip
|
||||
const name = `chrome-${platform}.zip`;
|
||||
this.dirItems[`/${chromeVersion}/${platform}/`] = [
|
||||
{
|
||||
name,
|
||||
date,
|
||||
const versionDir = `/${item.version}/`;
|
||||
if (!this.dirItems[versionDir]) {
|
||||
this.dirItems[versionDir] = [];
|
||||
}
|
||||
for (const category in item.downloads) {
|
||||
const downloads = item.downloads[category];
|
||||
for (const download of downloads) {
|
||||
const platformDir = `${versionDir}${download.platform}/`;
|
||||
if (!this.dirItems[platformDir]) {
|
||||
this.dirItems[platformDir] = [];
|
||||
this.dirItems[versionDir].push({
|
||||
name: `${download.platform}/`,
|
||||
date: item.revision,
|
||||
size: '-',
|
||||
isDir: true,
|
||||
url: '',
|
||||
});
|
||||
}
|
||||
this.dirItems[platformDir].push({
|
||||
name: basename(download.url),
|
||||
date: data.timestamp,
|
||||
size: '-',
|
||||
isDir: false,
|
||||
url: `https://edgedl.me.gvt1.com/edgedl/chrome/chrome-for-testing/${chromeVersion}/${platform}/${name}`,
|
||||
},
|
||||
];
|
||||
url: download.url,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { items: this.dirItems[dir], nextParams: null };
|
||||
async fetch(dir: string): Promise<FetchResult | undefined> {
|
||||
// use https://github.com/GoogleChromeLabs/chrome-for-testing#json-api-endpoints
|
||||
if (!this.dirItems) {
|
||||
await this.#syncDirItems();
|
||||
}
|
||||
return { items: this.dirItems?.[dir] ?? [], nextParams: null };
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,12 @@
|
||||
import { SingletonProto } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
|
||||
import { SingletonProto } from 'egg';
|
||||
|
||||
import { BinaryType } from '../../enum/Binary.ts';
|
||||
import {
|
||||
AbstractBinary,
|
||||
BinaryAdapter,
|
||||
type BinaryItem,
|
||||
type FetchResult,
|
||||
} from './AbstractBinary.ts';
|
||||
|
||||
@SingletonProto()
|
||||
@BinaryAdapter(BinaryType.Cypress)
|
||||
@@ -20,7 +26,7 @@ export class CypressBinary extends AbstractBinary {
|
||||
this.dirItems = {};
|
||||
this.dirItems['/'] = [];
|
||||
for (const version in data.versions) {
|
||||
const major = parseInt(version.split('.', 1)[0]);
|
||||
const major = Number.parseInt(version.split('.', 1)[0]);
|
||||
// need >= 4.0.0
|
||||
// https://npmmirror.com/mirrors/cypress/4.0.0/
|
||||
if (major < 4) continue;
|
||||
@@ -42,8 +48,23 @@ export class CypressBinary extends AbstractBinary {
|
||||
// "https://cdn.cypress.io/desktop/9.2.0/darwin-arm64/cypress.zip"
|
||||
// "https://cdn.cypress.io/desktop/9.2.0/darwin-x64/cypress.zip"
|
||||
// "https://cdn.cypress.io/desktop/9.2.0/linux-x64/cypress.zip"
|
||||
// "https://cdn.cypress.io/desktop/9.2.0/linux-arm64/cypress.zip"
|
||||
// "https://cdn.cypress.io/desktop/9.2.0/win32-x64/cypress.zip"
|
||||
const platforms = [ 'darwin-x64', 'darwin-arm64', 'linux-x64', 'win32-x64' ];
|
||||
// https://github.com/cypress-io/cypress/blob/develop/scripts/binary/index.js#L146
|
||||
// const systems = [
|
||||
// { platform: 'linux', arch: 'x64' },
|
||||
// { platform: 'linux', arch: 'arm64' },
|
||||
// { platform: 'darwin', arch: 'x64' },
|
||||
// { platform: 'darwin', arch: 'arm64' },
|
||||
// { platform: 'win32', arch: 'x64' },
|
||||
// ]
|
||||
const platforms = [
|
||||
'darwin-x64',
|
||||
'darwin-arm64',
|
||||
'linux-x64',
|
||||
'linux-arm64',
|
||||
'win32-x64',
|
||||
];
|
||||
for (const platform of platforms) {
|
||||
this.dirItems[subDir].push({
|
||||
name: `${platform}/`,
|
||||
|
||||
227
app/common/adapter/binary/EdgedriverBinary.ts
Normal file
227
app/common/adapter/binary/EdgedriverBinary.ts
Normal file
@@ -0,0 +1,227 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import { SingletonProto } from 'egg';
|
||||
|
||||
import {
|
||||
AbstractBinary,
|
||||
BinaryAdapter,
|
||||
type BinaryItem,
|
||||
type FetchResult,
|
||||
} from './AbstractBinary.ts';
|
||||
import { BinaryType } from '../../enum/Binary.ts';
|
||||
|
||||
@SingletonProto()
|
||||
@BinaryAdapter(BinaryType.Edgedriver)
|
||||
export class EdgedriverBinary extends AbstractBinary {
|
||||
private dirItems?: {
|
||||
[key: string]: BinaryItem[];
|
||||
};
|
||||
|
||||
async initFetch() {
|
||||
this.dirItems = undefined;
|
||||
}
|
||||
|
||||
async #syncDirItems() {
|
||||
this.dirItems = {};
|
||||
this.dirItems['/'] = [];
|
||||
const jsonApiEndpoint = 'https://edgeupdates.microsoft.com/api/products';
|
||||
const { data, status, headers } = await this.httpclient.request(
|
||||
jsonApiEndpoint,
|
||||
{
|
||||
dataType: 'json',
|
||||
timeout: 30_000,
|
||||
followRedirect: true,
|
||||
gzip: true,
|
||||
}
|
||||
);
|
||||
if (status !== 200) {
|
||||
this.logger.warn(
|
||||
'[EdgedriverBinary.request:non-200-status] url: %s, status: %s, headers: %j, data: %j',
|
||||
jsonApiEndpoint,
|
||||
status,
|
||||
headers,
|
||||
data
|
||||
);
|
||||
return;
|
||||
}
|
||||
this.logger.info('[EdgedriverBinary] remote data length: %s', data.length);
|
||||
// [
|
||||
// {
|
||||
// "Product": "Stable",
|
||||
// "Releases": [
|
||||
// {
|
||||
// "ReleaseId": 73376,
|
||||
// "Platform": "iOS",
|
||||
// "Architecture": "arm64",
|
||||
// "CVEs": [],
|
||||
// "ProductVersion": "124.0.2478.89",
|
||||
// "Artifacts": [],
|
||||
// "PublishedTime": "2024-05-07T02:57:00",
|
||||
// "ExpectedExpiryDate": "2025-05-07T02:57:00"
|
||||
// },
|
||||
// {
|
||||
// "ReleaseId": 73629,
|
||||
// "Platform": "Windows",
|
||||
// "Architecture": "x86",
|
||||
// "CVEs": [
|
||||
// "CVE-2024-4559",
|
||||
// "CVE-2024-4671"
|
||||
// ],
|
||||
// "ProductVersion": "124.0.2478.97",
|
||||
// "Artifacts": [
|
||||
// {
|
||||
// "ArtifactName": "msi",
|
||||
// "Location": "https://msedge.sf.dl.delivery.mp.microsoft.com/filestreamingservice/files/aa1c9fe3-bb9c-4a80-9ff7-5c109701fbfe/MicrosoftEdgeEnterpriseX86.msi",
|
||||
// "Hash": "4CEF7B907D3E2371E953C41190E32C3560CEE7D3F16D7550CA156DC976EBCB80",
|
||||
// "HashAlgorithm": "SHA256",
|
||||
// "SizeInBytes": 162029568
|
||||
// }
|
||||
// ],
|
||||
// "PublishedTime": "2024-05-11T06:47:00",
|
||||
// "ExpectedExpiryDate": "2025-05-10T16:59:00"
|
||||
// },
|
||||
// {
|
||||
// "ReleaseId": 73630,
|
||||
// "Platform": "Linux",
|
||||
// "Architecture": "x64",
|
||||
// "CVEs": [
|
||||
// "CVE-2024-4559"
|
||||
// ],
|
||||
// "ProductVersion": "124.0.2478.97",
|
||||
// "Artifacts": [
|
||||
// {
|
||||
// "ArtifactName": "rpm",
|
||||
// "Location": "https://packages.microsoft.com/yumrepos/edge/microsoft-edge-stable-124.0.2478.97-1.x86_64.rpm",
|
||||
// "Hash": "32D9C333544DDD9C56FED54844E89EF00F3E5620942C07B9B68D214016687895",
|
||||
// "HashAlgorithm": "SHA256",
|
||||
// "SizeInBytes": 169877932
|
||||
// },
|
||||
// {
|
||||
// "ArtifactName": "deb",
|
||||
// "Location": "https://packages.microsoft.com/repos/edge/pool/main/m/microsoft-edge-stable/microsoft-edge-stable_124.0.2478.97-1_amd64.deb",
|
||||
// "Hash": "85D0AD1D63847B3DD54F0F214D18A2B54462BB43291536E773AD1B8B29BBF799",
|
||||
// "HashAlgorithm": "SHA256",
|
||||
// "SizeInBytes": 167546042
|
||||
// }
|
||||
// ],
|
||||
// "PublishedTime": "2024-05-10T17:01:00",
|
||||
// "ExpectedExpiryDate": "2025-05-10T17:01:00"
|
||||
// },
|
||||
// {
|
||||
// "Product": "EdgeUpdate",
|
||||
// "Releases": [
|
||||
// {
|
||||
// "ReleaseId": 73493,
|
||||
// "Platform": "Windows",
|
||||
// "Architecture": "x86",
|
||||
// "CVEs": [],
|
||||
// "ProductVersion": "1.3.187.37",
|
||||
// "Artifacts": [
|
||||
// {
|
||||
// "ArtifactName": "exe",
|
||||
// "Location": "https://msedge.sf.dl.delivery.mp.microsoft.com/filestreamingservice/files/a2fa84fe-796b-4f80-b1cd-f4d1f5731aa8/MicrosoftEdgeUpdateSetup_X86_1.3.187.37.exe",
|
||||
// "Hash": "503088D22461FEE5D7B6B011609D73FFD5869D3ACE1DBB0F00F8F3B9D122C514",
|
||||
// "HashAlgorithm": "SHA256",
|
||||
// "SizeInBytes": 1622072
|
||||
// }
|
||||
// ],
|
||||
// "PublishedTime": "2024-05-08T05:44:00",
|
||||
// "ExpectedExpiryDate": "2025-05-08T05:44:00"
|
||||
// }
|
||||
// ]
|
||||
// }
|
||||
const products = data as {
|
||||
Product: string;
|
||||
Releases: {
|
||||
ReleaseId: number;
|
||||
Platform: string;
|
||||
Architecture: string;
|
||||
CVEs: string[];
|
||||
ProductVersion: string;
|
||||
Artifacts: {
|
||||
ArtifactName: string;
|
||||
Location: string;
|
||||
Hash: string;
|
||||
HashAlgorithm: string;
|
||||
SizeInBytes: string;
|
||||
}[];
|
||||
PublishedTime: string;
|
||||
ExpectedExpiryDate: string;
|
||||
}[];
|
||||
}[];
|
||||
const existsVersions = new Set<string>();
|
||||
for (const product of products) {
|
||||
if (product.Product === 'EdgeUpdate') continue;
|
||||
for (const release of product.Releases) {
|
||||
if (!release.Artifacts || release.Artifacts.length === 0) continue;
|
||||
if (existsVersions.has(release.ProductVersion)) continue;
|
||||
this.dirItems['/'].push({
|
||||
name: `${release.ProductVersion}/`,
|
||||
date: release.PublishedTime,
|
||||
size: '-',
|
||||
isDir: true,
|
||||
url: '',
|
||||
});
|
||||
existsVersions.add(release.ProductVersion);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fetch(dir: string): Promise<FetchResult | undefined> {
|
||||
if (!this.dirItems) {
|
||||
await this.#syncDirItems();
|
||||
}
|
||||
// fetch root dir
|
||||
if (dir === '/') {
|
||||
return { items: this.dirItems?.[dir] ?? [], nextParams: null };
|
||||
}
|
||||
|
||||
// fetch sub dir
|
||||
// /foo/ => foo/
|
||||
const subDir = dir.slice(1);
|
||||
// https://msedgewebdriverstorage.blob.core.windows.net/edgewebdriver?prefix=124.0.2478.97/&delimiter=/&maxresults=100&restype=container&comp=list
|
||||
const url = `https://msedgewebdriverstorage.blob.core.windows.net/edgewebdriver?prefix=${encodeURIComponent(subDir)}&delimiter=/&maxresults=100&restype=container&comp=list`;
|
||||
const xml = await this.requestXml(url);
|
||||
return { items: this.#parseItems(xml), nextParams: null };
|
||||
}
|
||||
|
||||
#parseItems(xml: string): BinaryItem[] {
|
||||
const items: BinaryItem[] = [];
|
||||
// <Blob><Name>124.0.2478.97/edgedriver_arm64.zip</Name><Url>https://msedgewebdriverstorage.blob.core.windows.net/edgewebdriver/124.0.2478.97/edgedriver_arm64.zip</Url><Properties><Last-Modified>Fri, 10 May 2024 18:35:44 GMT</Last-Modified><Etag>0x8DC712000713C13</Etag><Content-Length>9191362</Content-Length><Content-Type>application/octet-stream</Content-Type><Content-Encoding /><Content-Language /><Content-MD5>1tjPTf5JU6KKB06Qf1JOGw==</Content-MD5><Cache-Control /><BlobType>BlockBlob</BlobType><LeaseStatus>unlocked</LeaseStatus></Properties></Blob>
|
||||
const fileRe =
|
||||
/<Blob><Name>([^<]+?)<\/Name><Url>([^<]+?)<\/Url><Properties><Last-Modified>([^<]+?)<\/Last-Modified><Etag>(?:[^<]+?)<\/Etag><Content-Length>(\d+)<\/Content-Length>/g;
|
||||
const matchItems = xml.matchAll(fileRe);
|
||||
for (const m of matchItems) {
|
||||
const fullname = m[1].trim();
|
||||
// <Blob>
|
||||
// <Name>124.0.2478.97/edgedriver_arm64.zip</Name>
|
||||
// <Url>https://msedgewebdriverstorage.blob.core.windows.net/edgewebdriver/124.0.2478.97/edgedriver_arm64.zip</Url>
|
||||
// <Properties>
|
||||
// <Last-Modified>Fri, 10 May 2024 18:35:44 GMT</Last-Modified>
|
||||
// <Etag>0x8DC712000713C13</Etag>
|
||||
// <Content-Length>9191362</Content-Length>
|
||||
// <Content-Type>application/octet-stream</Content-Type>
|
||||
// <Content-Encoding/>
|
||||
// <Content-Language/>
|
||||
// <Content-MD5>1tjPTf5JU6KKB06Qf1JOGw==</Content-MD5>
|
||||
// <Cache-Control/>
|
||||
// <BlobType>BlockBlob</BlobType>
|
||||
// <LeaseStatus>unlocked</LeaseStatus>
|
||||
// </Properties>
|
||||
// </Blob>
|
||||
// ignore size = 0 dir
|
||||
const name = path.basename(fullname);
|
||||
const url = m[2].trim();
|
||||
const date = m[3].trim();
|
||||
const size = Number.parseInt(m[4].trim());
|
||||
items.push({
|
||||
name,
|
||||
isDir: false,
|
||||
url,
|
||||
size,
|
||||
date,
|
||||
});
|
||||
}
|
||||
return items;
|
||||
}
|
||||
}
|
||||
@@ -1,13 +1,21 @@
|
||||
import { SingletonProto } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import binaries, { BinaryName } from '../../../../config/binaries';
|
||||
import { BinaryAdapter, BinaryItem, FetchResult } from './AbstractBinary';
|
||||
import { GithubBinary } from './GithubBinary';
|
||||
import { SingletonProto } from 'egg';
|
||||
|
||||
import binaries, { type BinaryName } from '../../../../config/binaries.ts';
|
||||
import { BinaryType } from '../../enum/Binary.ts';
|
||||
import { GithubBinary } from './GithubBinary.ts';
|
||||
import {
|
||||
BinaryAdapter,
|
||||
type BinaryItem,
|
||||
type FetchResult,
|
||||
} from './AbstractBinary.ts';
|
||||
|
||||
@SingletonProto()
|
||||
@BinaryAdapter(BinaryType.Electron)
|
||||
export class ElectronBinary extends GithubBinary {
|
||||
async fetch(dir: string, binaryName: BinaryName = 'electron'): Promise<FetchResult | undefined> {
|
||||
async fetch(
|
||||
dir: string,
|
||||
binaryName: BinaryName = 'electron'
|
||||
): Promise<FetchResult | undefined> {
|
||||
const releases = await this.initReleases(binaryName, binaries.electron);
|
||||
if (!releases) return;
|
||||
|
||||
@@ -24,7 +32,7 @@ export class ElectronBinary extends GithubBinary {
|
||||
// v14.2.6 => 14.2.6
|
||||
if (/^v\d+?\./.test(item.tag_name)) {
|
||||
items.push({
|
||||
name: `${item.tag_name.substring(1)}/`,
|
||||
name: `${item.tag_name.slice(1)}/`,
|
||||
isDir: true,
|
||||
url: item.url,
|
||||
size: '-',
|
||||
@@ -34,7 +42,10 @@ export class ElectronBinary extends GithubBinary {
|
||||
}
|
||||
} else {
|
||||
for (const item of releases) {
|
||||
if (dir === `/${item.tag_name}/` || dir === `/${item.tag_name.substring(1)}/`) {
|
||||
if (
|
||||
dir === `/${item.tag_name}/` ||
|
||||
dir === `/${item.tag_name.slice(1)}/`
|
||||
) {
|
||||
items = this.formatItems(item, binaries.electron);
|
||||
break;
|
||||
}
|
||||
|
||||
145
app/common/adapter/binary/FirefoxBinary.ts
Normal file
145
app/common/adapter/binary/FirefoxBinary.ts
Normal file
@@ -0,0 +1,145 @@
|
||||
import { basename } from 'node:path';
|
||||
|
||||
import { SingletonProto } from 'egg';
|
||||
|
||||
import binaries, { type BinaryName } from '../../../../config/binaries.ts';
|
||||
import { BinaryType } from '../../enum/Binary.ts';
|
||||
import {
|
||||
AbstractBinary,
|
||||
BinaryAdapter,
|
||||
type BinaryItem,
|
||||
type FetchResult,
|
||||
} from './AbstractBinary.ts';
|
||||
|
||||
@SingletonProto()
|
||||
@BinaryAdapter(BinaryType.Firefox)
|
||||
export class FirefoxBinary extends AbstractBinary {
|
||||
async initFetch() {
|
||||
// do nothing
|
||||
return;
|
||||
}
|
||||
|
||||
// Only fetch Firefox versions >= 100.0.0 to avoid too old versions
|
||||
async fetch(
|
||||
dir: string,
|
||||
binaryName: BinaryName
|
||||
): Promise<FetchResult | undefined> {
|
||||
const binaryConfig = binaries[binaryName];
|
||||
const url = `${binaryConfig.distUrl}${dir}`;
|
||||
const html = await this.requestXml(url);
|
||||
|
||||
// Mozilla archive has format like:
|
||||
// <tr>
|
||||
// <td>Dir</td>
|
||||
// <td><a href="/pub/firefox/releases/131.0.3/update/">update/</a></td>
|
||||
// <td></td>
|
||||
// <td></td>
|
||||
// </tr>
|
||||
// <tr>
|
||||
// <td>File</td>
|
||||
// <td><a href="/pub/firefox/releases/131.0.3/SHA256SUMS.asc">SHA256SUMS.asc</a></td>
|
||||
// <td>833</td>
|
||||
// <td>12-Apr-2025 08:52</td>
|
||||
// </tr>
|
||||
|
||||
// Parse Mozilla directory listing format - handles two different formats:
|
||||
// Format 1 (main index): <td><a href="/path/">name/</a></td>
|
||||
// Format 2 (version dir): <td>Type</td><td><a href="/path/">name</a></td><td>size</td><td>date</td>
|
||||
|
||||
// Try the detailed format first (with Type/Size/Date columns)
|
||||
const detailedRe = /<tr>\s*<td>(Dir|File)<\/td>\s*<td><a href="([^"]+?)"[^>]*?>[^<]+?<\/a><\/td>\s*<td>([^<]*?)<\/td>\s*<td>([^<]*?)<\/td>\s*<\/tr>/gi;
|
||||
const detailedMatches = Array.from(html.matchAll(detailedRe));
|
||||
|
||||
let matchs: RegExpMatchArray[];
|
||||
let useDetailedFormat = false;
|
||||
|
||||
if (detailedMatches.length > 0) {
|
||||
// Use detailed format
|
||||
matchs = detailedMatches;
|
||||
useDetailedFormat = true;
|
||||
} else {
|
||||
// Fallback to simple format
|
||||
const simpleRe = /<td><a href="([^"]+?)"[^>]*?>[^<]+?<\/a><\/td>/gi;
|
||||
matchs = Array.from(html.matchAll(simpleRe));
|
||||
}
|
||||
|
||||
const items: BinaryItem[] = [];
|
||||
|
||||
for (const m of matchs) {
|
||||
let href: string;
|
||||
let isDir: boolean;
|
||||
let size: string;
|
||||
let date: string;
|
||||
|
||||
if (useDetailedFormat) {
|
||||
// Detailed format: [fullMatch, type, href, size, date]
|
||||
const type = m[1]; // "Dir" or "File"
|
||||
href = m[2];
|
||||
size = m[3].trim() || '-';
|
||||
date = m[4].trim() || '-';
|
||||
isDir = type === 'Dir';
|
||||
} else {
|
||||
// Simple format: [fullMatch, href]
|
||||
href = m[1];
|
||||
isDir = href.endsWith('/');
|
||||
size = '-';
|
||||
date = '-';
|
||||
}
|
||||
|
||||
// Extract the name from the href path
|
||||
// href could be "/pub/firefox/releases/130.0/" or just "130.0/"
|
||||
let name = href;
|
||||
if (href.startsWith('/')) {
|
||||
// Extract the last part of the path
|
||||
const parts = href.split('/').filter(Boolean);
|
||||
name = parts[parts.length - 1] ?? '';
|
||||
if (href.endsWith('/')) {
|
||||
name += '/';
|
||||
}
|
||||
}
|
||||
|
||||
if (!isDir) {
|
||||
// Keep the full name for files
|
||||
name = basename(name);
|
||||
}
|
||||
|
||||
// Skip parent directory links
|
||||
if (name === '../' || href === '/pub/firefox/' || href.endsWith('/..') || href === '/pub/firefox/releases/') continue;
|
||||
|
||||
// Filter out old Firefox versions (< 100.0.0) for directories - apply to main index (root directory)
|
||||
if (isDir && name !== '../' && dir === '/') {
|
||||
const versionName = name.slice(0, -1); // Remove trailing '/'
|
||||
// Skip non-version directories that are just special names
|
||||
if (/^\d+\.\d+/.test(versionName)) {
|
||||
try {
|
||||
const major = Number.parseInt(versionName.split('.')[0]);
|
||||
if (major < 100) {
|
||||
continue; // Skip versions < 100.0.0
|
||||
}
|
||||
} catch {
|
||||
// If version parsing fails, skip this directory
|
||||
continue;
|
||||
}
|
||||
}
|
||||
// Also skip named directories that aren't version numbers
|
||||
else if (!['latest', 'latest-beta', 'latest-esr'].includes(versionName)) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
const fileUrl = isDir ? '' : `${url}${name}`;
|
||||
if (binaryConfig.ignoreFiles?.includes(`${dir}${name}`)) continue;
|
||||
|
||||
const item = {
|
||||
name,
|
||||
isDir,
|
||||
url: fileUrl,
|
||||
size,
|
||||
date,
|
||||
ignoreDownloadStatuses: binaryConfig.options?.ignoreDownloadStatuses,
|
||||
};
|
||||
items.push(item);
|
||||
}
|
||||
return { items, nextParams: null };
|
||||
}
|
||||
}
|
||||
@@ -1,15 +1,17 @@
|
||||
import { SingletonProto } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import binaries, { BinaryName, BinaryTaskConfig } from '../../../../config/binaries';
|
||||
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
|
||||
import { SingletonProto } from 'egg';
|
||||
|
||||
import binaries, { type BinaryName, type BinaryTaskConfig } from '../../../../config/binaries.ts';
|
||||
import { BinaryType } from '../../enum/Binary.ts';
|
||||
import { AbstractBinary, BinaryAdapter, type BinaryItem, type FetchResult } from './AbstractBinary.ts';
|
||||
|
||||
@SingletonProto()
|
||||
@BinaryAdapter(BinaryType.GitHub)
|
||||
export class GithubBinary extends AbstractBinary {
|
||||
private releases: Record<string, any[]> = {};
|
||||
// oxlint-disable-next-line typescript-eslint/no-explicit-any
|
||||
private releases: Record<string, any[] | undefined> = {};
|
||||
|
||||
async initFetch(binaryName: BinaryName) {
|
||||
delete this.releases[binaryName];
|
||||
this.releases[binaryName] = undefined;
|
||||
}
|
||||
|
||||
protected async initReleases(binaryName: BinaryName, binaryConfig: BinaryTaskConfig) {
|
||||
@@ -17,11 +19,17 @@ export class GithubBinary extends AbstractBinary {
|
||||
// https://docs.github.com/en/rest/reference/releases get three pages
|
||||
// https://api.github.com/repos/electron/electron/releases
|
||||
// https://api.github.com/repos/electron/electron/releases?per_page=100&page=3
|
||||
// oxlint-disable-next-line typescript-eslint/no-explicit-any
|
||||
let releases: any[] = [];
|
||||
const maxPage = binaryConfig.options?.maxPage || 1;
|
||||
const perPage = binaryConfig.options?.perPage || 100;
|
||||
for (let i = 0; i < maxPage; i++) {
|
||||
const url = `https://api.github.com/repos/${binaryConfig.repo}/releases?per_page=100&page=${i + 1}`;
|
||||
const data = await this.requestJSON(url);
|
||||
const url = `https://api.github.com/repos/${binaryConfig.repo}/releases?per_page=${perPage}&page=${i + 1}`;
|
||||
const requestHeaders: Record<string, string> = {};
|
||||
if (process.env.GITHUB_TOKEN) {
|
||||
requestHeaders.Authorization = `token ${process.env.GITHUB_TOKEN}`;
|
||||
}
|
||||
const data = await this.requestJSON(url, requestHeaders);
|
||||
if (!Array.isArray(data)) {
|
||||
// {"message":"API rate limit exceeded for 47.57.239.54. (But here's the good news: Authenticated requests get a higher rate limit. Check out the documentation for more details.)","documentation_url":"https://docs.github.com/rest/overview/resources-in-the-rest-api#rate-limiting"}
|
||||
if (typeof data?.message === 'string' && data.message.includes('rate limit')) {
|
||||
@@ -38,12 +46,19 @@ export class GithubBinary extends AbstractBinary {
|
||||
return this.releases[binaryName];
|
||||
}
|
||||
|
||||
// oxlint-disable-next-line typescript-eslint/no-explicit-any
|
||||
protected formatItems(releaseItem: any, binaryConfig: BinaryTaskConfig) {
|
||||
const items: BinaryItem[] = [];
|
||||
// 200MB
|
||||
const maxFileSize = 1024 * 1024 * 200;
|
||||
// 250MB
|
||||
const maxFileSize = 1024 * 1024 * 250;
|
||||
for (const asset of releaseItem.assets) {
|
||||
if (asset.size > maxFileSize) continue;
|
||||
if (asset.size > maxFileSize) {
|
||||
this.logger.info(
|
||||
'[GithubBinary.formatItems] asset reach max file size(> 250MB), ignore download it, asset: %j',
|
||||
asset
|
||||
);
|
||||
continue;
|
||||
}
|
||||
items.push({
|
||||
name: asset.name,
|
||||
isDir: false,
|
||||
|
||||
@@ -1,7 +1,13 @@
|
||||
import { SingletonProto } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import binaries, { BinaryName } from '../../../../config/binaries';
|
||||
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
|
||||
import { SingletonProto } from 'egg';
|
||||
|
||||
import binaries, { type BinaryName } from '../../../../config/binaries.ts';
|
||||
import { BinaryType } from '../../enum/Binary.ts';
|
||||
import {
|
||||
AbstractBinary,
|
||||
BinaryAdapter,
|
||||
type BinaryItem,
|
||||
type FetchResult,
|
||||
} from './AbstractBinary.ts';
|
||||
|
||||
@SingletonProto()
|
||||
@BinaryAdapter(BinaryType.Imagemin)
|
||||
@@ -11,7 +17,10 @@ export class ImageminBinary extends AbstractBinary {
|
||||
return;
|
||||
}
|
||||
|
||||
async fetch(dir: string, binaryName: BinaryName): Promise<FetchResult | undefined> {
|
||||
async fetch(
|
||||
dir: string,
|
||||
binaryName: BinaryName
|
||||
): Promise<FetchResult | undefined> {
|
||||
const binaryConfig = binaries[binaryName];
|
||||
const dirItems: {
|
||||
[key: string]: BinaryItem[];
|
||||
@@ -24,7 +33,7 @@ export class ImageminBinary extends AbstractBinary {
|
||||
// https://github.com/imagemin/jpegtran-bin/blob/v4.0.0/lib/index.js
|
||||
// https://github.com/imagemin/pngquant-bin/blob/v4.0.0/lib/index.js
|
||||
for (const version in data.versions) {
|
||||
const major = parseInt(version.split('.', 1)[0]);
|
||||
const major = Number.parseInt(version.split('.', 1)[0]);
|
||||
if (major < 4) continue;
|
||||
// >= 4.0.0
|
||||
const date = data.time[version];
|
||||
@@ -47,7 +56,7 @@ export class ImageminBinary extends AbstractBinary {
|
||||
});
|
||||
const versionVendorDir = `/v${version}/vendor/`;
|
||||
dirItems[versionVendorDir] = [];
|
||||
for (const platform of binaryConfig.options!.nodePlatforms!) {
|
||||
for (const platform of binaryConfig.options?.nodePlatforms ?? []) {
|
||||
dirItems[versionVendorDir].push({
|
||||
name: `${platform}/`,
|
||||
date,
|
||||
@@ -57,16 +66,16 @@ export class ImageminBinary extends AbstractBinary {
|
||||
});
|
||||
const platformDir = `/v${version}/vendor/${platform}/`;
|
||||
dirItems[platformDir] = [];
|
||||
const archs = binaryConfig.options!.nodeArchs![platform];
|
||||
const archs = binaryConfig.options?.nodeArchs?.[platform] ?? [];
|
||||
if (archs.length === 0) {
|
||||
for (const name of binaryConfig.options!.binFiles![platform]) {
|
||||
for (const name of binaryConfig.options?.binFiles?.[platform] ?? []) {
|
||||
dirItems[platformDir].push({
|
||||
name,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: false,
|
||||
url: `${binaryConfig.distUrl}/${binaryConfig.repo}${platformDir}${name}`,
|
||||
ignoreDownloadStatuses: [ 404 ],
|
||||
ignoreDownloadStatuses: [404],
|
||||
});
|
||||
}
|
||||
} else {
|
||||
@@ -81,14 +90,15 @@ export class ImageminBinary extends AbstractBinary {
|
||||
const platformArchDir = `/v${version}/vendor/${platform}/${arch}/`;
|
||||
dirItems[platformArchDir] = [];
|
||||
|
||||
for (const name of binaryConfig.options!.binFiles![platform]) {
|
||||
for (const name of binaryConfig.options?.binFiles?.[platform] ??
|
||||
[]) {
|
||||
dirItems[platformArchDir].push({
|
||||
name,
|
||||
date,
|
||||
size: '-',
|
||||
isDir: false,
|
||||
url: `${binaryConfig.distUrl}/${binaryConfig.repo}${platformArchDir}${name}`,
|
||||
ignoreDownloadStatuses: [ 404 ],
|
||||
ignoreDownloadStatuses: [404],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,16 @@
|
||||
import { SingletonProto } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import binaries, { BinaryName } from '../../../../config/binaries';
|
||||
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
|
||||
import { basename } from 'node:path';
|
||||
|
||||
import { SingletonProto } from 'egg';
|
||||
import dayjs from 'dayjs';
|
||||
|
||||
import binaries, { type BinaryName } from '../../../../config/binaries.ts';
|
||||
import { BinaryType } from '../../enum/Binary.ts';
|
||||
import {
|
||||
AbstractBinary,
|
||||
BinaryAdapter,
|
||||
type BinaryItem,
|
||||
type FetchResult,
|
||||
} from './AbstractBinary.ts';
|
||||
|
||||
@SingletonProto()
|
||||
@BinaryAdapter(BinaryType.Node)
|
||||
@@ -11,36 +20,89 @@ export class NodeBinary extends AbstractBinary {
|
||||
return;
|
||||
}
|
||||
|
||||
async fetch(dir: string, binaryName: BinaryName): Promise<FetchResult | undefined> {
|
||||
async fetch(
|
||||
dir: string,
|
||||
binaryName: BinaryName
|
||||
): Promise<FetchResult | undefined> {
|
||||
const binaryConfig = binaries[binaryName];
|
||||
const url = `${binaryConfig.distUrl}${dir}`;
|
||||
const html = await this.requestXml(url);
|
||||
|
||||
// <a href="v9.8.0/">v9.8.0/</a> 08-Mar-2018 01:55 -
|
||||
// <a href="v9.9.0/">v9.9.0/</a> 21-Mar-2018 15:47 -
|
||||
// <a href="index.json">index.json</a> 17-Dec-2021 23:16 219862
|
||||
// <a href="index.tab">index.tab</a> 17-Dec-2021 23:16 136319
|
||||
// <a href="node-0.0.1.tar.gz">node-0.0.1.tar.gz</a> 26-Aug-2011 16:22 2846972
|
||||
// <a href="node-v14.0.0-nightly20200119b318926634-linux-armv7l.tar.xz">node-v14.0.0-nightly20200119b318926634-linux-ar..></a> 19-Jan-2020 06:07 18565976
|
||||
const re = /<a href="([^\"]+?)"[^>]*?>[^<]+?<\/a>\s+?([\w\-]+? \w{2}\:\d{2})\s+?(\d+|\-)/ig;
|
||||
|
||||
// new html format
|
||||
// <a href="docs/">docs/</a> - -
|
||||
// <a href="win-x64/">win-x64/</a> - -
|
||||
// <a href="win-x86/">win-x86/</a> - -
|
||||
// <a href="/dist/v18.15.0/SHASUMS256.txt.asc">SHASUMS256.txt.asc</a> 04-Nov-2024 17:29 3.7 KB
|
||||
// <a href="/dist/v18.15.0/SHASUMS256.txt.sig">SHASUMS256.txt.sig</a> 04-Nov-2024 17:29 310 B
|
||||
// <a href="/dist/v18.15.0/SHASUMS256.txt">SHASUMS256.txt</a> 04-Nov-2024 17:29 3.2 KB
|
||||
|
||||
// <a href="/dist/latest-v20.x/SHASUMS256.txt.asc">SHASUMS256.txt.asc</a> 03 Sept 2025, 18:20 4.7 KB
|
||||
// <a href="/dist/latest-v20.x/SHASUMS256.txt.sig">SHASUMS256.txt.sig</a> 03 Sept 2025, 18:20 566 B
|
||||
// <a href="/dist/latest-v20.x/SHASUMS256.txt">SHASUMS256.txt</a> 03 Sept 2025, 18:19 3.8 KB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5-aix-ppc64.tar.gz">node-v20.19.5-aix-ppc64.tar.gz</a> 03 Sept 2025, 18:19 60 MB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5-arm64.msi">node-v20.19.5-arm64.msi</a> 03 Sept 2025, 18:19 24 MB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5-darwin-arm64.tar.gz">node-v20.19.5-darwin-arm64.tar.gz</a> 03 Sept 2025, 18:19 41 MB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5-darwin-arm64.tar.xz">node-v20.19.5-darwin-arm64.tar.xz</a> 03 Sept 2025, 18:19 21 MB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5-darwin-x64.tar.gz">node-v20.19.5-darwin-x64.tar.gz</a> 03 Sept 2025, 18:19 43 MB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5-darwin-x64.tar.xz">node-v20.19.5-darwin-x64.tar.xz</a> 03 Sept 2025, 18:19 23 MB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5-headers.tar.gz">node-v20.19.5-headers.tar.gz</a> 03 Sept 2025, 18:19 8.7 MB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5-headers.tar.xz">node-v20.19.5-headers.tar.xz</a> 03 Sept 2025, 18:19 524 KB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5-linux-arm64.tar.gz">node-v20.19.5-linux-arm64.tar.gz</a> 03 Sept 2025, 18:19 47 MB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5-linux-arm64.tar.xz">node-v20.19.5-linux-arm64.tar.xz</a> 03 Sept 2025, 18:19 25 MB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5-linux-armv7l.tar.gz">node-v20.19.5-linux-armv7l.tar.gz</a> 03 Sept 2025, 18:19 43 MB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5-linux-armv7l.tar.xz">node-v20.19.5-linux-armv7l.tar.xz</a> 03 Sept 2025, 18:19 22 MB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5-linux-ppc64le.tar.gz">node-v20.19.5-linux-ppc64le.tar.gz</a> 03 Sept 2025, 18:19 49 MB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5-linux-ppc64le.tar.xz">node-v20.19.5-linux-ppc64le.tar.xz</a> 03 Sept 2025, 18:19 26 MB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5-linux-s390x.tar.gz">node-v20.19.5-linux-s390x.tar.gz</a> 03 Sept 2025, 18:19 47 MB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5-linux-s390x.tar.xz">node-v20.19.5-linux-s390x.tar.xz</a> 03 Sept 2025, 18:19 25 MB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5-linux-x64.tar.gz">node-v20.19.5-linux-x64.tar.gz</a> 03 Sept 2025, 18:19 47 MB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5-linux-x64.tar.xz">node-v20.19.5-linux-x64.tar.xz</a> 03 Sept 2025, 18:19 26 MB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5-win-arm64.7z">node-v20.19.5-win-arm64.7z</a> 03 Sept 2025, 18:19 17 MB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5-win-arm64.zip">node-v20.19.5-win-arm64.zip</a> 03 Sept 2025, 18:19 26 MB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5-win-x64.7z">node-v20.19.5-win-x64.7z</a> 03 Sept 2025, 18:19 19 MB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5-win-x64.zip">node-v20.19.5-win-x64.zip</a> 03 Sept 2025, 18:19 30 MB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5-win-x86.7z">node-v20.19.5-win-x86.7z</a> 03 Sept 2025, 18:19 18 MB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5-win-x86.zip">node-v20.19.5-win-x86.zip</a> 03 Sept 2025, 18:19 28 MB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5-x64.msi">node-v20.19.5-x64.msi</a> 03 Sept 2025, 18:19 27 MB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5-x86.msi">node-v20.19.5-x86.msi</a> 03 Sept 2025, 18:19 25 MB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5.pkg">node-v20.19.5.pkg</a> 03 Sept 2025, 18:19 72 MB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5.tar.gz">node-v20.19.5.tar.gz</a> 03 Sept 2025, 18:19 89 MB
|
||||
// <a href="/dist/latest-v20.x/node-v20.19.5.tar.xz">node-v20.19.5.tar.xz</a> 03 Sept 2025, 18:19 43 MB
|
||||
|
||||
// date format: 19-Jan-2020 06:07 or 03 Sept 2025, 18:19
|
||||
const re =
|
||||
/<a href="([^"]+?)"[^>]*?>[^<]+?<\/a>\s+?((?:[\w-]+? \w{2}:\d{2})|(?:\d{2} [A-Za-z]{3,9} \d{4}, \d{2}:\d{2})|-)\s+?([\d.\-\s\w]+)/gi;
|
||||
const matchs = html.matchAll(re);
|
||||
const items: BinaryItem[] = [];
|
||||
for (const m of matchs) {
|
||||
const name = m[1];
|
||||
let name = m[1];
|
||||
const isDir = name.endsWith('/');
|
||||
if (!isDir) {
|
||||
// /dist/v18.15.0/SHASUMS256.txt => SHASUMS256.txt
|
||||
name = basename(name);
|
||||
}
|
||||
const fileUrl = isDir ? '' : `${url}${name}`;
|
||||
const date = m[2];
|
||||
const size = m[3];
|
||||
const date = m[2] === '-' ? '-' : dayjs(m[2]).format('DD-MMM-YYYY HH:mm');
|
||||
const size = m[3].trim();
|
||||
if (size === '0') continue;
|
||||
if (binaryConfig.ignoreFiles?.includes(`${dir}${name}`)) continue;
|
||||
|
||||
items.push({
|
||||
const item = {
|
||||
name,
|
||||
isDir,
|
||||
url: fileUrl,
|
||||
size,
|
||||
date,
|
||||
ignoreDownloadStatuses: binaryConfig.options?.ignoreDownloadStatuses,
|
||||
});
|
||||
};
|
||||
items.push(item);
|
||||
}
|
||||
return { items, nextParams: null };
|
||||
}
|
||||
|
||||
@@ -1,8 +1,15 @@
|
||||
import { SingletonProto } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import binaries, { BinaryName } from '../../../../config/binaries';
|
||||
import { join } from 'path';
|
||||
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
|
||||
import { join } from 'node:path';
|
||||
|
||||
import { SingletonProto } from 'egg';
|
||||
|
||||
import binaries, { type BinaryName } from '../../../../config/binaries.ts';
|
||||
import { BinaryType } from '../../enum/Binary.ts';
|
||||
import {
|
||||
AbstractBinary,
|
||||
BinaryAdapter,
|
||||
type BinaryItem,
|
||||
type FetchResult,
|
||||
} from './AbstractBinary.ts';
|
||||
|
||||
@SingletonProto()
|
||||
@BinaryAdapter(BinaryType.NodePreGyp)
|
||||
@@ -13,7 +20,10 @@ export class NodePreGypBinary extends AbstractBinary {
|
||||
}
|
||||
|
||||
// https://github.com/mapbox/node-pre-gyp
|
||||
async fetch(dir: string, binaryName: BinaryName): Promise<FetchResult | undefined> {
|
||||
async fetch(
|
||||
dir: string,
|
||||
binaryName: BinaryName
|
||||
): Promise<FetchResult | undefined> {
|
||||
const binaryConfig = binaries[binaryName];
|
||||
const npmPackageName = binaryConfig.options?.npmPackageName ?? binaryName;
|
||||
const pkgUrl = `https://registry.npmjs.com/${npmPackageName}`;
|
||||
@@ -33,20 +43,28 @@ export class NodePreGypBinary extends AbstractBinary {
|
||||
if (!pkgVersion.binary) continue;
|
||||
// https://github.com/mapbox/node-pre-gyp#package_name
|
||||
// defaults to {module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz
|
||||
let binaryFile = pkgVersion.binary.package_name
|
||||
|| '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz';
|
||||
let binaryFile =
|
||||
pkgVersion.binary.package_name ||
|
||||
'{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz';
|
||||
if (!binaryFile) continue;
|
||||
const moduleName = pkgVersion.binary.module_name || pkgVersion.name;
|
||||
binaryFile = binaryFile.replace('{version}', version)
|
||||
binaryFile = binaryFile
|
||||
.replace('{version}', version)
|
||||
.replace('{module_name}', moduleName);
|
||||
|
||||
let currentDir = dirItems['/'];
|
||||
let versionPrefix = '';
|
||||
let remotePath = pkgVersion.binary.remote_path;
|
||||
const napiVersions = pkgVersion.binary.napi_versions ?? [];
|
||||
if (binaryConfig.options?.requiredNapiVersions && napiVersions.length === 0) continue;
|
||||
if (
|
||||
binaryConfig.options?.requiredNapiVersions &&
|
||||
napiVersions.length === 0
|
||||
)
|
||||
continue;
|
||||
if (remotePath?.includes('{version}')) {
|
||||
const dirName = remotePath.includes('v{version}') ? `v${version}` : version;
|
||||
const dirName = remotePath.includes('v{version}')
|
||||
? `v${version}`
|
||||
: version;
|
||||
versionPrefix = `/${dirName}`;
|
||||
dirItems['/'].push({
|
||||
name: `${dirName}/`,
|
||||
@@ -55,7 +73,8 @@ export class NodePreGypBinary extends AbstractBinary {
|
||||
isDir: true,
|
||||
url: '',
|
||||
});
|
||||
currentDir = dirItems[`/${dirName}/`] = [];
|
||||
currentDir = [];
|
||||
dirItems[`/${dirName}/`] = currentDir;
|
||||
}
|
||||
|
||||
// https://node-precompiled-binaries.grpc.io/?delimiter=/&prefix=grpc/v1.24.11/
|
||||
@@ -67,17 +86,20 @@ export class NodePreGypBinary extends AbstractBinary {
|
||||
// "remote_path": "{name}/v{version}",
|
||||
// "package_name": "{node_abi}-{platform}-{arch}-{libc}.tar.gz"
|
||||
// },
|
||||
if (binaryFile.includes('{node_abi}')
|
||||
&& binaryFile.includes('{platform}')
|
||||
&& binaryFile.includes('{arch}')
|
||||
&& binaryFile.includes('{libc}')) {
|
||||
if (
|
||||
binaryFile.includes('{node_abi}') &&
|
||||
binaryFile.includes('{platform}') &&
|
||||
binaryFile.includes('{arch}') &&
|
||||
binaryFile.includes('{libc}')
|
||||
) {
|
||||
for (const nodeAbi of nodeABIVersions) {
|
||||
for (const platform of nodePlatforms) {
|
||||
const archs = nodeArchs[platform];
|
||||
const libcs = nodeLibcs[platform];
|
||||
for (const arch of archs) {
|
||||
for (const libc of libcs) {
|
||||
const name = binaryFile.replace('{node_abi}', `node-v${nodeAbi}`)
|
||||
const name = binaryFile
|
||||
.replace('{node_abi}', `node-v${nodeAbi}`)
|
||||
.replace('{platform}', platform)
|
||||
.replace('{arch}', arch)
|
||||
.replace('{libc}', libc);
|
||||
@@ -87,20 +109,23 @@ export class NodePreGypBinary extends AbstractBinary {
|
||||
size: '-',
|
||||
isDir: false,
|
||||
url: `${binaryConfig.distUrl}/${binaryName}${versionPrefix}/${name}`,
|
||||
ignoreDownloadStatuses: [ 404 ],
|
||||
ignoreDownloadStatuses: [404],
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (binaryFile.includes('{node_abi}')
|
||||
&& binaryFile.includes('{platform}')
|
||||
&& binaryFile.includes('{arch}')) {
|
||||
} else if (
|
||||
binaryFile.includes('{node_abi}') &&
|
||||
binaryFile.includes('{platform}') &&
|
||||
binaryFile.includes('{arch}')
|
||||
) {
|
||||
for (const nodeAbi of nodeABIVersions) {
|
||||
for (const platform of nodePlatforms) {
|
||||
const archs = nodeArchs[platform];
|
||||
for (const arch of archs) {
|
||||
const name = binaryFile.replace('{node_abi}', `node-v${nodeAbi}`)
|
||||
const name = binaryFile
|
||||
.replace('{node_abi}', `node-v${nodeAbi}`)
|
||||
.replace('{platform}', platform)
|
||||
.replace('{arch}', arch);
|
||||
currentDir.push({
|
||||
@@ -109,12 +134,15 @@ export class NodePreGypBinary extends AbstractBinary {
|
||||
size: '-',
|
||||
isDir: false,
|
||||
url: `${binaryConfig.distUrl}/${binaryName}${versionPrefix}/${name}`,
|
||||
ignoreDownloadStatuses: [ 404 ],
|
||||
ignoreDownloadStatuses: [404],
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (binaryFile.includes('{platform}-{arch}-{node_napi_label}-{libc}') && napiVersions.length > 0) {
|
||||
} else if (
|
||||
binaryFile.includes('{platform}-{arch}-{node_napi_label}-{libc}') &&
|
||||
napiVersions.length > 0
|
||||
) {
|
||||
// https://skia-canvas.s3.us-east-1.amazonaws.com/v0.9.30/darwin-arm64-napi-v6-unknown.tar.gz
|
||||
// https://github.com/samizdatco/skia-canvas/blob/2a75801d7cce3b4e4e6ad015a173daefaa8465e6/package.json#L48
|
||||
// "binary": {
|
||||
@@ -133,7 +161,8 @@ export class NodePreGypBinary extends AbstractBinary {
|
||||
for (const arch of archs) {
|
||||
for (const libc of libcs) {
|
||||
for (const napiVersion of napiVersions) {
|
||||
const name = binaryFile.replace('{platform}', platform)
|
||||
const name = binaryFile
|
||||
.replace('{platform}', platform)
|
||||
.replace('{arch}', arch)
|
||||
.replace('{node_napi_label}', `napi-v${napiVersion}`)
|
||||
.replace('{libc}', libc);
|
||||
@@ -143,7 +172,7 @@ export class NodePreGypBinary extends AbstractBinary {
|
||||
size: '-',
|
||||
isDir: false,
|
||||
url: `${binaryConfig.distUrl}${versionPrefix}/${name}`,
|
||||
ignoreDownloadStatuses: [ 404, 403 ],
|
||||
ignoreDownloadStatuses: [404, 403],
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -165,10 +194,12 @@ export class NodePreGypBinary extends AbstractBinary {
|
||||
const archs = nodeArchs[platform];
|
||||
for (const arch of archs) {
|
||||
for (const napiVersion of napiVersions) {
|
||||
const binaryFileName = binaryFile.replace('{platform}', platform)
|
||||
const binaryFileName = binaryFile
|
||||
.replace('{platform}', platform)
|
||||
.replace('{arch}', arch)
|
||||
.replace('{node_napi_label}', napiVersion);
|
||||
remotePath = remotePath.replace('{module_name}', moduleName)
|
||||
remotePath = remotePath
|
||||
.replace('{module_name}', moduleName)
|
||||
.replace('{name}', binaryName)
|
||||
.replace('{version}', version)
|
||||
.replace('{configuration}', 'Release');
|
||||
@@ -180,12 +211,15 @@ export class NodePreGypBinary extends AbstractBinary {
|
||||
size: '-',
|
||||
isDir: false,
|
||||
url: remoteUrl,
|
||||
ignoreDownloadStatuses: [ 404 ],
|
||||
ignoreDownloadStatuses: [404],
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (binaryFile.includes('{platform}') && binaryFile.includes('{arch}')) {
|
||||
} else if (
|
||||
binaryFile.includes('{platform}') &&
|
||||
binaryFile.includes('{arch}')
|
||||
) {
|
||||
// https://github.com/grpc/grpc-node/blob/master/packages/grpc-tools/package.json#L29
|
||||
// "binary": {
|
||||
// "module_name": "grpc_tools",
|
||||
@@ -205,9 +239,11 @@ export class NodePreGypBinary extends AbstractBinary {
|
||||
for (const platform of nodePlatforms) {
|
||||
const archs = nodeArchs[platform];
|
||||
for (const arch of archs) {
|
||||
const binaryFileName = binaryFile.replace('{platform}', platform)
|
||||
const binaryFileName = binaryFile
|
||||
.replace('{platform}', platform)
|
||||
.replace('{arch}', arch);
|
||||
remotePath = remotePath.replace('{module_name}', moduleName)
|
||||
remotePath = remotePath
|
||||
.replace('{module_name}', moduleName)
|
||||
.replace('{name}', binaryName)
|
||||
.replace('{version}', version)
|
||||
.replace('{configuration}', 'Release');
|
||||
@@ -219,7 +255,7 @@ export class NodePreGypBinary extends AbstractBinary {
|
||||
size: '-',
|
||||
isDir: false,
|
||||
url: remoteUrl,
|
||||
ignoreDownloadStatuses: [ 404 ],
|
||||
ignoreDownloadStatuses: [404],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,13 @@
|
||||
import { SingletonProto } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import binaries from '../../../../config/binaries';
|
||||
import { FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
|
||||
import { BucketBinary } from './BucketBinary';
|
||||
import { SingletonProto } from 'egg';
|
||||
|
||||
import binaries from '../../../../config/binaries.ts';
|
||||
import { BinaryType } from '../../enum/Binary.ts';
|
||||
import {
|
||||
BinaryAdapter,
|
||||
type BinaryItem,
|
||||
type FetchResult,
|
||||
} from './AbstractBinary.ts';
|
||||
import { BucketBinary } from './BucketBinary.ts';
|
||||
|
||||
@SingletonProto()
|
||||
@BinaryAdapter(BinaryType.Nwjs)
|
||||
@@ -13,8 +18,10 @@ export class NwjsBinary extends BucketBinary {
|
||||
const binaryConfig = binaries.nwjs;
|
||||
const isRootDir = dir === '/';
|
||||
// /foo/ => foo/
|
||||
const subDir = dir.substring(1);
|
||||
const url = isRootDir ? binaryConfig.distUrl : `${this.s3Url}${encodeURIComponent(subDir)}`;
|
||||
const subDir = dir.slice(1);
|
||||
const url = isRootDir
|
||||
? binaryConfig.distUrl
|
||||
: `${this.s3Url}${encodeURIComponent(subDir)}`;
|
||||
const xml = await this.requestXml(url);
|
||||
if (!xml) return;
|
||||
|
||||
@@ -25,7 +32,8 @@ export class NwjsBinary extends BucketBinary {
|
||||
// <tr><td valign="top"><img src="/icons/folder.gif" alt="[DIR]"></td><td><a href="v0.15.0-rc1/">v0.15.0-rc1/</a></td><td align="right">06-May-2016 12:24 </td><td align="right"> - </td><td> </td></tr>
|
||||
// <tr><td valign="top"><img src="/icons/folder.gif" alt="[DIR]"></td><td><a href="v0.15.0-rc2/">v0.15.0-rc2/</a></td><td align="right">13-May-2016 20:13 </td><td align="right"> - </td><td> </td></tr>
|
||||
const items: BinaryItem[] = [];
|
||||
const re = /<td><a [^>]+?>([^<]+?\/)<\/a><\/td><td [^>]+?>([^>]+?)<\/td>/ig;
|
||||
const re =
|
||||
/<td><a [^>]+?>([^<]+?\/)<\/a><\/td><td [^>]+?>([^>]+?)<\/td>/gi;
|
||||
const matchs = xml.matchAll(re);
|
||||
for (const m of matchs) {
|
||||
const name = m[1].trim();
|
||||
|
||||
@@ -1,9 +1,15 @@
|
||||
import util from 'node:util';
|
||||
import path from 'node:path';
|
||||
|
||||
import { AbstractBinary, BinaryAdapter, BinaryItem, FetchResult } from './AbstractBinary';
|
||||
import util from 'util';
|
||||
import path from 'path';
|
||||
import { SingletonProto } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import { SingletonProto } from 'egg';
|
||||
|
||||
import { BinaryType } from '../../enum/Binary.ts';
|
||||
import {
|
||||
AbstractBinary,
|
||||
BinaryAdapter,
|
||||
type BinaryItem,
|
||||
type FetchResult,
|
||||
} from './AbstractBinary.ts';
|
||||
|
||||
const PACKAGE_URL = 'https://registry.npmjs.com/playwright-core';
|
||||
const DOWNLOAD_HOST = 'https://playwright.azureedge.net/';
|
||||
@@ -11,183 +17,340 @@ const DOWNLOAD_HOST = 'https://playwright.azureedge.net/';
|
||||
// https://github.com/microsoft/playwright/blob/main/packages/playwright-core/src/server/registry/index.ts
|
||||
/* eslint-disable quote-props */
|
||||
const DOWNLOAD_PATHS = {
|
||||
'chromium': {
|
||||
chromium: {
|
||||
'<unknown>': undefined,
|
||||
'generic-linux': 'builds/chromium/%s/chromium-linux.zip',
|
||||
'generic-linux-arm64': 'builds/chromium/%s/chromium-linux-arm64.zip',
|
||||
'ubuntu18.04': 'builds/chromium/%s/chromium-linux.zip',
|
||||
'ubuntu20.04': 'builds/chromium/%s/chromium-linux.zip',
|
||||
'ubuntu22.04': 'builds/chromium/%s/chromium-linux.zip',
|
||||
'ubuntu18.04-arm64': 'builds/chromium/%s/chromium-linux-arm64.zip',
|
||||
'ubuntu18.04-x64': undefined,
|
||||
'ubuntu20.04-x64': 'builds/chromium/%s/chromium-linux.zip',
|
||||
'ubuntu22.04-x64': 'builds/chromium/%s/chromium-linux.zip',
|
||||
'ubuntu24.04-x64': 'builds/chromium/%s/chromium-linux.zip',
|
||||
'ubuntu18.04-arm64': undefined,
|
||||
'ubuntu20.04-arm64': 'builds/chromium/%s/chromium-linux-arm64.zip',
|
||||
'ubuntu22.04-arm64': 'builds/chromium/%s/chromium-linux-arm64.zip',
|
||||
'debian11': 'builds/chromium/%s/chromium-linux.zip',
|
||||
'ubuntu24.04-arm64': 'builds/chromium/%s/chromium-linux-arm64.zip',
|
||||
'debian11-x64': 'builds/chromium/%s/chromium-linux.zip',
|
||||
'debian11-arm64': 'builds/chromium/%s/chromium-linux-arm64.zip',
|
||||
'debian12': 'builds/chromium/%s/chromium-linux.zip',
|
||||
'debian12-x64': 'builds/chromium/%s/chromium-linux.zip',
|
||||
'debian12-arm64': 'builds/chromium/%s/chromium-linux-arm64.zip',
|
||||
'mac10.13': 'builds/chromium/%s/chromium-mac.zip',
|
||||
'mac10.14': 'builds/chromium/%s/chromium-mac.zip',
|
||||
'mac10.15': 'builds/chromium/%s/chromium-mac.zip',
|
||||
'mac11': 'builds/chromium/%s/chromium-mac.zip',
|
||||
mac11: 'builds/chromium/%s/chromium-mac.zip',
|
||||
'mac11-arm64': 'builds/chromium/%s/chromium-mac-arm64.zip',
|
||||
'mac12': 'builds/chromium/%s/chromium-mac.zip',
|
||||
mac12: 'builds/chromium/%s/chromium-mac.zip',
|
||||
'mac12-arm64': 'builds/chromium/%s/chromium-mac-arm64.zip',
|
||||
'mac13': 'builds/chromium/%s/chromium-mac.zip',
|
||||
mac13: 'builds/chromium/%s/chromium-mac.zip',
|
||||
'mac13-arm64': 'builds/chromium/%s/chromium-mac-arm64.zip',
|
||||
'win64': 'builds/chromium/%s/chromium-win64.zip',
|
||||
mac14: 'builds/chromium/%s/chromium-mac.zip',
|
||||
'mac14-arm64': 'builds/chromium/%s/chromium-mac-arm64.zip',
|
||||
mac15: 'builds/chromium/%s/chromium-mac.zip',
|
||||
'mac15-arm64': 'builds/chromium/%s/chromium-mac-arm64.zip',
|
||||
win64: 'builds/chromium/%s/chromium-win64.zip',
|
||||
},
|
||||
'chromium-headless-shell': {
|
||||
'<unknown>': undefined,
|
||||
'ubuntu18.04-x64': undefined,
|
||||
'ubuntu20.04-x64': 'builds/chromium/%s/chromium-headless-shell-linux.zip',
|
||||
'ubuntu22.04-x64': 'builds/chromium/%s/chromium-headless-shell-linux.zip',
|
||||
'ubuntu24.04-x64': 'builds/chromium/%s/chromium-headless-shell-linux.zip',
|
||||
'ubuntu18.04-arm64': undefined,
|
||||
'ubuntu20.04-arm64':
|
||||
'builds/chromium/%s/chromium-headless-shell-linux-arm64.zip',
|
||||
'ubuntu22.04-arm64':
|
||||
'builds/chromium/%s/chromium-headless-shell-linux-arm64.zip',
|
||||
'ubuntu24.04-arm64':
|
||||
'builds/chromium/%s/chromium-headless-shell-linux-arm64.zip',
|
||||
'debian11-x64': 'builds/chromium/%s/chromium-headless-shell-linux.zip',
|
||||
'debian11-arm64':
|
||||
'builds/chromium/%s/chromium-headless-shell-linux-arm64.zip',
|
||||
'debian12-x64': 'builds/chromium/%s/chromium-headless-shell-linux.zip',
|
||||
'debian12-arm64':
|
||||
'builds/chromium/%s/chromium-headless-shell-linux-arm64.zip',
|
||||
'mac10.13': undefined,
|
||||
'mac10.14': undefined,
|
||||
'mac10.15': undefined,
|
||||
mac11: 'builds/chromium/%s/chromium-headless-shell-mac.zip',
|
||||
'mac11-arm64': 'builds/chromium/%s/chromium-headless-shell-mac-arm64.zip',
|
||||
mac12: 'builds/chromium/%s/chromium-headless-shell-mac.zip',
|
||||
'mac12-arm64': 'builds/chromium/%s/chromium-headless-shell-mac-arm64.zip',
|
||||
mac13: 'builds/chromium/%s/chromium-headless-shell-mac.zip',
|
||||
'mac13-arm64': 'builds/chromium/%s/chromium-headless-shell-mac-arm64.zip',
|
||||
mac14: 'builds/chromium/%s/chromium-headless-shell-mac.zip',
|
||||
'mac14-arm64': 'builds/chromium/%s/chromium-headless-shell-mac-arm64.zip',
|
||||
mac15: 'builds/chromium/%s/chromium-headless-shell-mac.zip',
|
||||
'mac15-arm64': 'builds/chromium/%s/chromium-headless-shell-mac-arm64.zip',
|
||||
win64: 'builds/chromium/%s/chromium-headless-shell-win64.zip',
|
||||
},
|
||||
'chromium-tip-of-tree': {
|
||||
'<unknown>': undefined,
|
||||
'generic-linux': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
|
||||
'generic-linux-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
|
||||
'ubuntu18.04': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
|
||||
'ubuntu20.04': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
|
||||
'ubuntu22.04': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
|
||||
'ubuntu18.04-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
|
||||
'ubuntu20.04-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
|
||||
'ubuntu22.04-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
|
||||
'debian11': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
|
||||
'debian11-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
|
||||
'debian12': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
|
||||
'debian12-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
|
||||
'ubuntu18.04-x64': undefined,
|
||||
'ubuntu20.04-x64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
|
||||
'ubuntu22.04-x64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
|
||||
'ubuntu24.04-x64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
|
||||
'ubuntu18.04-arm64': undefined,
|
||||
'ubuntu20.04-arm64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
|
||||
'ubuntu22.04-arm64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
|
||||
'ubuntu24.04-arm64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
|
||||
'debian11-x64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
|
||||
'debian11-arm64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
|
||||
'debian12-x64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
|
||||
'debian12-arm64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
|
||||
'mac10.13': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
|
||||
'mac10.14': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
|
||||
'mac10.15': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
|
||||
'mac11': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
|
||||
'mac11-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
|
||||
'mac12': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
|
||||
'mac12-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
|
||||
'mac13': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
|
||||
'mac13-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
|
||||
'win64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-win64.zip',
|
||||
mac11: 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
|
||||
'mac11-arm64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
|
||||
mac12: 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
|
||||
'mac12-arm64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
|
||||
mac13: 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
|
||||
'mac13-arm64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
|
||||
mac14: 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
|
||||
'mac14-arm64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
|
||||
mac15: 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
|
||||
'mac15-arm64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
|
||||
win64: 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-win64.zip',
|
||||
},
|
||||
'chromium-with-symbols': {
|
||||
'chromium-tip-of-tree-headless-shell': {
|
||||
'<unknown>': undefined,
|
||||
'generic-linux': 'builds/chromium/%s/chromium-with-symbols-linux.zip',
|
||||
'generic-linux-arm64': 'builds/chromium/%s/chromium-with-symbols-linux-arm64.zip',
|
||||
'ubuntu18.04': 'builds/chromium/%s/chromium-with-symbols-linux.zip',
|
||||
'ubuntu20.04': 'builds/chromium/%s/chromium-with-symbols-linux.zip',
|
||||
'ubuntu22.04': 'builds/chromium/%s/chromium-with-symbols-linux.zip',
|
||||
'ubuntu18.04-arm64': 'builds/chromium/%s/chromium-with-symbols-linux-arm64.zip',
|
||||
'ubuntu20.04-arm64': 'builds/chromium/%s/chromium-with-symbols-linux-arm64.zip',
|
||||
'ubuntu22.04-arm64': 'builds/chromium/%s/chromium-with-symbols-linux-arm64.zip',
|
||||
'debian11': 'builds/chromium/%s/chromium-with-symbols-linux.zip',
|
||||
'debian11-arm64': 'builds/chromium/%s/chromium-with-symbols-linux-arm64.zip',
|
||||
'debian12': 'builds/chromium/%s/chromium-with-symbols-linux.zip',
|
||||
'debian12-arm64': 'builds/chromium/%s/chromium-with-symbols-linux-arm64.zip',
|
||||
'mac10.13': 'builds/chromium/%s/chromium-with-symbols-mac.zip',
|
||||
'mac10.14': 'builds/chromium/%s/chromium-with-symbols-mac.zip',
|
||||
'mac10.15': 'builds/chromium/%s/chromium-with-symbols-mac.zip',
|
||||
'mac11': 'builds/chromium/%s/chromium-with-symbols-mac.zip',
|
||||
'mac11-arm64': 'builds/chromium/%s/chromium-with-symbols-mac-arm64.zip',
|
||||
'mac12': 'builds/chromium/%s/chromium-with-symbols-mac.zip',
|
||||
'mac12-arm64': 'builds/chromium/%s/chromium-with-symbols-mac-arm64.zip',
|
||||
'mac13': 'builds/chromium/%s/chromium-with-symbols-mac.zip',
|
||||
'mac13-arm64': 'builds/chromium/%s/chromium-with-symbols-mac-arm64.zip',
|
||||
'win64': 'builds/chromium/%s/chromium-with-symbols-win64.zip',
|
||||
'ubuntu18.04-x64': undefined,
|
||||
'ubuntu20.04-x64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux.zip',
|
||||
'ubuntu22.04-x64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux.zip',
|
||||
'ubuntu24.04-x64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux.zip',
|
||||
'ubuntu18.04-arm64': undefined,
|
||||
'ubuntu20.04-arm64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux-arm64.zip',
|
||||
'ubuntu22.04-arm64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux-arm64.zip',
|
||||
'ubuntu24.04-arm64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux-arm64.zip',
|
||||
'debian11-x64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux.zip',
|
||||
'debian11-arm64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux-arm64.zip',
|
||||
'debian12-x64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux.zip',
|
||||
'debian12-arm64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux-arm64.zip',
|
||||
'mac10.13': undefined,
|
||||
'mac10.14': undefined,
|
||||
'mac10.15': undefined,
|
||||
mac11:
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac.zip',
|
||||
'mac11-arm64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac-arm64.zip',
|
||||
mac12:
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac.zip',
|
||||
'mac12-arm64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac-arm64.zip',
|
||||
mac13:
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac.zip',
|
||||
'mac13-arm64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac-arm64.zip',
|
||||
mac14:
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac.zip',
|
||||
'mac14-arm64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac-arm64.zip',
|
||||
mac15:
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac.zip',
|
||||
'mac15-arm64':
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac-arm64.zip',
|
||||
win64:
|
||||
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-win64.zip',
|
||||
},
|
||||
'firefox': {
|
||||
firefox: {
|
||||
'<unknown>': undefined,
|
||||
'generic-linux': 'builds/firefox/%s/firefox-ubuntu-20.04.zip',
|
||||
'generic-linux-arm64': 'builds/firefox/%s/firefox-ubuntu-20.04-arm64.zip',
|
||||
'ubuntu18.04': 'builds/firefox/%s/firefox-ubuntu-18.04.zip',
|
||||
'ubuntu20.04': 'builds/firefox/%s/firefox-ubuntu-20.04.zip',
|
||||
'ubuntu22.04': 'builds/firefox/%s/firefox-ubuntu-22.04.zip',
|
||||
'ubuntu18.04-x64': undefined,
|
||||
'ubuntu20.04-x64': 'builds/firefox/%s/firefox-ubuntu-20.04.zip',
|
||||
'ubuntu22.04-x64': 'builds/firefox/%s/firefox-ubuntu-22.04.zip',
|
||||
'ubuntu24.04-x64': 'builds/firefox/%s/firefox-ubuntu-24.04.zip',
|
||||
'ubuntu18.04-arm64': undefined,
|
||||
'ubuntu20.04-arm64': 'builds/firefox/%s/firefox-ubuntu-20.04-arm64.zip',
|
||||
'ubuntu22.04-arm64': 'builds/firefox/%s/firefox-ubuntu-22.04-arm64.zip',
|
||||
'debian11': 'builds/firefox/%s/firefox-debian-11.zip',
|
||||
'ubuntu24.04-arm64': 'builds/firefox/%s/firefox-ubuntu-24.04-arm64.zip',
|
||||
'debian11-x64': 'builds/firefox/%s/firefox-debian-11.zip',
|
||||
'debian11-arm64': 'builds/firefox/%s/firefox-debian-11-arm64.zip',
|
||||
'debian12': undefined,
|
||||
'debian12-arm64': undefined,
|
||||
'mac10.13': 'builds/firefox/%s/firefox-mac-13.zip',
|
||||
'mac10.14': 'builds/firefox/%s/firefox-mac-13.zip',
|
||||
'mac10.15': 'builds/firefox/%s/firefox-mac-13.zip',
|
||||
'mac11': 'builds/firefox/%s/firefox-mac-13.zip',
|
||||
'mac11-arm64': 'builds/firefox/%s/firefox-mac-13-arm64.zip',
|
||||
'mac12': 'builds/firefox/%s/firefox-mac-13.zip',
|
||||
'mac12-arm64': 'builds/firefox/%s/firefox-mac-13-arm64.zip',
|
||||
'mac13': 'builds/firefox/%s/firefox-mac-13.zip',
|
||||
'mac13-arm64': 'builds/firefox/%s/firefox-mac-13-arm64.zip',
|
||||
'win64': 'builds/firefox/%s/firefox-win64.zip',
|
||||
'debian12-x64': 'builds/firefox/%s/firefox-debian-12.zip',
|
||||
'debian12-arm64': 'builds/firefox/%s/firefox-debian-12-arm64.zip',
|
||||
'mac10.13': 'builds/firefox/%s/firefox-mac.zip',
|
||||
'mac10.14': 'builds/firefox/%s/firefox-mac.zip',
|
||||
'mac10.15': 'builds/firefox/%s/firefox-mac.zip',
|
||||
mac11: 'builds/firefox/%s/firefox-mac.zip',
|
||||
'mac11-arm64': 'builds/firefox/%s/firefox-mac-arm64.zip',
|
||||
mac12: 'builds/firefox/%s/firefox-mac.zip',
|
||||
'mac12-arm64': 'builds/firefox/%s/firefox-mac-arm64.zip',
|
||||
mac13: 'builds/firefox/%s/firefox-mac.zip',
|
||||
'mac13-arm64': 'builds/firefox/%s/firefox-mac-arm64.zip',
|
||||
mac14: 'builds/firefox/%s/firefox-mac.zip',
|
||||
'mac14-arm64': 'builds/firefox/%s/firefox-mac-arm64.zip',
|
||||
mac15: 'builds/firefox/%s/firefox-mac.zip',
|
||||
'mac15-arm64': 'builds/firefox/%s/firefox-mac-arm64.zip',
|
||||
win64: 'builds/firefox/%s/firefox-win64.zip',
|
||||
},
|
||||
'firefox-beta': {
|
||||
'<unknown>': undefined,
|
||||
'generic-linux': 'builds/firefox-beta/%s/firefox-beta-ubuntu-20.04.zip',
|
||||
'generic-linux-arm64': undefined,
|
||||
'ubuntu18.04': 'builds/firefox-beta/%s/firefox-beta-ubuntu-18.04.zip',
|
||||
'ubuntu20.04': 'builds/firefox-beta/%s/firefox-beta-ubuntu-20.04.zip',
|
||||
'ubuntu22.04': 'builds/firefox-beta/%s/firefox-beta-ubuntu-22.04.zip',
|
||||
'ubuntu18.04-x64': undefined,
|
||||
'ubuntu20.04-x64': 'builds/firefox-beta/%s/firefox-beta-ubuntu-20.04.zip',
|
||||
'ubuntu22.04-x64': 'builds/firefox-beta/%s/firefox-beta-ubuntu-22.04.zip',
|
||||
'ubuntu24.04-x64': 'builds/firefox-beta/%s/firefox-beta-ubuntu-24.04.zip',
|
||||
'ubuntu18.04-arm64': undefined,
|
||||
'ubuntu20.04-arm64': undefined,
|
||||
'ubuntu22.04-arm64': 'builds/firefox-beta/%s/firefox-beta-ubuntu-22.04-arm64.zip',
|
||||
'debian11': 'builds/firefox-beta/%s/firefox-beta-debian-11.zip',
|
||||
'ubuntu22.04-arm64':
|
||||
'builds/firefox-beta/%s/firefox-beta-ubuntu-22.04-arm64.zip',
|
||||
'ubuntu24.04-arm64':
|
||||
'builds/firefox-beta/%s/firefox-beta-ubuntu-24.04-arm64.zip',
|
||||
'debian11-x64': 'builds/firefox-beta/%s/firefox-beta-debian-11.zip',
|
||||
'debian11-arm64': 'builds/firefox-beta/%s/firefox-beta-debian-11-arm64.zip',
|
||||
'debian12': undefined,
|
||||
'debian12-arm64': undefined,
|
||||
'mac10.13': 'builds/firefox-beta/%s/firefox-beta-mac-13.zip',
|
||||
'mac10.14': 'builds/firefox-beta/%s/firefox-beta-mac-13.zip',
|
||||
'mac10.15': 'builds/firefox-beta/%s/firefox-beta-mac-13.zip',
|
||||
'mac11': 'builds/firefox-beta/%s/firefox-beta-mac-13.zip',
|
||||
'mac11-arm64': 'builds/firefox-beta/%s/firefox-beta-mac-13-arm64.zip',
|
||||
'mac12': 'builds/firefox-beta/%s/firefox-beta-mac-13.zip',
|
||||
'mac12-arm64': 'builds/firefox-beta/%s/firefox-beta-mac-13-arm64.zip',
|
||||
'mac13': 'builds/firefox-beta/%s/firefox-beta-mac-13.zip',
|
||||
'mac13-arm64': 'builds/firefox-beta/%s/firefox-beta-mac-13-arm64.zip',
|
||||
'win64': 'builds/firefox-beta/%s/firefox-beta-win64.zip',
|
||||
'debian12-x64': 'builds/firefox-beta/%s/firefox-beta-debian-12.zip',
|
||||
'debian12-arm64': 'builds/firefox-beta/%s/firefox-beta-debian-12-arm64.zip',
|
||||
'mac10.13': 'builds/firefox-beta/%s/firefox-beta-mac.zip',
|
||||
'mac10.14': 'builds/firefox-beta/%s/firefox-beta-mac.zip',
|
||||
'mac10.15': 'builds/firefox-beta/%s/firefox-beta-mac.zip',
|
||||
mac11: 'builds/firefox-beta/%s/firefox-beta-mac.zip',
|
||||
'mac11-arm64': 'builds/firefox-beta/%s/firefox-beta-mac-arm64.zip',
|
||||
mac12: 'builds/firefox-beta/%s/firefox-beta-mac.zip',
|
||||
'mac12-arm64': 'builds/firefox-beta/%s/firefox-beta-mac-arm64.zip',
|
||||
mac13: 'builds/firefox-beta/%s/firefox-beta-mac.zip',
|
||||
'mac13-arm64': 'builds/firefox-beta/%s/firefox-beta-mac-arm64.zip',
|
||||
mac14: 'builds/firefox-beta/%s/firefox-beta-mac.zip',
|
||||
'mac14-arm64': 'builds/firefox-beta/%s/firefox-beta-mac-arm64.zip',
|
||||
mac15: 'builds/firefox-beta/%s/firefox-beta-mac.zip',
|
||||
'mac15-arm64': 'builds/firefox-beta/%s/firefox-beta-mac-arm64.zip',
|
||||
win64: 'builds/firefox-beta/%s/firefox-beta-win64.zip',
|
||||
},
|
||||
'webkit': {
|
||||
webkit: {
|
||||
'<unknown>': undefined,
|
||||
'generic-linux': 'builds/webkit/%s/webkit-ubuntu-20.04.zip',
|
||||
'generic-linux-arm64': 'builds/webkit/%s/webkit-ubuntu-20.04-arm64.zip',
|
||||
'ubuntu18.04': 'builds/deprecated-webkit-ubuntu-18.04/%s/deprecated-webkit-ubuntu-18.04.zip',
|
||||
'ubuntu20.04': 'builds/webkit/%s/webkit-ubuntu-20.04.zip',
|
||||
'ubuntu22.04': 'builds/webkit/%s/webkit-ubuntu-22.04.zip',
|
||||
'ubuntu18.04-x64': undefined,
|
||||
'ubuntu20.04-x64': 'builds/webkit/%s/webkit-ubuntu-20.04.zip',
|
||||
'ubuntu22.04-x64': 'builds/webkit/%s/webkit-ubuntu-22.04.zip',
|
||||
'ubuntu24.04-x64': 'builds/webkit/%s/webkit-ubuntu-24.04.zip',
|
||||
'ubuntu18.04-arm64': undefined,
|
||||
'ubuntu20.04-arm64': 'builds/webkit/%s/webkit-ubuntu-20.04-arm64.zip',
|
||||
'ubuntu22.04-arm64': 'builds/webkit/%s/webkit-ubuntu-22.04-arm64.zip',
|
||||
'debian11': 'builds/webkit/%s/webkit-debian-11.zip',
|
||||
'ubuntu24.04-arm64': 'builds/webkit/%s/webkit-ubuntu-24.04-arm64.zip',
|
||||
'debian11-x64': 'builds/webkit/%s/webkit-debian-11.zip',
|
||||
'debian11-arm64': 'builds/webkit/%s/webkit-debian-11-arm64.zip',
|
||||
'debian12': undefined,
|
||||
'debian12-arm64': undefined,
|
||||
'debian12-x64': 'builds/webkit/%s/webkit-debian-12.zip',
|
||||
'debian12-arm64': 'builds/webkit/%s/webkit-debian-12-arm64.zip',
|
||||
'mac10.13': undefined,
|
||||
'mac10.14': 'builds/deprecated-webkit-mac-10.14/%s/deprecated-webkit-mac-10.14.zip',
|
||||
'mac10.15': 'builds/deprecated-webkit-mac-10.15/%s/deprecated-webkit-mac-10.15.zip',
|
||||
'mac11': 'builds/webkit/%s/webkit-mac-11.zip',
|
||||
'mac10.14':
|
||||
'builds/deprecated-webkit-mac-10.14/%s/deprecated-webkit-mac-10.14.zip',
|
||||
'mac10.15':
|
||||
'builds/deprecated-webkit-mac-10.15/%s/deprecated-webkit-mac-10.15.zip',
|
||||
mac11: 'builds/webkit/%s/webkit-mac-11.zip',
|
||||
'mac11-arm64': 'builds/webkit/%s/webkit-mac-11-arm64.zip',
|
||||
'mac12': 'builds/webkit/%s/webkit-mac-12.zip',
|
||||
mac12: 'builds/webkit/%s/webkit-mac-12.zip',
|
||||
'mac12-arm64': 'builds/webkit/%s/webkit-mac-12-arm64.zip',
|
||||
'mac13': 'builds/webkit/%s/webkit-mac-13.zip',
|
||||
mac13: 'builds/webkit/%s/webkit-mac-13.zip',
|
||||
'mac13-arm64': 'builds/webkit/%s/webkit-mac-13-arm64.zip',
|
||||
'win64': 'builds/webkit/%s/webkit-win64.zip',
|
||||
mac14: 'builds/webkit/%s/webkit-mac-14.zip',
|
||||
'mac14-arm64': 'builds/webkit/%s/webkit-mac-14-arm64.zip',
|
||||
mac15: 'builds/webkit/%s/webkit-mac-15.zip',
|
||||
'mac15-arm64': 'builds/webkit/%s/webkit-mac-15-arm64.zip',
|
||||
win64: 'builds/webkit/%s/webkit-win64.zip',
|
||||
},
|
||||
'ffmpeg': {
|
||||
ffmpeg: {
|
||||
'<unknown>': undefined,
|
||||
'generic-linux': 'builds/ffmpeg/%s/ffmpeg-linux.zip',
|
||||
'generic-linux-arm64': 'builds/ffmpeg/%s/ffmpeg-linux-arm64.zip',
|
||||
'ubuntu18.04': 'builds/ffmpeg/%s/ffmpeg-linux.zip',
|
||||
'ubuntu20.04': 'builds/ffmpeg/%s/ffmpeg-linux.zip',
|
||||
'ubuntu22.04': 'builds/ffmpeg/%s/ffmpeg-linux.zip',
|
||||
'ubuntu18.04-arm64': 'builds/ffmpeg/%s/ffmpeg-linux-arm64.zip',
|
||||
'ubuntu18.04-x64': undefined,
|
||||
'ubuntu20.04-x64': 'builds/ffmpeg/%s/ffmpeg-linux.zip',
|
||||
'ubuntu22.04-x64': 'builds/ffmpeg/%s/ffmpeg-linux.zip',
|
||||
'ubuntu24.04-x64': 'builds/ffmpeg/%s/ffmpeg-linux.zip',
|
||||
'ubuntu18.04-arm64': undefined,
|
||||
'ubuntu20.04-arm64': 'builds/ffmpeg/%s/ffmpeg-linux-arm64.zip',
|
||||
'ubuntu22.04-arm64': 'builds/ffmpeg/%s/ffmpeg-linux-arm64.zip',
|
||||
'debian11': 'builds/ffmpeg/%s/ffmpeg-linux.zip',
|
||||
'ubuntu24.04-arm64': 'builds/ffmpeg/%s/ffmpeg-linux-arm64.zip',
|
||||
'debian11-x64': 'builds/ffmpeg/%s/ffmpeg-linux.zip',
|
||||
'debian11-arm64': 'builds/ffmpeg/%s/ffmpeg-linux-arm64.zip',
|
||||
'debian12': 'builds/ffmpeg/%s/ffmpeg-linux.zip',
|
||||
'debian12-x64': 'builds/ffmpeg/%s/ffmpeg-linux.zip',
|
||||
'debian12-arm64': 'builds/ffmpeg/%s/ffmpeg-linux-arm64.zip',
|
||||
'mac10.13': 'builds/ffmpeg/%s/ffmpeg-mac.zip',
|
||||
'mac10.14': 'builds/ffmpeg/%s/ffmpeg-mac.zip',
|
||||
'mac10.15': 'builds/ffmpeg/%s/ffmpeg-mac.zip',
|
||||
'mac11': 'builds/ffmpeg/%s/ffmpeg-mac.zip',
|
||||
mac11: 'builds/ffmpeg/%s/ffmpeg-mac.zip',
|
||||
'mac11-arm64': 'builds/ffmpeg/%s/ffmpeg-mac-arm64.zip',
|
||||
'mac12': 'builds/ffmpeg/%s/ffmpeg-mac.zip',
|
||||
mac12: 'builds/ffmpeg/%s/ffmpeg-mac.zip',
|
||||
'mac12-arm64': 'builds/ffmpeg/%s/ffmpeg-mac-arm64.zip',
|
||||
'mac13': 'builds/ffmpeg/%s/ffmpeg-mac.zip',
|
||||
mac13: 'builds/ffmpeg/%s/ffmpeg-mac.zip',
|
||||
'mac13-arm64': 'builds/ffmpeg/%s/ffmpeg-mac-arm64.zip',
|
||||
'win64': 'builds/ffmpeg/%s/ffmpeg-win64.zip',
|
||||
mac14: 'builds/ffmpeg/%s/ffmpeg-mac.zip',
|
||||
'mac14-arm64': 'builds/ffmpeg/%s/ffmpeg-mac-arm64.zip',
|
||||
mac15: 'builds/ffmpeg/%s/ffmpeg-mac.zip',
|
||||
'mac15-arm64': 'builds/ffmpeg/%s/ffmpeg-mac-arm64.zip',
|
||||
win64: 'builds/ffmpeg/%s/ffmpeg-win64.zip',
|
||||
},
|
||||
'android': {
|
||||
winldd: {
|
||||
'<unknown>': undefined,
|
||||
'ubuntu18.04-x64': undefined,
|
||||
'ubuntu20.04-x64': undefined,
|
||||
'ubuntu22.04-x64': undefined,
|
||||
'ubuntu24.04-x64': undefined,
|
||||
'ubuntu18.04-arm64': undefined,
|
||||
'ubuntu20.04-arm64': undefined,
|
||||
'ubuntu22.04-arm64': undefined,
|
||||
'ubuntu24.04-arm64': undefined,
|
||||
'debian11-x64': undefined,
|
||||
'debian11-arm64': undefined,
|
||||
'debian12-x64': undefined,
|
||||
'debian12-arm64': undefined,
|
||||
'mac10.13': undefined,
|
||||
'mac10.14': undefined,
|
||||
'mac10.15': undefined,
|
||||
mac11: undefined,
|
||||
'mac11-arm64': undefined,
|
||||
mac12: undefined,
|
||||
'mac12-arm64': undefined,
|
||||
mac13: undefined,
|
||||
'mac13-arm64': undefined,
|
||||
mac14: undefined,
|
||||
'mac14-arm64': undefined,
|
||||
mac15: undefined,
|
||||
'mac15-arm64': undefined,
|
||||
win64: 'builds/winldd/%s/winldd-win64.zip',
|
||||
},
|
||||
android: {
|
||||
'<unknown>': 'builds/android/%s/android.zip',
|
||||
'ubuntu18.04-x64': undefined,
|
||||
'ubuntu20.04-x64': 'builds/android/%s/android.zip',
|
||||
'ubuntu22.04-x64': 'builds/android/%s/android.zip',
|
||||
'ubuntu24.04-x64': 'builds/android/%s/android.zip',
|
||||
'ubuntu18.04-arm64': undefined,
|
||||
'ubuntu20.04-arm64': 'builds/android/%s/android.zip',
|
||||
'ubuntu22.04-arm64': 'builds/android/%s/android.zip',
|
||||
'ubuntu24.04-arm64': 'builds/android/%s/android.zip',
|
||||
'debian11-x64': 'builds/android/%s/android.zip',
|
||||
'debian11-arm64': 'builds/android/%s/android.zip',
|
||||
'debian12-x64': 'builds/android/%s/android.zip',
|
||||
'debian12-arm64': 'builds/android/%s/android.zip',
|
||||
'mac10.13': 'builds/android/%s/android.zip',
|
||||
'mac10.14': 'builds/android/%s/android.zip',
|
||||
'mac10.15': 'builds/android/%s/android.zip',
|
||||
mac11: 'builds/android/%s/android.zip',
|
||||
'mac11-arm64': 'builds/android/%s/android.zip',
|
||||
mac12: 'builds/android/%s/android.zip',
|
||||
'mac12-arm64': 'builds/android/%s/android.zip',
|
||||
mac13: 'builds/android/%s/android.zip',
|
||||
'mac13-arm64': 'builds/android/%s/android.zip',
|
||||
mac14: 'builds/android/%s/android.zip',
|
||||
'mac14-arm64': 'builds/android/%s/android.zip',
|
||||
mac15: 'builds/android/%s/android.zip',
|
||||
'mac15-arm64': 'builds/android/%s/android.zip',
|
||||
win64: 'builds/android/%s/android.zip',
|
||||
},
|
||||
} as const;
|
||||
|
||||
@@ -203,23 +366,59 @@ export class PlaywrightBinary extends AbstractBinary {
|
||||
if (!this.dirItems) {
|
||||
const packageData = await this.requestJSON(PACKAGE_URL);
|
||||
const nowDateISO = new Date().toISOString();
|
||||
const buildDirs: BinaryItem[] = [];
|
||||
for (const browserName of Object.keys(DOWNLOAD_PATHS)) {
|
||||
if (
|
||||
browserName === 'chromium-headless-shell' ||
|
||||
browserName === 'chromium-tip-of-tree-headless-shell'
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
buildDirs.push({
|
||||
name: `${browserName}/`,
|
||||
isDir: true,
|
||||
url: '',
|
||||
size: '-',
|
||||
date: nowDateISO,
|
||||
});
|
||||
}
|
||||
this.dirItems = {
|
||||
'/': [{ name: 'builds/', isDir: true, url: '', size: '-', date: nowDateISO }],
|
||||
'/builds/': Object.keys(DOWNLOAD_PATHS).map(
|
||||
dist => ({ name: `${dist}/`, isDir: true, url: '', size: '-', date: nowDateISO })),
|
||||
...Object.fromEntries(Object.keys(DOWNLOAD_PATHS).map(dist => [ `/builds/${dist}/`, []])),
|
||||
'/': [
|
||||
{
|
||||
name: 'builds/',
|
||||
isDir: true,
|
||||
url: '',
|
||||
size: '-',
|
||||
date: nowDateISO,
|
||||
},
|
||||
],
|
||||
'/builds/': buildDirs,
|
||||
};
|
||||
for (const browserName of Object.keys(DOWNLOAD_PATHS)) {
|
||||
if (
|
||||
browserName === 'chromium-headless-shell' ||
|
||||
browserName === 'chromium-tip-of-tree-headless-shell'
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
this.dirItems[`/builds/${browserName}/`] = [];
|
||||
}
|
||||
|
||||
// Only download beta and release versions of packages to reduce amount of request
|
||||
const packageVersions = Object.keys(packageData.versions)
|
||||
.filter(version => version.match(/^(?:\d+\.\d+\.\d+)(?:-beta-\d+)?$/))
|
||||
// select recently update 20 items
|
||||
.slice(-20);
|
||||
const browsers: { name: keyof typeof DOWNLOAD_PATHS; revision: string; browserVersion: string; revisionOverrides?: Record<string, string> }[] = [];
|
||||
const browsers: {
|
||||
name: keyof typeof DOWNLOAD_PATHS;
|
||||
revision: string;
|
||||
browserVersion: string;
|
||||
revisionOverrides?: Record<string, string>;
|
||||
}[] = [];
|
||||
await Promise.all(
|
||||
packageVersions.map(version =>
|
||||
this.requestJSON(
|
||||
`https://unpkg.com/playwright-core@${version}/browsers.json`,
|
||||
`https://unpkg.com/playwright-core@${version}/browsers.json`
|
||||
)
|
||||
.then(data => {
|
||||
// browsers: [
|
||||
@@ -232,27 +431,76 @@ export class PlaywrightBinary extends AbstractBinary {
|
||||
// },
|
||||
// ]
|
||||
browsers.push(...data.browsers);
|
||||
return data;
|
||||
})
|
||||
.catch(err => {
|
||||
/* c8 ignore next 2 */
|
||||
this.logger.warn('[PlaywrightBinary.fetch:error] Playwright version %s browser data request failed: %s',
|
||||
version, err);
|
||||
}),
|
||||
),
|
||||
this.logger.warn(
|
||||
'[PlaywrightBinary.fetch:error] Playwright version %s browser data request failed: %s',
|
||||
version,
|
||||
err
|
||||
);
|
||||
})
|
||||
)
|
||||
);
|
||||
// if chromium-headless-shell not exists on browsers, copy chromium to chromium-headless-shell
|
||||
if (
|
||||
!browsers.some(browser => browser.name === 'chromium-headless-shell')
|
||||
) {
|
||||
const chromium = browsers.find(browser => browser.name === 'chromium');
|
||||
// {
|
||||
// "name": "chromium",
|
||||
// "revision": "1155",
|
||||
// "installByDefault": true,
|
||||
// "browserVersion": "133.0.6943.16"
|
||||
// }
|
||||
if (chromium) {
|
||||
browsers.push({
|
||||
...chromium,
|
||||
name: 'chromium-headless-shell',
|
||||
});
|
||||
}
|
||||
}
|
||||
// if chromium-tip-of-tree-headless-shell not exists on browsers, copy chromium-tip-of-tree to chromium-tip-of-tree-headless-shell
|
||||
if (
|
||||
!browsers.some(
|
||||
browser => browser.name === 'chromium-tip-of-tree-headless-shell'
|
||||
)
|
||||
) {
|
||||
const chromiumTipOfTree = browsers.find(
|
||||
browser => browser.name === 'chromium-tip-of-tree'
|
||||
);
|
||||
if (chromiumTipOfTree) {
|
||||
browsers.push({
|
||||
...chromiumTipOfTree,
|
||||
name: 'chromium-tip-of-tree-headless-shell',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
for (const browser of browsers) {
|
||||
const downloadPaths = DOWNLOAD_PATHS[browser.name];
|
||||
if (!downloadPaths) continue;
|
||||
for (const [ platform, remotePath ] of Object.entries(downloadPaths)) {
|
||||
let browserDirname = browser.name;
|
||||
if (browser.name === 'chromium-headless-shell') {
|
||||
// chromium-headless-shell should be under chromium
|
||||
// https://playwright.azureedge.net/builds/chromium/1155/chromium-headless-shell-mac-arm64.zip
|
||||
browserDirname = 'chromium';
|
||||
} else if (browser.name === 'chromium-tip-of-tree-headless-shell') {
|
||||
// chromium-tip-of-tree-headless-shell should be under chromium-tip-of-tree
|
||||
// https://playwright.azureedge.net/builds/chromium-tip-of-tree/1293/chromium-tip-of-tree-headless-shell-mac-arm64.zip
|
||||
browserDirname = 'chromium-tip-of-tree';
|
||||
}
|
||||
for (const [platform, remotePath] of Object.entries(downloadPaths)) {
|
||||
if (typeof remotePath !== 'string') continue;
|
||||
const revision = browser.revisionOverrides?.[platform] ?? browser.revision;
|
||||
const revision =
|
||||
browser.revisionOverrides?.[platform] ?? browser.revision;
|
||||
const itemDate = browser.browserVersion || revision;
|
||||
const url = DOWNLOAD_HOST + util.format(remotePath, revision);
|
||||
const name = path.basename(remotePath);
|
||||
const dir = `/builds/${browser.name}/${revision}/`;
|
||||
const dir = `/builds/${browserDirname}/${revision}/`;
|
||||
if (!this.dirItems[dir]) {
|
||||
this.dirItems[`/builds/${browser.name}/`].push({
|
||||
this.dirItems[`/builds/${browserDirname}/`].push({
|
||||
name: `${revision}/`,
|
||||
isDir: true,
|
||||
url: '',
|
||||
@@ -261,8 +509,14 @@ export class PlaywrightBinary extends AbstractBinary {
|
||||
});
|
||||
this.dirItems[dir] = [];
|
||||
}
|
||||
if (!this.dirItems[dir].find(item => item.name === name)) {
|
||||
this.dirItems[dir].push({ name, isDir: false, url, size: '-', date: itemDate });
|
||||
if (!this.dirItems[dir].some(item => item.name === name)) {
|
||||
this.dirItems[dir].push({
|
||||
name,
|
||||
isDir: false,
|
||||
url,
|
||||
size: '-',
|
||||
date: itemDate,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,15 @@
|
||||
import path from 'node:path';
|
||||
import { SingletonProto } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import binaries, { BinaryName } from '../../../../config/binaries';
|
||||
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
|
||||
|
||||
import { SingletonProto } from 'egg';
|
||||
|
||||
import binaries, { type BinaryName } from '../../../../config/binaries.ts';
|
||||
import { BinaryType } from '../../enum/Binary.ts';
|
||||
import {
|
||||
AbstractBinary,
|
||||
BinaryAdapter,
|
||||
type BinaryItem,
|
||||
type FetchResult,
|
||||
} from './AbstractBinary.ts';
|
||||
|
||||
@SingletonProto()
|
||||
@BinaryAdapter(BinaryType.Prisma)
|
||||
@@ -30,14 +37,19 @@ export class PrismaBinary extends AbstractBinary {
|
||||
const commitIdMap: Record<string, boolean> = {};
|
||||
// https://list-binaries.prisma-orm.workers.dev/?delimiter=/&prefix=all_commits/61023c35d2c8762f66f09bc4183d2f630b541d08/
|
||||
for (const version in data.versions) {
|
||||
const major = parseInt(version.split('.', 1)[0]);
|
||||
const major = Number.parseInt(version.split('.', 1)[0]);
|
||||
// need >= 3.0.0
|
||||
if (major < 3) continue;
|
||||
const date = data.time[version];
|
||||
const pkg = data.versions[version];
|
||||
// https://registry.npmjs.com/@prisma/engines/4.14.1
|
||||
const enginesVersion = pkg.devDependencies['@prisma/engines-version'] || '';
|
||||
// https://registry.npmjs.com/@prisma/engines/5.7.0 should read from dependencies
|
||||
const enginesVersion =
|
||||
pkg.devDependencies?.['@prisma/engines-version'] ||
|
||||
pkg.dependencies?.['@prisma/engines-version'] ||
|
||||
'';
|
||||
// "@prisma/engines-version": "4.14.0-67.d9a4c5988f480fa576d43970d5a23641aa77bc9c"
|
||||
// "@prisma/engines-version": "5.7.0-41.79fb5193cf0a8fdbef536e4b4a159cad677ab1b9"
|
||||
const matched = /\.(\w{30,})$/.exec(enginesVersion);
|
||||
if (!matched) continue;
|
||||
const commitId = matched[1];
|
||||
@@ -53,19 +65,23 @@ export class PrismaBinary extends AbstractBinary {
|
||||
}
|
||||
}
|
||||
|
||||
async fetch(dir: string, binaryName: BinaryName): Promise<FetchResult | undefined> {
|
||||
async fetch(
|
||||
dir: string,
|
||||
binaryName: BinaryName
|
||||
): Promise<FetchResult | undefined> {
|
||||
const existsItems = this.dirItems[dir];
|
||||
if (existsItems) {
|
||||
return { items: existsItems, nextParams: null };
|
||||
}
|
||||
// /foo/ => foo/
|
||||
const binaryConfig = binaries[binaryName];
|
||||
const subDir = dir.substring(1);
|
||||
const subDir = dir.slice(1);
|
||||
const url = `${binaryConfig.distUrl}?delimiter=/&prefix=${encodeURIComponent(subDir)}`;
|
||||
const result = await this.requestJSON(url);
|
||||
return { items: this.#parseItems(result), nextParams: null };
|
||||
}
|
||||
|
||||
// oxlint-disable-next-line typescript-eslint/no-explicit-any
|
||||
#parseItems(result: any): BinaryItem[] {
|
||||
const items: BinaryItem[] = [];
|
||||
// objects": [
|
||||
|
||||
@@ -1,6 +1,17 @@
|
||||
import { SingletonProto } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
|
||||
import { SingletonProto } from 'egg';
|
||||
import { XMLParser } from 'fast-xml-parser';
|
||||
|
||||
import { BinaryType } from '../../enum/Binary.ts';
|
||||
import {
|
||||
AbstractBinary,
|
||||
BinaryAdapter,
|
||||
type BinaryItem,
|
||||
type FetchResult,
|
||||
} from './AbstractBinary.ts';
|
||||
|
||||
export const platforms = ['Linux_x64', 'Mac', 'Mac_Arm', 'Win', 'Win_x64'];
|
||||
|
||||
const MAX_DEPTH = 1;
|
||||
|
||||
@SingletonProto()
|
||||
@BinaryAdapter(BinaryType.Puppeteer)
|
||||
@@ -13,72 +24,28 @@ export class PuppeteerBinary extends AbstractBinary {
|
||||
this.dirItems = undefined;
|
||||
}
|
||||
|
||||
async fetch(dir: string): Promise<FetchResult | undefined> {
|
||||
async fetch(
|
||||
dir: string,
|
||||
_binaryName: string,
|
||||
lastData?: Record<string, unknown>
|
||||
): Promise<FetchResult | undefined> {
|
||||
if (!this.dirItems) {
|
||||
const pkgUrl = 'https://registry.npmjs.com/puppeteer';
|
||||
const data = await this.requestJSON(pkgUrl);
|
||||
const s3Url = 'https://chromium-browser-snapshots.storage.googleapis.com';
|
||||
const chromiumRevisions = new Map<string, string>();
|
||||
this.dirItems = {};
|
||||
this.dirItems['/'] = [];
|
||||
const chromiumRevisions = new Map<string, string>();
|
||||
for (const version in data.versions) {
|
||||
// find chromium versions
|
||||
const pkg = data.versions[version];
|
||||
const revision = pkg.puppeteer?.chromium_revision ? String(pkg.puppeteer.chromium_revision) : '';
|
||||
if (revision && !chromiumRevisions.has(revision)) {
|
||||
chromiumRevisions.set(revision, data.time[version]);
|
||||
}
|
||||
}
|
||||
|
||||
// https://unpkg.com/puppeteer@5.1.0/lib/cjs/revisions.js
|
||||
// https://unpkg.com/puppeteer@latest/lib/cjs/puppeteer/revisions.js
|
||||
// exports.PUPPETEER_REVISIONS = {
|
||||
// chromium: '768783',
|
||||
// firefox: 'latest',
|
||||
// };
|
||||
const unpkgURL = 'https://unpkg.com/puppeteer-core@latest/lib/cjs/puppeteer/revisions.js';
|
||||
const text = await this.requestXml(unpkgURL);
|
||||
const m = /chromium:\s+\'(\d+)\'\,/.exec(text);
|
||||
if (m && !chromiumRevisions.has(m[1])) {
|
||||
chromiumRevisions.set(m[1], new Date().toISOString());
|
||||
}
|
||||
|
||||
// download LAST_CHANGE
|
||||
// https://github.com/chaopeng/chromium-downloader/blob/master/get-chromium#L28
|
||||
const LAST_CHANGE_URL = 'https://www.googleapis.com/download/storage/v1/b/chromium-browser-snapshots/o/Linux_x64%2FLAST_CHANGE?alt=media';
|
||||
const lastRevision = await this.requestXml(LAST_CHANGE_URL);
|
||||
if (lastRevision) {
|
||||
chromiumRevisions.set(lastRevision, new Date().toISOString());
|
||||
}
|
||||
|
||||
// old versions
|
||||
// v5.0.0
|
||||
chromiumRevisions.set('756035', data.time['5.0.0']);
|
||||
// v5.2.0
|
||||
chromiumRevisions.set('768783', data.time['5.2.0']);
|
||||
// v5.2.1
|
||||
chromiumRevisions.set('782078', data.time['5.2.1']);
|
||||
// v5.3.0
|
||||
chromiumRevisions.set('800071', data.time['5.3.0']);
|
||||
// v5.4.0
|
||||
chromiumRevisions.set('809590', data.time['5.4.0']);
|
||||
// v5.5.0
|
||||
chromiumRevisions.set('818858', data.time['5.5.0']);
|
||||
// v6.0.0
|
||||
chromiumRevisions.set('843427', data.time['6.0.0']);
|
||||
// "7.0.0"
|
||||
chromiumRevisions.set('848005', data.time['7.0.0']);
|
||||
// https://github.com/puppeteer/puppeteer/blob/v8.0.0/src/revisions.ts#L23
|
||||
// "8.0.0":"2021-02-26T08:36:50.107Z"
|
||||
chromiumRevisions.set('856583', data.time['8.0.0']);
|
||||
// "9.0.0":"2021-04-21T11:27:32.513Z"
|
||||
chromiumRevisions.set('869685', data.time['9.0.0']);
|
||||
// "10.0.0":"2021-05-31T12:42:27.486Z"
|
||||
chromiumRevisions.set('884014', data.time['10.0.0']);
|
||||
// "11.0.0":"2021-11-03T09:29:12.751Z"
|
||||
chromiumRevisions.set('901912', data.time['11.0.0']);
|
||||
|
||||
const platforms = [ 'Linux_x64', 'Mac', 'Mac_Arm', 'Win', 'Win_x64' ];
|
||||
for (const platform of platforms) {
|
||||
const revision = lastData?.[platform] as string;
|
||||
if (!revision) {
|
||||
// 丢弃库中历史不带 lastData 的任务,防止遍历任务过多
|
||||
this.logger.info(
|
||||
'drop puppeteer task if has no last data for platform %s, lastPlatform',
|
||||
platform,
|
||||
lastData
|
||||
);
|
||||
return;
|
||||
}
|
||||
let marker = revision ? `${platform}/${revision}/REVISIONS` : undefined;
|
||||
this.dirItems['/'].push({
|
||||
name: `${platform}/`,
|
||||
date: new Date().toISOString(),
|
||||
@@ -87,8 +54,35 @@ export class PuppeteerBinary extends AbstractBinary {
|
||||
url: '',
|
||||
});
|
||||
this.dirItems[`/${platform}/`] = [];
|
||||
let i = 0;
|
||||
do {
|
||||
let requestUrl = `${s3Url}?prefix=${platform}&max-keys=100`;
|
||||
if (marker) {
|
||||
requestUrl += `&marker=${marker}`;
|
||||
}
|
||||
const xml = await this.requestXml(requestUrl);
|
||||
const parser = new XMLParser();
|
||||
const obj = parser.parse(xml);
|
||||
if (
|
||||
obj.ListBucketResult.IsTruncated === true &&
|
||||
obj.ListBucketResult.NextMarker
|
||||
) {
|
||||
marker = obj.ListBucketResult.NextMarker;
|
||||
} else {
|
||||
marker = undefined;
|
||||
}
|
||||
for (const content of obj.ListBucketResult.Contents) {
|
||||
// /Linux_x64/1041455/REVISIONS
|
||||
if (content.Key.endsWith('/REVISIONS')) {
|
||||
const revision = content.Key.split('/')[1].trim();
|
||||
chromiumRevisions.set(revision, content.LastModified);
|
||||
}
|
||||
}
|
||||
// 最多遍历 100 次防止内存爆炸,下次同步任务会继续
|
||||
} while (i++ < MAX_DEPTH && marker !== undefined);
|
||||
}
|
||||
for (const [ revision, date ] of chromiumRevisions.entries()) {
|
||||
|
||||
for (const [revision, date] of chromiumRevisions.entries()) {
|
||||
// https://github.com/puppeteer/puppeteer/blob/eebf452d38b79bb2ea1a1ba84c3d2ea6f2f9f899/src/node/BrowserFetcher.ts#L40
|
||||
// chrome: {
|
||||
// linux: '%s/chromium-browser-snapshots/Linux_x64/%d/%s.zip',
|
||||
@@ -113,7 +107,7 @@ export class PuppeteerBinary extends AbstractBinary {
|
||||
size: '-',
|
||||
isDir: false,
|
||||
url: `https://storage.googleapis.com/chromium-browser-snapshots/${platform}/${revision}/${name}`,
|
||||
ignoreDownloadStatuses: [ 404 ],
|
||||
ignoreDownloadStatuses: [404],
|
||||
},
|
||||
];
|
||||
}
|
||||
@@ -124,15 +118,14 @@ export class PuppeteerBinary extends AbstractBinary {
|
||||
}
|
||||
|
||||
// https://github.com/puppeteer/puppeteer/blob/eebf452d38b79bb2ea1a1ba84c3d2ea6f2f9f899/src/node/BrowserFetcher.ts#L72
|
||||
private archiveName(
|
||||
platform: string,
|
||||
revision: string,
|
||||
): string {
|
||||
private archiveName(platform: string, revision: string): string {
|
||||
if (platform === 'Linux_x64') return 'chrome-linux';
|
||||
if (platform === 'Mac' || platform === 'Mac_Arm') return 'chrome-mac';
|
||||
if (platform === 'Win' || platform === 'Win_x64') {
|
||||
// Windows archive name changed at r591479.
|
||||
return parseInt(revision, 10) > 591479 ? 'chrome-win' : 'chrome-win32';
|
||||
return Number.parseInt(revision, 10) > 591_479
|
||||
? 'chrome-win'
|
||||
: 'chrome-win32';
|
||||
}
|
||||
return '';
|
||||
}
|
||||
|
||||
@@ -1,6 +1,12 @@
|
||||
import { SingletonProto } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
|
||||
import { SingletonProto } from 'egg';
|
||||
|
||||
import { BinaryType } from '../../enum/Binary.ts';
|
||||
import {
|
||||
AbstractBinary,
|
||||
BinaryAdapter,
|
||||
type BinaryItem,
|
||||
type FetchResult,
|
||||
} from './AbstractBinary.ts';
|
||||
|
||||
@SingletonProto()
|
||||
@BinaryAdapter(BinaryType.Sqlcipher)
|
||||
@@ -16,7 +22,8 @@ export class SqlcipherBinary extends AbstractBinary {
|
||||
} = {
|
||||
'/': [],
|
||||
};
|
||||
const s3Url = 'https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher';
|
||||
const s3Url =
|
||||
'https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher';
|
||||
const pkgUrl = 'https://registry.npmjs.com/@journeyapps/sqlcipher';
|
||||
const data = await this.requestJSON(pkgUrl);
|
||||
// https://github.com/journeyapps/node-sqlcipher/blob/master/.circleci/config.yml#L407
|
||||
@@ -44,11 +51,12 @@ export class SqlcipherBinary extends AbstractBinary {
|
||||
'win32-ia32',
|
||||
];
|
||||
for (const version in data.versions) {
|
||||
const major = parseInt(version.split('.', 1)[0]);
|
||||
const major = Number.parseInt(version.split('.', 1)[0]);
|
||||
if (major < 5) continue;
|
||||
// >= 5.0.0
|
||||
const pkgVersion = data.versions[version];
|
||||
const napiVersions = pkgVersion.binary && pkgVersion.binary.napi_versions || [];
|
||||
const napiVersions =
|
||||
(pkgVersion.binary && pkgVersion.binary.napi_versions) || [];
|
||||
const date = data.time[version];
|
||||
dirItems['/'].push({
|
||||
name: `v${version}/`,
|
||||
@@ -74,7 +82,7 @@ export class SqlcipherBinary extends AbstractBinary {
|
||||
size: '-',
|
||||
isDir: false,
|
||||
url: `${s3Url}/v${version}/${name}`,
|
||||
ignoreDownloadStatuses: [ 404, 403 ],
|
||||
ignoreDownloadStatuses: [404, 403],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,32 +1,38 @@
|
||||
import {
|
||||
ImplDecorator,
|
||||
Inject,
|
||||
QualifierImplDecoratorUtil,
|
||||
} from '@eggjs/tegg';
|
||||
import { RegistryType } from '../../../common/enum/Registry';
|
||||
import { Registry } from '../../../core/entity/Registry';
|
||||
import {
|
||||
EggHttpClient,
|
||||
EggLogger,
|
||||
type ImplDecorator,
|
||||
Logger,
|
||||
HttpClient,
|
||||
} from 'egg';
|
||||
|
||||
import type { RegistryType } from '../../../common/enum/Registry.ts';
|
||||
import type { Registry } from '../../../core/entity/Registry.ts';
|
||||
|
||||
export const CHANGE_STREAM_ATTRIBUTE = 'CHANGE_STREAM_ATTRIBUTE';
|
||||
export type ChangesStreamChange = {
|
||||
export interface ChangesStreamChange {
|
||||
seq: string;
|
||||
fullname: string;
|
||||
};
|
||||
}
|
||||
|
||||
export abstract class AbstractChangeStream {
|
||||
@Inject()
|
||||
protected logger: EggLogger;
|
||||
protected logger: Logger;
|
||||
|
||||
@Inject()
|
||||
protected httpclient: EggHttpClient;
|
||||
protected httpClient: HttpClient;
|
||||
|
||||
abstract getInitialSince(registry: Registry): Promise<string>;
|
||||
abstract fetchChanges(registry: Registry, since: string): AsyncGenerator<ChangesStreamChange>;
|
||||
abstract fetchChanges(
|
||||
registry: Registry,
|
||||
since: string
|
||||
): AsyncGenerator<ChangesStreamChange>;
|
||||
|
||||
getChangesStreamUrl(registry: Registry, since: string, limit?: number): string {
|
||||
getChangesStreamUrl(
|
||||
registry: Registry,
|
||||
since: string,
|
||||
limit?: number
|
||||
): string {
|
||||
const url = new URL(registry.changeStream);
|
||||
url.searchParams.set('since', since);
|
||||
if (limit) {
|
||||
@@ -36,5 +42,10 @@ export abstract class AbstractChangeStream {
|
||||
}
|
||||
}
|
||||
|
||||
export const RegistryChangesStream: ImplDecorator<AbstractChangeStream, typeof RegistryType> =
|
||||
QualifierImplDecoratorUtil.generatorDecorator(AbstractChangeStream, CHANGE_STREAM_ATTRIBUTE);
|
||||
export const RegistryChangesStream: ImplDecorator<
|
||||
AbstractChangeStream,
|
||||
typeof RegistryType
|
||||
> = QualifierImplDecoratorUtil.generatorDecorator(
|
||||
AbstractChangeStream,
|
||||
CHANGE_STREAM_ATTRIBUTE
|
||||
);
|
||||
|
||||
@@ -1,35 +1,43 @@
|
||||
import { SingletonProto } from '@eggjs/tegg';
|
||||
import { RegistryType } from '../../../common/enum/Registry';
|
||||
import { Registry } from '../../../core/entity/Registry';
|
||||
import { E500 } from 'egg-errors';
|
||||
import { AbstractChangeStream, RegistryChangesStream } from './AbstractChangesStream';
|
||||
import { SingletonProto } from 'egg';
|
||||
import { E500 } from 'egg/errors';
|
||||
|
||||
import { RegistryType } from '../../../common/enum/Registry.ts';
|
||||
import type { Registry } from '../../../core/entity/Registry.ts';
|
||||
import {
|
||||
AbstractChangeStream,
|
||||
RegistryChangesStream,
|
||||
} from './AbstractChangesStream.ts';
|
||||
|
||||
@SingletonProto()
|
||||
@RegistryChangesStream(RegistryType.Cnpmcore)
|
||||
export class CnpmcoreChangesStream extends AbstractChangeStream {
|
||||
|
||||
async getInitialSince(registry: Registry): Promise<string> {
|
||||
const db = (new URL(registry.changeStream)).origin;
|
||||
const { status, data } = await this.httpclient.request(db, {
|
||||
const db = new URL(registry.changeStream).origin;
|
||||
const { status, data } = await this.httpClient.request(db, {
|
||||
followRedirect: true,
|
||||
timeout: 10000,
|
||||
timeout: 10_000,
|
||||
dataType: 'json',
|
||||
});
|
||||
if (!data.update_seq) {
|
||||
throw new E500(`get getInitialSince failed: ${data.update_seq}`);
|
||||
}
|
||||
const since = String(data.update_seq - 10);
|
||||
this.logger.warn('[NpmChangesStream.getInitialSince:firstSeq] GET %s status: %s, data: %j, since: %s',
|
||||
registry.name, status, data, since);
|
||||
this.logger.warn(
|
||||
'[NpmChangesStream.getInitialSince:firstSeq] GET %s status: %s, data: %j, since: %s',
|
||||
registry.name,
|
||||
status,
|
||||
data,
|
||||
since
|
||||
);
|
||||
return since;
|
||||
}
|
||||
|
||||
async* fetchChanges(registry: Registry, since: string) {
|
||||
async *fetchChanges(registry: Registry, since: string) {
|
||||
const db = this.getChangesStreamUrl(registry, since);
|
||||
// json mode
|
||||
const { data } = await this.httpclient.request(db, {
|
||||
const { data } = await this.httpClient.request(db, {
|
||||
followRedirect: true,
|
||||
timeout: 30000,
|
||||
timeout: 30_000,
|
||||
dataType: 'json',
|
||||
gzip: true,
|
||||
});
|
||||
|
||||
@@ -1,50 +1,61 @@
|
||||
import { SingletonProto } from '@eggjs/tegg';
|
||||
import { RegistryType } from '../../../common/enum/Registry';
|
||||
import { Registry } from '../../../core/entity/Registry';
|
||||
import { E500 } from 'egg-errors';
|
||||
import { AbstractChangeStream, RegistryChangesStream } from './AbstractChangesStream';
|
||||
import { SingletonProto } from 'egg';
|
||||
import { E500 } from 'egg/errors';
|
||||
|
||||
const MAX_LIMIT = 10000;
|
||||
import { RegistryType } from '../../../common/enum/Registry.ts';
|
||||
import type { Registry } from '../../../core/entity/Registry.ts';
|
||||
import {
|
||||
AbstractChangeStream,
|
||||
RegistryChangesStream,
|
||||
} from './AbstractChangesStream.ts';
|
||||
|
||||
type FetchResults = {
|
||||
const MAX_LIMIT = 10_000;
|
||||
|
||||
interface FetchResults {
|
||||
results: {
|
||||
seq: number;
|
||||
type: string;
|
||||
id: string;
|
||||
changes: Record<string, string>[];
|
||||
gmt_modified: Date,
|
||||
gmt_modified: Date;
|
||||
}[];
|
||||
};
|
||||
}
|
||||
|
||||
@SingletonProto()
|
||||
@RegistryChangesStream(RegistryType.Cnpmjsorg)
|
||||
export class CnpmjsorgChangesStream extends AbstractChangeStream {
|
||||
|
||||
// cnpmjsorg 未实现 update_seq 字段
|
||||
// 默认返回当前时间戳字符串
|
||||
async getInitialSince(registry: Registry): Promise<string> {
|
||||
const since = String((new Date()).getTime());
|
||||
this.logger.warn(`[CnpmjsorgChangesStream.getInitialSince] since: ${since}, skip query ${registry.changeStream}`);
|
||||
const since = String(Date.now());
|
||||
this.logger.warn(
|
||||
`[CnpmjsorgChangesStream.getInitialSince] since: ${since}, skip query ${registry.changeStream}`
|
||||
);
|
||||
return since;
|
||||
}
|
||||
|
||||
private async tryFetch(registry: Registry, since: string, limit = 1000): Promise<{ data: FetchResults }> {
|
||||
private async tryFetch(
|
||||
registry: Registry,
|
||||
since: string,
|
||||
limit = 1000
|
||||
): Promise<{ data: FetchResults }> {
|
||||
if (limit > MAX_LIMIT) {
|
||||
throw new E500(`limit too large, current since: ${since}, limit: ${limit}`);
|
||||
throw new E500(
|
||||
`limit too large, current since: ${since}, limit: ${limit}`
|
||||
);
|
||||
}
|
||||
const db = this.getChangesStreamUrl(registry, since, limit);
|
||||
// json mode
|
||||
const res = await this.httpclient.request<FetchResults>(db, {
|
||||
const res = await this.httpClient.request<FetchResults>(db, {
|
||||
followRedirect: true,
|
||||
timeout: 30000,
|
||||
timeout: 30_000,
|
||||
dataType: 'json',
|
||||
gzip: true,
|
||||
});
|
||||
const { results = [] } = res.data;
|
||||
if (results?.length >= limit) {
|
||||
const [ first ] = results;
|
||||
const [first] = results;
|
||||
const last = results[results.length - 1];
|
||||
if (first.gmt_modified === last.gmt_modified) {
|
||||
if (first.gmt_modified === last?.gmt_modified) {
|
||||
return await this.tryFetch(registry, since, limit + 1000);
|
||||
}
|
||||
}
|
||||
@@ -52,7 +63,7 @@ export class CnpmjsorgChangesStream extends AbstractChangeStream {
|
||||
return res;
|
||||
}
|
||||
|
||||
async* fetchChanges(registry: Registry, since: string) {
|
||||
async *fetchChanges(registry: Registry, since: string) {
|
||||
// ref: https://github.com/cnpm/cnpmjs.org/pull/1734
|
||||
// 由于 cnpmjsorg 无法计算准确的 seq
|
||||
// since 是一个时间戳,需要确保一次返回的结果中首尾两个 gmtModified 不相等
|
||||
@@ -60,7 +71,7 @@ export class CnpmjsorgChangesStream extends AbstractChangeStream {
|
||||
|
||||
if (data.results?.length > 0) {
|
||||
for (const change of data.results) {
|
||||
const seq = new Date(change.gmt_modified).getTime() + '';
|
||||
const seq = `${new Date(change.gmt_modified).getTime()}`;
|
||||
const fullname = change.id;
|
||||
if (seq && fullname && seq !== since) {
|
||||
const change = {
|
||||
|
||||
@@ -1,55 +1,84 @@
|
||||
import { SingletonProto } from '@eggjs/tegg';
|
||||
import { E500 } from 'egg-errors';
|
||||
import { RegistryType } from '../../../common/enum/Registry';
|
||||
import { Registry } from '../../../core/entity/Registry';
|
||||
import { AbstractChangeStream, ChangesStreamChange, RegistryChangesStream } from './AbstractChangesStream';
|
||||
import { SingletonProto } from 'egg';
|
||||
import { E500 } from 'egg/errors';
|
||||
|
||||
import { RegistryType } from '../../../common/enum/Registry.ts';
|
||||
import type { Registry } from '../../../core/entity/Registry.ts';
|
||||
import {
|
||||
AbstractChangeStream,
|
||||
RegistryChangesStream,
|
||||
type ChangesStreamChange,
|
||||
} from './AbstractChangesStream.ts';
|
||||
|
||||
@SingletonProto()
|
||||
@RegistryChangesStream(RegistryType.Npm)
|
||||
export class NpmChangesStream extends AbstractChangeStream {
|
||||
|
||||
async getInitialSince(registry: Registry): Promise<string> {
|
||||
const db = (new URL(registry.changeStream)).origin;
|
||||
const { status, data } = await this.httpclient.request(db, {
|
||||
const db = new URL(registry.changeStream).origin;
|
||||
const { status, data } = await this.httpClient.request(db, {
|
||||
followRedirect: true,
|
||||
timeout: 10000,
|
||||
timeout: 10_000,
|
||||
dataType: 'json',
|
||||
headers: {
|
||||
'npm-replication-opt-in': 'true',
|
||||
},
|
||||
});
|
||||
const since = String(data.update_seq - 10);
|
||||
if (!data.update_seq) {
|
||||
throw new E500(`get getInitialSince failed: ${data.update_seq}`);
|
||||
}
|
||||
this.logger.warn('[NpmChangesStream.getInitialSince] GET %s status: %s, data: %j, since: %s',
|
||||
registry.name, registry.changeStream, status, data, since);
|
||||
this.logger.warn(
|
||||
'[NpmChangesStream.getInitialSince] GET %s status: %s, data: %j, since: %s',
|
||||
registry.name,
|
||||
registry.changeStream,
|
||||
status,
|
||||
data,
|
||||
since
|
||||
);
|
||||
return since;
|
||||
}
|
||||
|
||||
async* fetchChanges(registry: Registry, since: string) {
|
||||
async *fetchChanges(
|
||||
registry: Registry,
|
||||
since: string
|
||||
): AsyncGenerator<ChangesStreamChange> {
|
||||
// https://github.com/orgs/community/discussions/152515
|
||||
const db = this.getChangesStreamUrl(registry, since);
|
||||
const { res } = await this.httpclient.request(db, {
|
||||
streaming: true,
|
||||
timeout: 10000,
|
||||
const { data, headers } = await this.httpClient.request(db, {
|
||||
timeout: 60_000,
|
||||
headers: {
|
||||
'npm-replication-opt-in': 'true',
|
||||
},
|
||||
dataType: 'json',
|
||||
gzip: true,
|
||||
});
|
||||
const count = data.results?.length;
|
||||
const last_seq = data.last_seq;
|
||||
this.logger.info(
|
||||
'[NpmChangesStream.fetchChanges] %s, count: %s, last_seq: %s, headers: %j',
|
||||
db,
|
||||
count,
|
||||
last_seq,
|
||||
headers
|
||||
);
|
||||
|
||||
let buf = '';
|
||||
for await (const chunk of res) {
|
||||
const text = chunk.toString();
|
||||
const lines = text.split('\n');
|
||||
|
||||
for (const line of lines) {
|
||||
const content = buf + line;
|
||||
const match = /"seq":(\d+),"id":"([^"]+)"/g.exec(content);
|
||||
const seq = match?.[1];
|
||||
const fullname = match?.[2];
|
||||
if (seq && fullname) {
|
||||
buf = '';
|
||||
const change: ChangesStreamChange = { fullname, seq };
|
||||
if (data.results?.length > 0) {
|
||||
for (const change of data.results) {
|
||||
// {
|
||||
// seq: 2495018,
|
||||
// id: 'ng-create-all-project',
|
||||
// changes: [ { rev: '3-be3a014aab8e379ba28a28adb8e10142' }, [length]: 1 ],
|
||||
// deleted: true
|
||||
// },
|
||||
const seq = String(change.seq);
|
||||
const fullname = change.id;
|
||||
if (seq && fullname && seq !== since) {
|
||||
const change = {
|
||||
fullname,
|
||||
seq,
|
||||
};
|
||||
yield change;
|
||||
} else {
|
||||
buf += line;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,27 +1,34 @@
|
||||
import { performance } from 'perf_hooks';
|
||||
import { Advice, AdviceContext, IAdvice } from '@eggjs/tegg/aop';
|
||||
import { Inject } from '@eggjs/tegg';
|
||||
import { EggLogger } from 'egg';
|
||||
import { performance } from 'node:perf_hooks';
|
||||
|
||||
import { Advice, type AdviceContext, type IAdvice } from 'egg/aop';
|
||||
import { Inject, Logger } from 'egg';
|
||||
|
||||
const START = Symbol('AsyncTimer#start');
|
||||
const SUCCEED = Symbol('AsyncTimer#succeed');
|
||||
|
||||
// auto print async function call performance timer log into logger
|
||||
@Advice()
|
||||
export class AsyncTimer implements IAdvice {
|
||||
@Inject()
|
||||
private readonly logger: EggLogger;
|
||||
private start: number;
|
||||
private succeed = true;
|
||||
private readonly logger: Logger;
|
||||
|
||||
async beforeCall() {
|
||||
this.start = performance.now();
|
||||
async beforeCall(ctx: AdviceContext) {
|
||||
ctx.set(START, performance.now());
|
||||
ctx.set(SUCCEED, true);
|
||||
}
|
||||
|
||||
async afterThrow() {
|
||||
this.succeed = false;
|
||||
async afterThrow(ctx: AdviceContext) {
|
||||
ctx.set(SUCCEED, false);
|
||||
}
|
||||
|
||||
async afterFinally(ctx: AdviceContext) {
|
||||
const ms = Math.floor((performance.now() - this.start) * 1000) / 1000;
|
||||
this.logger.info('[%s] [%s:%s|%s]',
|
||||
ms, ctx.that.constructor.name, ctx.method, this.succeed ? 'T' : 'F');
|
||||
const ms = Math.floor((performance.now() - ctx.get(START)) * 1000) / 1000;
|
||||
this.logger.info(
|
||||
'[%s] [%s:%s|%s]',
|
||||
ms,
|
||||
ctx.that.constructor.name,
|
||||
ctx.method,
|
||||
ctx.get(SUCCEED) ? 'T' : 'F'
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,10 +1,14 @@
|
||||
export const BUG_VERSIONS = 'bug-versions';
|
||||
export const LATEST_TAG = 'latest';
|
||||
export const GLOBAL_WORKER = 'GLOBAL_WORKER';
|
||||
export const PROXY_CACHE_DIR_NAME = 'proxy-cache-packages';
|
||||
export const ABBREVIATED_META_TYPE = 'application/vnd.npm.install-v1+json';
|
||||
export const NOT_IMPLEMENTED_PATH = [ '/-/npm/v1/security/audits/quick', '/-/npm/v1/security/advisories/bulk' ];
|
||||
|
||||
export enum SyncMode {
|
||||
none = 'none',
|
||||
admin = 'admin',
|
||||
proxy = 'proxy',
|
||||
exist = 'exist',
|
||||
all = 'all',
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import dayjs from 'dayjs';
|
||||
import customParseFormat from 'dayjs/plugin/customParseFormat';
|
||||
import customParseFormat from 'dayjs/plugin/customParseFormat.js';
|
||||
dayjs.extend(customParseFormat);
|
||||
|
||||
export default dayjs;
|
||||
|
||||
@@ -3,6 +3,7 @@ export enum BinaryType {
|
||||
Bucket = 'bucket',
|
||||
Cypress = 'cypress',
|
||||
Electron = 'electron',
|
||||
Firefox = 'firefox',
|
||||
GitHub = 'github',
|
||||
Imagemin = 'imagemin',
|
||||
Node = 'node',
|
||||
@@ -13,4 +14,5 @@ export enum BinaryType {
|
||||
Prisma = 'prisma',
|
||||
Sqlcipher = 'sqlcipher',
|
||||
ChromeForTesting = 'chromeForTesting',
|
||||
Edgedriver = 'edgedriver',
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ export enum TaskType {
|
||||
SyncPackage = 'sync_package',
|
||||
ChangesStream = 'changes_stream',
|
||||
SyncBinary = 'sync_binary',
|
||||
UpdateProxyCache = 'update_proxy_cache',
|
||||
CreateHook = 'create_hook',
|
||||
TriggerHook = 'trigger_hook',
|
||||
}
|
||||
|
||||
4
app/common/enum/Total.ts
Normal file
4
app/common/enum/Total.ts
Normal file
@@ -0,0 +1,4 @@
|
||||
export enum TotalType {
|
||||
PackageCount = 'packageCount',
|
||||
PackageVersionCount = 'packageVersionCount',
|
||||
}
|
||||
@@ -1,11 +1,11 @@
|
||||
export enum LoginResultCode {
|
||||
UserNotFound,
|
||||
Success,
|
||||
Fail,
|
||||
UserNotFound = 0,
|
||||
Success = 1,
|
||||
Fail = 2,
|
||||
}
|
||||
|
||||
export enum WanStatusCode {
|
||||
UserNotFound,
|
||||
Unbound,
|
||||
Bound,
|
||||
UserNotFound = 0,
|
||||
Unbound = 1,
|
||||
Bound = 2,
|
||||
}
|
||||
|
||||
@@ -2,5 +2,6 @@
|
||||
"name": "cnpmcore-common",
|
||||
"eggModule": {
|
||||
"name": "cnpmcoreCommon"
|
||||
}
|
||||
},
|
||||
"type": "module"
|
||||
}
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
import { CnpmcoreConfig } from '../port/config';
|
||||
import { Readable } from 'stream';
|
||||
import { IncomingHttpHeaders } from 'http';
|
||||
import { EggContext } from '@eggjs/tegg';
|
||||
import type { Readable } from 'node:stream';
|
||||
import type { IncomingHttpHeaders } from 'node:http';
|
||||
|
||||
import type { Context } from 'egg';
|
||||
import type { estypes } from '@elastic/elasticsearch';
|
||||
import type { CnpmcoreConfig } from '../port/config.ts';
|
||||
|
||||
export interface UploadResult {
|
||||
key: string;
|
||||
@@ -18,8 +20,8 @@ export interface UploadOptions {
|
||||
|
||||
export interface AppendOptions {
|
||||
key: string;
|
||||
position?: string,
|
||||
headers?: IncomingHttpHeaders,
|
||||
position?: string;
|
||||
headers?: IncomingHttpHeaders;
|
||||
}
|
||||
|
||||
export interface DownloadOptions {
|
||||
@@ -39,7 +41,11 @@ export interface NFSClient {
|
||||
|
||||
createDownloadStream(key: string): Promise<Readable | undefined>;
|
||||
|
||||
download(key: string, filepath: string, options: DownloadOptions): Promise<void>;
|
||||
download(
|
||||
key: string,
|
||||
filepath: string,
|
||||
options: DownloadOptions
|
||||
): Promise<void>;
|
||||
|
||||
url?(key: string): string;
|
||||
}
|
||||
@@ -50,6 +56,13 @@ export interface QueueAdapter {
|
||||
length(key: string): Promise<number>;
|
||||
}
|
||||
|
||||
export interface SearchAdapter {
|
||||
// oxlint-disable-next-line typescript-eslint/no-explicit-any
|
||||
search<T>(query: any): Promise<estypes.SearchHitsMetadata<T>>;
|
||||
upsert<T>(id: string, document: T): Promise<string>;
|
||||
delete(id: string): Promise<string>;
|
||||
}
|
||||
|
||||
export interface AuthUrlResult {
|
||||
loginUrl: string;
|
||||
doneUrl: string;
|
||||
@@ -60,14 +73,11 @@ export interface userResult {
|
||||
email: string;
|
||||
}
|
||||
export interface AuthClient {
|
||||
getAuthUrl(ctx: EggContext): Promise<AuthUrlResult>;
|
||||
getAuthUrl(ctx: Context): Promise<AuthUrlResult>;
|
||||
ensureCurrentUser(): Promise<userResult | null>;
|
||||
}
|
||||
|
||||
declare module 'egg' {
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore
|
||||
// avoid TS2310 Type 'EggAppConfig' recursively references itself as a base type.
|
||||
interface EggAppConfig {
|
||||
cnpmcore: CnpmcoreConfig;
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { Entity, EntityData } from './Entity';
|
||||
import { EasyData, EntityUtil } from '../util/EntityUtil';
|
||||
import { Entity, type EntityData } from './Entity.ts';
|
||||
import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
|
||||
|
||||
interface BinaryData extends EntityData {
|
||||
binaryId: string;
|
||||
@@ -21,7 +21,7 @@ export class Binary extends Entity {
|
||||
isDir: boolean;
|
||||
size: number;
|
||||
date: string;
|
||||
sourceUrl?: string;
|
||||
sourceUrl: string;
|
||||
ignoreDownloadStatuses?: number[];
|
||||
|
||||
constructor(data: BinaryData) {
|
||||
|
||||
@@ -12,11 +12,11 @@ export class BugVersion {
|
||||
this.data = data;
|
||||
}
|
||||
|
||||
listAllPackagesHasBugs(): Array<string> {
|
||||
listAllPackagesHasBugs(): string[] {
|
||||
return Object.keys(this.data);
|
||||
}
|
||||
|
||||
listBugVersions(pkgName: string): Array<string> {
|
||||
listBugVersions(pkgName: string): string[] {
|
||||
const bugVersionPackage = this.data[pkgName];
|
||||
if (!bugVersionPackage) {
|
||||
return [];
|
||||
@@ -31,18 +31,24 @@ export class BugVersion {
|
||||
}
|
||||
|
||||
// TODO manifest typing
|
||||
// oxlint-disable-next-line typescript-eslint/no-explicit-any
|
||||
fixManifest(bugVersionManifest: any, fixVersionManifest: any): any {
|
||||
// If the tarball is same, manifest has fixed.
|
||||
if (bugVersionManifest.dist.tarball === fixVersionManifest.dist.tarball) {
|
||||
return;
|
||||
}
|
||||
const advice = this.fixVersion(bugVersionManifest.name, bugVersionManifest.version);
|
||||
const advice = this.fixVersion(
|
||||
bugVersionManifest.name,
|
||||
bugVersionManifest.version
|
||||
);
|
||||
if (!advice) {
|
||||
return;
|
||||
}
|
||||
const newManifest = JSON.parse(JSON.stringify(fixVersionManifest));
|
||||
const newManifest = structuredClone(fixVersionManifest);
|
||||
const hotfixDeprecated = `[WARNING] Use ${advice.version} instead of ${bugVersionManifest.version}, reason: ${advice.reason}`;
|
||||
newManifest.deprecated = bugVersionManifest.deprecated ? `${bugVersionManifest.deprecated} (${hotfixDeprecated})` : hotfixDeprecated;
|
||||
newManifest.deprecated = bugVersionManifest.deprecated
|
||||
? `${bugVersionManifest.deprecated} (${hotfixDeprecated})`
|
||||
: hotfixDeprecated;
|
||||
// don't change version
|
||||
newManifest.version = bugVersionManifest.version;
|
||||
return newManifest;
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
import { Entity, EntityData } from './Entity';
|
||||
import { EasyData, EntityUtil } from '../util/EntityUtil';
|
||||
import { Entity, type EntityData } from './Entity.ts';
|
||||
import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
|
||||
|
||||
interface ChangeData extends EntityData {
|
||||
changeId: string;
|
||||
type: string;
|
||||
targetName: string;
|
||||
// oxlint-disable-next-line typescript-eslint/no-explicit-any
|
||||
data: any;
|
||||
}
|
||||
|
||||
@@ -12,6 +13,7 @@ export class Change extends Entity {
|
||||
changeId: string;
|
||||
type: string;
|
||||
targetName: string;
|
||||
// oxlint-disable-next-line typescript-eslint/no-explicit-any
|
||||
data: any;
|
||||
|
||||
constructor(data: ChangeData) {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { Entity, EntityData } from './Entity';
|
||||
import { EasyData, EntityUtil } from '../util/EntityUtil';
|
||||
import { Entity, type EntityData } from './Entity.ts';
|
||||
import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
|
||||
|
||||
interface DistData extends EntityData {
|
||||
distId: string;
|
||||
|
||||
@@ -1,9 +1,13 @@
|
||||
import { Entity, EntityData } from './Entity';
|
||||
import { EasyData, EntityUtil } from '../util/EntityUtil';
|
||||
import { HookType } from '../../common/enum/Hook';
|
||||
import crypto from 'crypto';
|
||||
import crypto from 'node:crypto';
|
||||
|
||||
export type CreateHookData = Omit<EasyData<HookData, 'hookId'>, 'enable' | 'latestTaskId'>;
|
||||
import { Entity, type EntityData } from './Entity.ts';
|
||||
import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
|
||||
import type { HookType } from '../../common/enum/Hook.ts';
|
||||
|
||||
export type CreateHookData = Omit<
|
||||
EasyData<HookData, 'hookId'>,
|
||||
'enable' | 'latestTaskId'
|
||||
>;
|
||||
|
||||
export interface HookData extends EntityData {
|
||||
hookId: string;
|
||||
@@ -39,10 +43,11 @@ export class Hook extends Entity {
|
||||
}
|
||||
|
||||
static create(data: CreateHookData): Hook {
|
||||
const hookData: EasyData<HookData, 'hookId'> = Object.assign({}, data, {
|
||||
const hookData: EasyData<HookData, 'hookId'> = {
|
||||
...data,
|
||||
enable: true,
|
||||
latestTaskId: undefined,
|
||||
});
|
||||
};
|
||||
const newData = EntityUtil.defaultData(hookData, 'hookId');
|
||||
return new Hook(newData);
|
||||
}
|
||||
@@ -50,7 +55,8 @@ export class Hook extends Entity {
|
||||
// payload 可能会特别大,如果做多次 stringify 浪费太多 cpu
|
||||
signPayload(payload: object) {
|
||||
const payloadStr = JSON.stringify(payload);
|
||||
const digest = crypto.createHmac('sha256', this.secret)
|
||||
const digest = crypto
|
||||
.createHmac('sha256', this.secret)
|
||||
.update(JSON.stringify(payload))
|
||||
.digest('hex');
|
||||
return {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { HookEventType } from '../../common/enum/Hook';
|
||||
import { HookEventType } from '../../common/enum/Hook.ts';
|
||||
|
||||
export interface PublishChangePayload {
|
||||
'dist-tag'?: string;
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { Entity, EntityData } from './Entity';
|
||||
import { EasyData, EntityUtil } from '../util/EntityUtil';
|
||||
import { Dist } from './Dist';
|
||||
import { getFullname } from '../../common/PackageUtil';
|
||||
import { Entity, type EntityData } from './Entity.ts';
|
||||
import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
|
||||
import { Dist } from './Dist.ts';
|
||||
import { getFullname } from '../../common/PackageUtil.ts';
|
||||
|
||||
interface PackageData extends EntityData {
|
||||
scope: string;
|
||||
@@ -22,6 +22,13 @@ export enum DIST_NAMES {
|
||||
ABBREVIATED_MANIFESTS = 'abbreviated_manifests.json',
|
||||
}
|
||||
|
||||
export function isPkgManifest(fileType: DIST_NAMES) {
|
||||
return (
|
||||
fileType === DIST_NAMES.FULL_MANIFESTS ||
|
||||
fileType === DIST_NAMES.ABBREVIATED_MANIFESTS
|
||||
);
|
||||
}
|
||||
|
||||
interface FileInfo {
|
||||
size: number;
|
||||
shasum: string;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { Entity, EntityData } from './Entity';
|
||||
import { EasyData, EntityUtil } from '../util/EntityUtil';
|
||||
import { Entity, type EntityData } from './Entity.ts';
|
||||
import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
|
||||
|
||||
interface PackageTagData extends EntityData {
|
||||
packageId: string;
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { Dist } from './Dist';
|
||||
import { Entity, EntityData } from './Entity';
|
||||
import { EasyData, EntityUtil } from '../util/EntityUtil';
|
||||
import { PaddingSemVer } from './PaddingSemVer';
|
||||
import type { Dist } from './Dist.ts';
|
||||
import { Entity, type EntityData } from './Entity.ts';
|
||||
import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
|
||||
import { PaddingSemVer } from './PaddingSemVer.ts';
|
||||
|
||||
interface PackageVersionData extends EntityData {
|
||||
packageId: string;
|
||||
@@ -48,7 +48,9 @@ export class PackageVersion extends Entity {
|
||||
}
|
||||
}
|
||||
|
||||
static create(data: EasyData<PackageVersionData, 'packageVersionId'>): PackageVersion {
|
||||
static create(
|
||||
data: EasyData<PackageVersionData, 'packageVersionId'>
|
||||
): PackageVersion {
|
||||
const newData = EntityUtil.defaultData(data, 'packageVersionId');
|
||||
return new PackageVersion(newData);
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { Entity, EntityData } from './Entity';
|
||||
import { EasyData, EntityUtil } from '../util/EntityUtil';
|
||||
import { Entity, type EntityData } from './Entity.ts';
|
||||
import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
|
||||
|
||||
interface PackageVersionBlockData extends EntityData {
|
||||
packageVersionBlockId: string;
|
||||
@@ -22,7 +22,9 @@ export class PackageVersionBlock extends Entity {
|
||||
this.reason = data.reason;
|
||||
}
|
||||
|
||||
static create(data: EasyData<PackageVersionBlockData, 'packageVersionBlockId'>): PackageVersionBlock {
|
||||
static create(
|
||||
data: EasyData<PackageVersionBlockData, 'packageVersionBlockId'>
|
||||
): PackageVersionBlock {
|
||||
const newData = EntityUtil.defaultData(data, 'packageVersionBlockId');
|
||||
return new PackageVersionBlock(newData);
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { Dist } from './Dist';
|
||||
import { Entity, EntityData } from './Entity';
|
||||
import { EasyData, EntityUtil } from '../util/EntityUtil';
|
||||
import { Entity, type EntityData } from './Entity.ts';
|
||||
import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
|
||||
import type { Dist } from './Dist.ts';
|
||||
|
||||
interface PackageVersionFileData extends EntityData {
|
||||
packageVersionFileId: string;
|
||||
@@ -33,10 +33,14 @@ export class PackageVersionFile extends Entity {
|
||||
}
|
||||
|
||||
get path() {
|
||||
return this.directory === '/' ? `/${this.name}` : `${this.directory}/${this.name}`;
|
||||
return this.directory === '/'
|
||||
? `/${this.name}`
|
||||
: `${this.directory}/${this.name}`;
|
||||
}
|
||||
|
||||
static create(data: EasyData<PackageVersionFileData, 'packageVersionFileId'>): PackageVersionFile {
|
||||
static create(
|
||||
data: EasyData<PackageVersionFileData, 'packageVersionFileId'>
|
||||
): PackageVersionFile {
|
||||
const newData = EntityUtil.defaultData(data, 'packageVersionFileId');
|
||||
return new PackageVersionFile(newData);
|
||||
}
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
import { Entity, EntityData } from './Entity';
|
||||
import { EasyData, EntityUtil } from '../util/EntityUtil';
|
||||
import { Entity, type EntityData } from './Entity.ts';
|
||||
import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
|
||||
|
||||
interface PackageVersionManifestData extends EntityData {
|
||||
packageId: string;
|
||||
packageVersionId: string;
|
||||
packageVersionManifestId: string;
|
||||
// oxlint-disable-next-line typescript-eslint/no-explicit-any
|
||||
manifest: any;
|
||||
}
|
||||
|
||||
@@ -12,6 +13,7 @@ export class PackageVersionManifest extends Entity {
|
||||
packageId: string;
|
||||
packageVersionId: string;
|
||||
packageVersionManifestId: string;
|
||||
// oxlint-disable-next-line typescript-eslint/no-explicit-any
|
||||
manifest: any;
|
||||
|
||||
constructor(data: PackageVersionManifestData) {
|
||||
@@ -22,7 +24,9 @@ export class PackageVersionManifest extends Entity {
|
||||
this.manifest = data.manifest;
|
||||
}
|
||||
|
||||
static create(data: EasyData<PackageVersionManifestData, 'packageVersionManifestId'>): PackageVersionManifest {
|
||||
static create(
|
||||
data: EasyData<PackageVersionManifestData, 'packageVersionManifestId'>
|
||||
): PackageVersionManifest {
|
||||
const newData = EntityUtil.defaultData(data, 'packageVersionManifestId');
|
||||
return new PackageVersionManifest(newData);
|
||||
}
|
||||
|
||||
@@ -14,9 +14,10 @@ export class PaddingSemVer {
|
||||
return;
|
||||
}
|
||||
this.semver = new SemVer(semver);
|
||||
if ((this.semver as any).includePrerelease) {
|
||||
// @ts-expect-error type definition is not correct
|
||||
if (this.semver.includePrerelease) {
|
||||
this.isPreRelease = true;
|
||||
} else if (this.semver.prerelease && this.semver.prerelease.length) {
|
||||
} else if (this.semver.prerelease && this.semver.prerelease.length > 0) {
|
||||
this.isPreRelease = true;
|
||||
} else {
|
||||
this.isPreRelease = false;
|
||||
@@ -25,9 +26,10 @@ export class PaddingSemVer {
|
||||
|
||||
get paddingVersion(): string {
|
||||
if (!this._paddingVersion) {
|
||||
this._paddingVersion = PaddingSemVer.paddingVersion(this.semver.major)
|
||||
+ PaddingSemVer.paddingVersion(this.semver.minor)
|
||||
+ PaddingSemVer.paddingVersion(this.semver.patch);
|
||||
this._paddingVersion =
|
||||
PaddingSemVer.paddingVersion(this.semver.major) +
|
||||
PaddingSemVer.paddingVersion(this.semver.minor) +
|
||||
PaddingSemVer.paddingVersion(this.semver.patch);
|
||||
}
|
||||
return this._paddingVersion;
|
||||
}
|
||||
@@ -37,7 +39,8 @@ export class PaddingSemVer {
|
||||
static paddingVersion(v: number) {
|
||||
const t = String(v);
|
||||
if (t.length <= 16) {
|
||||
const padding = new Array(16 - t.length).fill(0)
|
||||
const padding = Array.from({ length: 16 - t.length })
|
||||
.fill(0)
|
||||
.join('');
|
||||
return padding + t;
|
||||
}
|
||||
|
||||
43
app/core/entity/ProxyCache.ts
Normal file
43
app/core/entity/ProxyCache.ts
Normal file
@@ -0,0 +1,43 @@
|
||||
import { Entity, type EntityData } from './Entity.ts';
|
||||
import { isPkgManifest, type DIST_NAMES } from './Package.ts';
|
||||
import type { EasyData } from '../util/EntityUtil.ts';
|
||||
import { PROXY_CACHE_DIR_NAME } from '../../common/constants.ts';
|
||||
interface ProxyCacheData extends EntityData {
|
||||
fullname: string;
|
||||
fileType: DIST_NAMES;
|
||||
version?: string;
|
||||
}
|
||||
|
||||
export type CreateProxyCacheData = Omit<
|
||||
EasyData<ProxyCacheData, 'id'>,
|
||||
'id' | 'filePath'
|
||||
>;
|
||||
|
||||
export class ProxyCache extends Entity {
|
||||
readonly fullname: string;
|
||||
readonly fileType: DIST_NAMES;
|
||||
readonly filePath: string;
|
||||
readonly version?: string;
|
||||
|
||||
constructor(data: ProxyCacheData) {
|
||||
super(data);
|
||||
this.fullname = data.fullname;
|
||||
this.fileType = data.fileType;
|
||||
this.version = data.version;
|
||||
if (isPkgManifest(data.fileType)) {
|
||||
this.filePath = `/${PROXY_CACHE_DIR_NAME}/${data.fullname}/${data.fileType}`;
|
||||
} else {
|
||||
this.filePath = `/${PROXY_CACHE_DIR_NAME}/${data.fullname}/${data.version}/${data.fileType}`;
|
||||
}
|
||||
}
|
||||
|
||||
public static create(data: CreateProxyCacheData): ProxyCache {
|
||||
const newData = { ...data, createdAt: new Date(), updatedAt: new Date() };
|
||||
return new ProxyCache(newData);
|
||||
}
|
||||
|
||||
public static update(data: ProxyCache): ProxyCache {
|
||||
data.updatedAt = new Date();
|
||||
return data;
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
import { Entity, EntityData } from './Entity';
|
||||
import { EasyData, EntityUtil } from '../util/EntityUtil';
|
||||
import type { RegistryType } from '../../common/enum/Registry';
|
||||
import { Entity, type EntityData } from './Entity.ts';
|
||||
import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
|
||||
import type { RegistryType } from '../../common/enum/Registry.ts';
|
||||
|
||||
interface RegistryData extends EntityData {
|
||||
name: string;
|
||||
@@ -9,9 +9,13 @@ interface RegistryData extends EntityData {
|
||||
changeStream: string;
|
||||
userPrefix: string;
|
||||
type: RegistryType;
|
||||
authToken?: string;
|
||||
}
|
||||
|
||||
export type CreateRegistryData = Omit<EasyData<RegistryData, 'registryId'>, 'id'>;
|
||||
export type CreateRegistryData = Omit<
|
||||
EasyData<RegistryData, 'registryId'>,
|
||||
'id'
|
||||
>;
|
||||
|
||||
export class Registry extends Entity {
|
||||
name: string;
|
||||
@@ -20,6 +24,7 @@ export class Registry extends Entity {
|
||||
changeStream: string;
|
||||
userPrefix: string;
|
||||
type: RegistryType;
|
||||
authToken?: string;
|
||||
|
||||
constructor(data: RegistryData) {
|
||||
super(data);
|
||||
@@ -29,10 +34,14 @@ export class Registry extends Entity {
|
||||
this.changeStream = data.changeStream;
|
||||
this.userPrefix = data.userPrefix;
|
||||
this.type = data.type;
|
||||
this.authToken = data.authToken;
|
||||
}
|
||||
|
||||
public static create(data: CreateRegistryData): Registry {
|
||||
const newData = EntityUtil.defaultData(data, 'registryId');
|
||||
const newData = EntityUtil.defaultData<RegistryData, 'registryId'>(
|
||||
data,
|
||||
'registryId'
|
||||
);
|
||||
return new Registry(newData);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { Entity, EntityData } from './Entity';
|
||||
import { EasyData, EntityUtil } from '../util/EntityUtil';
|
||||
import { Entity, type EntityData } from './Entity.ts';
|
||||
import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
|
||||
|
||||
interface ScopeData extends EntityData {
|
||||
name: string;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { Range, Comparator } from 'semver';
|
||||
import { PaddingSemVer } from './PaddingSemVer';
|
||||
import { Comparator, Range } from 'semver';
|
||||
import { PaddingSemVer } from './PaddingSemVer.ts';
|
||||
|
||||
const OPERATOR_MAP = {
|
||||
'<': '$lt',
|
||||
@@ -21,7 +21,8 @@ export class SqlRange {
|
||||
}
|
||||
|
||||
private comparatorToSql(comparator: Comparator) {
|
||||
if (comparator.semver === (Comparator as any).ANY) {
|
||||
// @ts-expect-error type definition is not correct
|
||||
if (comparator.semver === Comparator.ANY) {
|
||||
return {
|
||||
$and: [
|
||||
{
|
||||
@@ -38,11 +39,13 @@ export class SqlRange {
|
||||
};
|
||||
}
|
||||
const paddingSemver = new PaddingSemVer(comparator.semver);
|
||||
const operator = OPERATOR_MAP[comparator.operator as keyof typeof OPERATOR_MAP];
|
||||
const operator =
|
||||
OPERATOR_MAP[comparator.operator as keyof typeof OPERATOR_MAP];
|
||||
if (!operator) {
|
||||
throw new Error(`unknown operator ${comparator.operator}`);
|
||||
}
|
||||
this._containPreRelease = this._containPreRelease || paddingSemver.isPreRelease;
|
||||
this._containPreRelease =
|
||||
this._containPreRelease || paddingSemver.isPreRelease;
|
||||
return {
|
||||
$and: [
|
||||
{
|
||||
@@ -59,8 +62,8 @@ export class SqlRange {
|
||||
};
|
||||
}
|
||||
|
||||
private comparatorSetToSql(comparatorSet: Array<Comparator>) {
|
||||
const condition: Array<object> = [];
|
||||
private comparatorSetToSql(comparatorSet: Comparator[]) {
|
||||
const condition: object[] = [];
|
||||
for (const comparator of comparatorSet) {
|
||||
condition.push(this.comparatorToSql(comparator));
|
||||
}
|
||||
@@ -68,7 +71,7 @@ export class SqlRange {
|
||||
}
|
||||
|
||||
private generateWhere() {
|
||||
const conditions: Array<object> = [];
|
||||
const conditions: object[] = [];
|
||||
for (const rangeSet of this.range.set) {
|
||||
conditions.push(this.comparatorSetToSql(rangeSet as Comparator[]));
|
||||
}
|
||||
|
||||
@@ -1,16 +1,22 @@
|
||||
import os from 'os';
|
||||
import path from 'path';
|
||||
import { Entity, EntityData } from './Entity';
|
||||
import { EasyData, EntityUtil } from '../util/EntityUtil';
|
||||
import { TaskType, TaskState } from '../../common/enum/Task';
|
||||
import dayjs from '../../common/dayjs';
|
||||
import { HookEvent } from './HookEvent';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
|
||||
import { InternalServerError } from 'egg/errors';
|
||||
|
||||
import { Entity, type EntityData } from './Entity.ts';
|
||||
import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
|
||||
import { TaskState, TaskType } from '../../common/enum/Task.ts';
|
||||
import { PROXY_CACHE_DIR_NAME } from '../../common/constants.ts';
|
||||
import dayjs from '../../common/dayjs.ts';
|
||||
import type { HookEvent } from './HookEvent.ts';
|
||||
import { isPkgManifest, type DIST_NAMES } from './Package.ts';
|
||||
|
||||
export const HOST_NAME = os.hostname();
|
||||
export const PID = process.pid;
|
||||
|
||||
export interface TaskBaseData {
|
||||
taskWorker: string;
|
||||
shouldNotMerge?: boolean;
|
||||
}
|
||||
|
||||
export interface TaskData<T = TaskBaseData> extends EntityData {
|
||||
@@ -28,18 +34,23 @@ export interface TaskData<T = TaskBaseData> extends EntityData {
|
||||
bizId?: string;
|
||||
}
|
||||
|
||||
export type SyncPackageTaskOptions = {
|
||||
export interface SyncPackageTaskOptions {
|
||||
authorId?: string;
|
||||
authorIp?: string;
|
||||
remoteAuthToken?: string;
|
||||
tips?: string;
|
||||
skipDependencies?: boolean;
|
||||
syncDownloadData?: boolean;
|
||||
// force sync history version
|
||||
forceSyncHistory?: boolean;
|
||||
registryId?: string;
|
||||
specificVersions?: Array<string>;
|
||||
};
|
||||
specificVersions?: string[];
|
||||
}
|
||||
|
||||
export interface UpdateProxyCacheTaskOptions {
|
||||
fullname: string;
|
||||
version?: string;
|
||||
fileType: DIST_NAMES;
|
||||
}
|
||||
|
||||
export interface CreateHookTaskData extends TaskBaseData {
|
||||
hookEvent: HookEvent;
|
||||
@@ -52,20 +63,28 @@ export interface TriggerHookTaskData extends TaskBaseData {
|
||||
}
|
||||
|
||||
export interface CreateSyncPackageTaskData extends TaskBaseData {
|
||||
remoteAuthToken?: string;
|
||||
tips?: string;
|
||||
skipDependencies?: boolean;
|
||||
syncDownloadData?: boolean;
|
||||
forceSyncHistory?: boolean;
|
||||
specificVersions?: Array<string>;
|
||||
specificVersions?: string[];
|
||||
}
|
||||
|
||||
export interface CreateUpdateProxyCacheTaskData extends TaskBaseData {
|
||||
fullname: string;
|
||||
version?: string;
|
||||
fileType: DIST_NAMES;
|
||||
filePath: string;
|
||||
}
|
||||
|
||||
export type SyncBinaryTaskData = Record<string, unknown> & TaskBaseData;
|
||||
|
||||
export interface ChangesStreamTaskData extends TaskBaseData {
|
||||
since: string;
|
||||
last_package?: string,
|
||||
last_package_created?: Date,
|
||||
task_count?: number,
|
||||
registryId?: string,
|
||||
last_package?: string;
|
||||
last_package_created?: Date;
|
||||
task_count?: number;
|
||||
registryId?: string;
|
||||
}
|
||||
|
||||
export interface TaskUpdateCondition {
|
||||
@@ -77,6 +96,8 @@ export type CreateHookTask = Task<CreateHookTaskData>;
|
||||
export type TriggerHookTask = Task<TriggerHookTaskData>;
|
||||
export type CreateSyncPackageTask = Task<CreateSyncPackageTaskData>;
|
||||
export type ChangesStreamTask = Task<ChangesStreamTaskData>;
|
||||
export type CreateUpdateProxyCacheTask = Task<CreateUpdateProxyCacheTaskData>;
|
||||
export type SyncBinaryTask = Task<SyncBinaryTaskData>;
|
||||
|
||||
export class Task<T extends TaskBaseData = TaskBaseData> extends Entity {
|
||||
taskId: string;
|
||||
@@ -118,12 +139,17 @@ export class Task<T extends TaskBaseData = TaskBaseData> extends Entity {
|
||||
this.data.taskWorker = `${HOST_NAME}:${PID}`;
|
||||
}
|
||||
|
||||
private static create<T extends TaskBaseData>(data: EasyData<TaskData<T>, 'taskId'>): Task<T> {
|
||||
private static create<T extends TaskBaseData>(
|
||||
data: EasyData<TaskData<T>, 'taskId'>
|
||||
): Task<T> {
|
||||
const newData = EntityUtil.defaultData(data, 'taskId');
|
||||
return new Task(newData);
|
||||
}
|
||||
|
||||
public static createSyncPackage(fullname: string, options?: SyncPackageTaskOptions): CreateSyncPackageTask {
|
||||
public static createSyncPackage(
|
||||
fullname: string,
|
||||
options?: SyncPackageTaskOptions
|
||||
): CreateSyncPackageTask {
|
||||
const data = {
|
||||
type: TaskType.SyncPackage,
|
||||
state: TaskState.Waiting,
|
||||
@@ -133,7 +159,6 @@ export class Task<T extends TaskBaseData = TaskBaseData> extends Entity {
|
||||
data: {
|
||||
// task execute worker
|
||||
taskWorker: '',
|
||||
remoteAuthToken: options?.remoteAuthToken,
|
||||
tips: options?.tips,
|
||||
registryId: options?.registryId ?? '',
|
||||
skipDependencies: options?.skipDependencies,
|
||||
@@ -147,7 +172,11 @@ export class Task<T extends TaskBaseData = TaskBaseData> extends Entity {
|
||||
return task;
|
||||
}
|
||||
|
||||
public static createChangesStream(targetName: string, registryId = '', since = ''): ChangesStreamTask {
|
||||
public static createChangesStream(
|
||||
targetName: string,
|
||||
registryId = '',
|
||||
since = ''
|
||||
): ChangesStreamTask {
|
||||
const data = {
|
||||
type: TaskType.ChangesStream,
|
||||
state: TaskState.Waiting,
|
||||
@@ -195,7 +224,10 @@ export class Task<T extends TaskBaseData = TaskBaseData> extends Entity {
|
||||
return task;
|
||||
}
|
||||
|
||||
public static createTriggerHookTask(hookEvent: HookEvent, hookId: string): TriggerHookTask {
|
||||
public static createTriggerHookTask(
|
||||
hookEvent: HookEvent,
|
||||
hookId: string
|
||||
): TriggerHookTask {
|
||||
const data = {
|
||||
type: TaskType.TriggerHook,
|
||||
state: TaskState.Waiting,
|
||||
@@ -215,7 +247,10 @@ export class Task<T extends TaskBaseData = TaskBaseData> extends Entity {
|
||||
return task;
|
||||
}
|
||||
|
||||
public static createSyncBinary(targetName: string, lastData: any): Task {
|
||||
public static createSyncBinary(
|
||||
targetName: string,
|
||||
lastData?: Record<string, unknown>
|
||||
): Task {
|
||||
const data = {
|
||||
type: TaskType.SyncBinary,
|
||||
state: TaskState.Waiting,
|
||||
@@ -234,6 +269,45 @@ export class Task<T extends TaskBaseData = TaskBaseData> extends Entity {
|
||||
return task;
|
||||
}
|
||||
|
||||
needMergeWhenWaiting(): boolean {
|
||||
// 历史任务补偿时,将 shouldNotMerge 设置为 true,避免合并
|
||||
// 补偿任务单独执行
|
||||
if (this.data.shouldNotMerge === true) {
|
||||
return false;
|
||||
}
|
||||
// 仅合并二进制镜像与 npm 包
|
||||
return [TaskType.SyncBinary, TaskType.SyncPackage].includes(this.type);
|
||||
}
|
||||
|
||||
public static createUpdateProxyCache(
|
||||
targetName: string,
|
||||
options: UpdateProxyCacheTaskOptions
|
||||
): CreateUpdateProxyCacheTask {
|
||||
if (!isPkgManifest(options.fileType)) {
|
||||
throw new InternalServerError(
|
||||
'should not update package version manifest.'
|
||||
);
|
||||
}
|
||||
const filePath = `/${PROXY_CACHE_DIR_NAME}/${options.fullname}/${options.fileType}`;
|
||||
const data = {
|
||||
type: TaskType.UpdateProxyCache,
|
||||
state: TaskState.Waiting,
|
||||
targetName,
|
||||
authorId: `pid_${PID}`,
|
||||
authorIp: HOST_NAME,
|
||||
data: {
|
||||
taskWorker: '',
|
||||
fullname: options.fullname,
|
||||
version: options?.version,
|
||||
fileType: options.fileType,
|
||||
filePath,
|
||||
},
|
||||
};
|
||||
const task = this.create(data);
|
||||
task.logPath = `/${PROXY_CACHE_DIR_NAME}/${options.fullname}/update-manifest-log/${options.fileType.split('.json')[0]}-${dayjs().format('YYYY/MM/DDHHmm')}-${task.taskId}.log`;
|
||||
return task;
|
||||
}
|
||||
|
||||
start(): TaskUpdateCondition {
|
||||
const condition = {
|
||||
taskId: this.taskId,
|
||||
@@ -246,8 +320,8 @@ export class Task<T extends TaskBaseData = TaskBaseData> extends Entity {
|
||||
}
|
||||
}
|
||||
|
||||
export type SyncInfo = {
|
||||
export interface SyncInfo {
|
||||
lastSince: string;
|
||||
taskCount: number;
|
||||
lastPackage?: string;
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import dayjs from 'dayjs';
|
||||
import { Entity, EntityData } from './Entity';
|
||||
import { EasyData, EntityUtil } from '../util/EntityUtil';
|
||||
|
||||
import { Entity, type EntityData } from './Entity.ts';
|
||||
import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
|
||||
|
||||
export enum TokenType {
|
||||
granular = 'granular',
|
||||
@@ -17,7 +18,7 @@ interface BaseTokenData extends EntityData {
|
||||
lastUsedAt?: Date;
|
||||
}
|
||||
|
||||
interface ClassicTokenData extends BaseTokenData{
|
||||
interface ClassicTokenData extends BaseTokenData {
|
||||
isAutomation?: boolean;
|
||||
}
|
||||
interface GranularTokenData extends BaseTokenData {
|
||||
@@ -31,7 +32,9 @@ interface GranularTokenData extends BaseTokenData {
|
||||
|
||||
type TokenData = ClassicTokenData | GranularTokenData;
|
||||
|
||||
export function isGranularToken(data: TokenData | Token): data is GranularTokenData {
|
||||
export function isGranularToken(
|
||||
data: TokenData | Token
|
||||
): data is GranularTokenData {
|
||||
return data.type === TokenType.granular;
|
||||
}
|
||||
|
||||
@@ -51,7 +54,7 @@ export class Token extends Entity {
|
||||
readonly expires?: number;
|
||||
lastUsedAt: Date | null;
|
||||
allowedPackages?: string[];
|
||||
token?: string;
|
||||
token: string;
|
||||
|
||||
constructor(data: TokenData) {
|
||||
super(data);
|
||||
@@ -79,9 +82,10 @@ export class Token extends Entity {
|
||||
static create(data: EasyData<TokenData, 'tokenId'>): Token {
|
||||
const newData = EntityUtil.defaultData(data, 'tokenId');
|
||||
if (isGranularToken(newData) && !newData.expiredAt) {
|
||||
newData.expiredAt = dayjs(newData.createdAt).add(newData.expires, 'days').toDate();
|
||||
newData.expiredAt = dayjs(newData.createdAt)
|
||||
.add(newData.expires, 'days')
|
||||
.toDate();
|
||||
}
|
||||
return new Token(newData);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { Entity, EntityData } from './Entity';
|
||||
import { EasyData, EntityUtil } from '../util/EntityUtil';
|
||||
import { cleanUserPrefix } from '../../common/PackageUtil';
|
||||
import { Entity, type EntityData } from './Entity.ts';
|
||||
import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
|
||||
import { cleanUserPrefix } from '../../common/PackageUtil.ts';
|
||||
|
||||
interface UserData extends EntityData {
|
||||
userId: string;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { Entity, EntityData } from './Entity';
|
||||
import { EasyData, EntityUtil } from '../util/EntityUtil';
|
||||
import { Entity, type EntityData } from './Entity.ts';
|
||||
import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
|
||||
|
||||
interface WebauthnCredentialData extends EntityData {
|
||||
wancId: string;
|
||||
@@ -25,7 +25,9 @@ export class WebauthnCredential extends Entity {
|
||||
this.browserType = data.browserType;
|
||||
}
|
||||
|
||||
static create(data: EasyData<WebauthnCredentialData, 'wancId'>): WebauthnCredential {
|
||||
static create(
|
||||
data: EasyData<WebauthnCredentialData, 'wancId'>
|
||||
): WebauthnCredential {
|
||||
const newData = EntityUtil.defaultData(data, 'wancId');
|
||||
return new WebauthnCredential(newData);
|
||||
}
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import { Event, Inject } from '@eggjs/tegg';
|
||||
import { EggLogger } from 'egg';
|
||||
import { PACKAGE_VERSION_ADDED } from './index';
|
||||
import { BUG_VERSIONS } from '../../common/constants';
|
||||
import { BugVersionService } from '../service/BugVersionService';
|
||||
import { Event, Inject, Logger } from 'egg';
|
||||
|
||||
import { PACKAGE_VERSION_ADDED } from './index.ts';
|
||||
import { BUG_VERSIONS } from '../../common/constants.ts';
|
||||
import type { BugVersionService } from '../service/BugVersionService.ts';
|
||||
|
||||
@Event(PACKAGE_VERSION_ADDED)
|
||||
export class BugVersionFixHandler {
|
||||
@@ -10,7 +10,7 @@ export class BugVersionFixHandler {
|
||||
private readonly bugVersionService: BugVersionService;
|
||||
|
||||
@Inject()
|
||||
private readonly logger: EggLogger;
|
||||
private readonly logger: Logger;
|
||||
|
||||
async handle(fullname: string) {
|
||||
if (fullname !== BUG_VERSIONS) return;
|
||||
|
||||
@@ -1,18 +1,19 @@
|
||||
import { Event, Inject } from '@eggjs/tegg';
|
||||
import { Event, Inject } from 'egg';
|
||||
|
||||
import {
|
||||
PACKAGE_UNPUBLISHED,
|
||||
PACKAGE_BLOCKED,
|
||||
PACKAGE_UNBLOCKED,
|
||||
PACKAGE_VERSION_ADDED,
|
||||
PACKAGE_VERSION_REMOVED,
|
||||
PACKAGE_TAG_ADDED,
|
||||
PACKAGE_TAG_CHANGED,
|
||||
PACKAGE_TAG_REMOVED,
|
||||
PACKAGE_MAINTAINER_CHANGED,
|
||||
PACKAGE_MAINTAINER_REMOVED,
|
||||
PACKAGE_META_CHANGED,
|
||||
} from './index';
|
||||
import { CacheService } from '../../core/service/CacheService';
|
||||
PACKAGE_TAG_ADDED,
|
||||
PACKAGE_TAG_CHANGED,
|
||||
PACKAGE_TAG_REMOVED,
|
||||
PACKAGE_UNBLOCKED,
|
||||
PACKAGE_UNPUBLISHED,
|
||||
PACKAGE_VERSION_ADDED,
|
||||
PACKAGE_VERSION_REMOVED,
|
||||
} from './index.ts';
|
||||
import type { CacheService } from '../../core/service/CacheService.ts';
|
||||
|
||||
class CacheCleanerEvent {
|
||||
@Inject()
|
||||
@@ -24,77 +25,77 @@ class CacheCleanerEvent {
|
||||
}
|
||||
|
||||
@Event(PACKAGE_UNPUBLISHED)
|
||||
export class PackageUnpublished extends CacheCleanerEvent {
|
||||
export class PackageUnpublishedCacheCleanEvent extends CacheCleanerEvent {
|
||||
async handle(fullname: string) {
|
||||
await this.removeCache(fullname);
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_BLOCKED)
|
||||
export class PackageBlocked extends CacheCleanerEvent {
|
||||
export class PackageBlockedCacheCleanEvent extends CacheCleanerEvent {
|
||||
async handle(fullname: string) {
|
||||
await this.removeCache(fullname);
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_UNBLOCKED)
|
||||
export class PackageUnblocked extends CacheCleanerEvent {
|
||||
export class PackageUnblockedCacheCleanEvent extends CacheCleanerEvent {
|
||||
async handle(fullname: string) {
|
||||
await this.removeCache(fullname);
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_VERSION_ADDED)
|
||||
export class PackageVersionAdded extends CacheCleanerEvent {
|
||||
export class PackageVersionAddedCacheCleanEvent extends CacheCleanerEvent {
|
||||
async handle(fullname: string) {
|
||||
await this.removeCache(fullname);
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_VERSION_REMOVED)
|
||||
export class PackageVersionRemoved extends CacheCleanerEvent {
|
||||
export class PackageVersionRemovedCacheCleanEvent extends CacheCleanerEvent {
|
||||
async handle(fullname: string) {
|
||||
await this.removeCache(fullname);
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_TAG_ADDED)
|
||||
export class PackageTagAdded extends CacheCleanerEvent {
|
||||
export class PackageTagAddedCacheCleanEvent extends CacheCleanerEvent {
|
||||
async handle(fullname: string) {
|
||||
await this.removeCache(fullname);
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_TAG_CHANGED)
|
||||
export class PackageTagChanged extends CacheCleanerEvent {
|
||||
export class PackageTagChangedCacheCleanEvent extends CacheCleanerEvent {
|
||||
async handle(fullname: string) {
|
||||
await this.removeCache(fullname);
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_TAG_REMOVED)
|
||||
export class PackageTagRemoved extends CacheCleanerEvent {
|
||||
export class PackageTagRemovedCacheCleanEvent extends CacheCleanerEvent {
|
||||
async handle(fullname: string) {
|
||||
await this.removeCache(fullname);
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_MAINTAINER_CHANGED)
|
||||
export class PackageMaintainerChanged extends CacheCleanerEvent {
|
||||
export class PackageMaintainerChangedCacheCleanEvent extends CacheCleanerEvent {
|
||||
async handle(fullname: string) {
|
||||
await this.removeCache(fullname);
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_MAINTAINER_REMOVED)
|
||||
export class PackageMaintainerRemoved extends CacheCleanerEvent {
|
||||
export class PackageMaintainerRemovedCacheCleanEvent extends CacheCleanerEvent {
|
||||
async handle(fullname: string) {
|
||||
await this.removeCache(fullname);
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_META_CHANGED)
|
||||
export class PackageMetaChanged extends CacheCleanerEvent {
|
||||
export class PackageMetaChangedCacheCleanEvent extends CacheCleanerEvent {
|
||||
async handle(fullname: string) {
|
||||
await this.removeCache(fullname);
|
||||
}
|
||||
|
||||
@@ -1,22 +1,24 @@
|
||||
import { EggAppConfig } from 'egg';
|
||||
import { Event, Inject } from '@eggjs/tegg';
|
||||
|
||||
import { Event, Inject, Config } from 'egg';
|
||||
|
||||
import {
|
||||
PACKAGE_UNPUBLISHED,
|
||||
PACKAGE_VERSION_ADDED,
|
||||
PACKAGE_VERSION_REMOVED,
|
||||
type PackageMetaChange,
|
||||
PACKAGE_MAINTAINER_CHANGED,
|
||||
PACKAGE_MAINTAINER_REMOVED,
|
||||
PACKAGE_META_CHANGED,
|
||||
PACKAGE_TAG_ADDED,
|
||||
PACKAGE_TAG_CHANGED,
|
||||
PACKAGE_TAG_REMOVED,
|
||||
PACKAGE_MAINTAINER_CHANGED,
|
||||
PACKAGE_MAINTAINER_REMOVED,
|
||||
PACKAGE_META_CHANGED, PackageMetaChange,
|
||||
} from './index';
|
||||
import { ChangeRepository } from '../../repository/ChangeRepository';
|
||||
import { Change } from '../entity/Change';
|
||||
import { HookEvent } from '../entity/HookEvent';
|
||||
import { Task } from '../entity/Task';
|
||||
import { User } from '../entity/User';
|
||||
import { TaskService } from '../service/TaskService';
|
||||
PACKAGE_UNPUBLISHED,
|
||||
PACKAGE_VERSION_ADDED,
|
||||
PACKAGE_VERSION_REMOVED,
|
||||
} from './index.ts';
|
||||
import type { ChangeRepository } from '../../repository/ChangeRepository.ts';
|
||||
import { Change } from '../entity/Change.ts';
|
||||
import { HookEvent } from '../entity/HookEvent.ts';
|
||||
import { Task } from '../entity/Task.ts';
|
||||
import type { User } from '../entity/User.ts';
|
||||
import type { TaskService } from '../service/TaskService.ts';
|
||||
|
||||
class ChangesStreamEvent {
|
||||
@Inject()
|
||||
@@ -26,13 +28,17 @@ class ChangesStreamEvent {
|
||||
protected readonly taskService: TaskService;
|
||||
|
||||
@Inject()
|
||||
protected readonly config: EggAppConfig;
|
||||
protected readonly config: Config;
|
||||
|
||||
protected get hookEnable() {
|
||||
return this.config.cnpmcore.hookEnable;
|
||||
}
|
||||
|
||||
protected async addChange(type: string, fullname: string, data: object): Promise<Change> {
|
||||
protected async addChange(
|
||||
type: string,
|
||||
fullname: string,
|
||||
data: object
|
||||
): Promise<Change> {
|
||||
const change = Change.create({
|
||||
type,
|
||||
targetName: fullname,
|
||||
@@ -44,79 +50,101 @@ class ChangesStreamEvent {
|
||||
}
|
||||
|
||||
@Event(PACKAGE_UNPUBLISHED)
|
||||
export class PackageUnpublished extends ChangesStreamEvent {
|
||||
export class PackageUnpublishedChangesStreamEvent extends ChangesStreamEvent {
|
||||
async handle(fullname: string) {
|
||||
const change = await this.addChange(PACKAGE_UNPUBLISHED, fullname, {});
|
||||
if (this.hookEnable) {
|
||||
const task = Task.createCreateHookTask(HookEvent.createUnpublishEvent(fullname, change.changeId));
|
||||
const task = Task.createCreateHookTask(
|
||||
HookEvent.createUnpublishEvent(fullname, change.changeId)
|
||||
);
|
||||
await this.taskService.createTask(task, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_VERSION_ADDED)
|
||||
export class PackageVersionAdded extends ChangesStreamEvent {
|
||||
export class PackageVersionAddedChangesStreamEvent extends ChangesStreamEvent {
|
||||
async handle(fullname: string, version: string, tag?: string) {
|
||||
const change = await this.addChange(PACKAGE_VERSION_ADDED, fullname, { version });
|
||||
const change = await this.addChange(PACKAGE_VERSION_ADDED, fullname, {
|
||||
version,
|
||||
});
|
||||
if (this.hookEnable) {
|
||||
const task = Task.createCreateHookTask(HookEvent.createPublishEvent(fullname, change.changeId, version, tag));
|
||||
const task = Task.createCreateHookTask(
|
||||
HookEvent.createPublishEvent(fullname, change.changeId, version, tag)
|
||||
);
|
||||
await this.taskService.createTask(task, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_VERSION_REMOVED)
|
||||
export class PackageVersionRemoved extends ChangesStreamEvent {
|
||||
export class PackageVersionRemovedChangesStreamEvent extends ChangesStreamEvent {
|
||||
async handle(fullname: string, version: string, tag?: string) {
|
||||
const change = await this.addChange(PACKAGE_VERSION_REMOVED, fullname, { version });
|
||||
const change = await this.addChange(PACKAGE_VERSION_REMOVED, fullname, {
|
||||
version,
|
||||
});
|
||||
if (this.hookEnable) {
|
||||
const task = Task.createCreateHookTask(HookEvent.createUnpublishEvent(fullname, change.changeId, version, tag));
|
||||
const task = Task.createCreateHookTask(
|
||||
HookEvent.createUnpublishEvent(fullname, change.changeId, version, tag)
|
||||
);
|
||||
await this.taskService.createTask(task, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_TAG_ADDED)
|
||||
export class PackageTagAdded extends ChangesStreamEvent {
|
||||
export class PackageTagAddedChangesStreamEvent extends ChangesStreamEvent {
|
||||
async handle(fullname: string, tag: string) {
|
||||
const change = await this.addChange(PACKAGE_TAG_ADDED, fullname, { tag });
|
||||
if (this.hookEnable) {
|
||||
const task = Task.createCreateHookTask(HookEvent.createDistTagEvent(fullname, change.changeId, tag));
|
||||
const task = Task.createCreateHookTask(
|
||||
HookEvent.createDistTagEvent(fullname, change.changeId, tag)
|
||||
);
|
||||
await this.taskService.createTask(task, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_TAG_CHANGED)
|
||||
export class PackageTagChanged extends ChangesStreamEvent {
|
||||
export class PackageTagChangedChangesStreamEvent extends ChangesStreamEvent {
|
||||
async handle(fullname: string, tag: string) {
|
||||
const change = await this.addChange(PACKAGE_TAG_CHANGED, fullname, { tag });
|
||||
if (this.hookEnable) {
|
||||
const task = Task.createCreateHookTask(HookEvent.createDistTagEvent(fullname, change.changeId, tag));
|
||||
const task = Task.createCreateHookTask(
|
||||
HookEvent.createDistTagEvent(fullname, change.changeId, tag)
|
||||
);
|
||||
await this.taskService.createTask(task, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_TAG_REMOVED)
|
||||
export class PackageTagRemoved extends ChangesStreamEvent {
|
||||
export class PackageTagRemovedChangesStreamEvent extends ChangesStreamEvent {
|
||||
async handle(fullname: string, tag: string) {
|
||||
const change = await this.addChange(PACKAGE_TAG_REMOVED, fullname, { tag });
|
||||
if (this.hookEnable) {
|
||||
const task = Task.createCreateHookTask(HookEvent.createDistTagRmEvent(fullname, change.changeId, tag));
|
||||
const task = Task.createCreateHookTask(
|
||||
HookEvent.createDistTagRmEvent(fullname, change.changeId, tag)
|
||||
);
|
||||
await this.taskService.createTask(task, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_MAINTAINER_CHANGED)
|
||||
export class PackageMaintainerChanged extends ChangesStreamEvent {
|
||||
export class PackageMaintainerChangedChangesStreamEvent extends ChangesStreamEvent {
|
||||
async handle(fullname: string, maintainers: User[]) {
|
||||
const change = await this.addChange(PACKAGE_MAINTAINER_CHANGED, fullname, {});
|
||||
const change = await this.addChange(
|
||||
PACKAGE_MAINTAINER_CHANGED,
|
||||
fullname,
|
||||
{}
|
||||
);
|
||||
// TODO 应该比较差值,而不是全量推送
|
||||
if (this.hookEnable) {
|
||||
for (const maintainer of maintainers) {
|
||||
const task = Task.createCreateHookTask(HookEvent.createOwnerEvent(fullname, change.changeId, maintainer.name));
|
||||
const task = Task.createCreateHookTask(
|
||||
HookEvent.createOwnerEvent(fullname, change.changeId, maintainer.name)
|
||||
);
|
||||
await this.taskService.createTask(task, true);
|
||||
}
|
||||
}
|
||||
@@ -124,24 +152,36 @@ export class PackageMaintainerChanged extends ChangesStreamEvent {
|
||||
}
|
||||
|
||||
@Event(PACKAGE_MAINTAINER_REMOVED)
|
||||
export class PackageMaintainerRemoved extends ChangesStreamEvent {
|
||||
export class PackageMaintainerRemovedChangesStreamEvent extends ChangesStreamEvent {
|
||||
async handle(fullname: string, maintainer: string) {
|
||||
const change = await this.addChange(PACKAGE_MAINTAINER_REMOVED, fullname, { maintainer });
|
||||
const change = await this.addChange(PACKAGE_MAINTAINER_REMOVED, fullname, {
|
||||
maintainer,
|
||||
});
|
||||
if (this.hookEnable) {
|
||||
const task = Task.createCreateHookTask(HookEvent.createOwnerRmEvent(fullname, change.changeId, maintainer));
|
||||
const task = Task.createCreateHookTask(
|
||||
HookEvent.createOwnerRmEvent(fullname, change.changeId, maintainer)
|
||||
);
|
||||
await this.taskService.createTask(task, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_META_CHANGED)
|
||||
export class PackageMetaChanged extends ChangesStreamEvent {
|
||||
export class PackageMetaChangedChangesStreamEvent extends ChangesStreamEvent {
|
||||
async handle(fullname: string, meta: PackageMetaChange) {
|
||||
const change = await this.addChange(PACKAGE_META_CHANGED, fullname, { ...meta });
|
||||
const change = await this.addChange(PACKAGE_META_CHANGED, fullname, {
|
||||
...meta,
|
||||
});
|
||||
const { deprecateds } = meta;
|
||||
if (this.hookEnable) {
|
||||
for (const deprecated of deprecateds || []) {
|
||||
const task = Task.createCreateHookTask(HookEvent.createDeprecatedEvent(fullname, change.changeId, deprecated.version));
|
||||
const task = Task.createCreateHookTask(
|
||||
HookEvent.createDeprecatedEvent(
|
||||
fullname,
|
||||
change.changeId,
|
||||
deprecated.version
|
||||
)
|
||||
);
|
||||
await this.taskService.createTask(task, true);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,30 +1,38 @@
|
||||
import { Event, Inject } from '@eggjs/tegg';
|
||||
import {
|
||||
EggAppConfig,
|
||||
} from 'egg';
|
||||
import { PACKAGE_VERSION_ADDED } from './index';
|
||||
import { getScopeAndName } from '../../common/PackageUtil';
|
||||
import { PackageVersionManifest as PackageVersionManifestEntity } from '../entity/PackageVersionManifest';
|
||||
import { PackageRepository } from '../../repository/PackageRepository';
|
||||
import { DistRepository } from '../../repository/DistRepository';
|
||||
import { Config, Event, Inject } from 'egg';
|
||||
|
||||
import { PACKAGE_VERSION_ADDED } from './index.ts';
|
||||
import { getScopeAndName } from '../../common/PackageUtil.ts';
|
||||
import { PackageVersionManifest as PackageVersionManifestEntity } from '../entity/PackageVersionManifest.ts';
|
||||
import type { PackageRepository } from '../../repository/PackageRepository.ts';
|
||||
import type { DistRepository } from '../../repository/DistRepository.ts';
|
||||
|
||||
class StoreManifestEvent {
|
||||
@Inject()
|
||||
protected readonly config: EggAppConfig;
|
||||
protected readonly config: Config;
|
||||
@Inject()
|
||||
private readonly packageRepository: PackageRepository;
|
||||
@Inject()
|
||||
private readonly distRepository: DistRepository;
|
||||
|
||||
protected async savePackageVersionManifest(fullname: string, version: string) {
|
||||
if (!this.config.cnpmcore.enableStoreFullPackageVersionManifestsToDatabase) return;
|
||||
protected async savePackageVersionManifest(
|
||||
fullname: string,
|
||||
version: string
|
||||
) {
|
||||
if (!this.config.cnpmcore.enableStoreFullPackageVersionManifestsToDatabase)
|
||||
return;
|
||||
|
||||
const [ scope, name ] = getScopeAndName(fullname);
|
||||
const [scope, name] = getScopeAndName(fullname);
|
||||
const packageId = await this.packageRepository.findPackageId(scope, name);
|
||||
if (!packageId) return;
|
||||
const packageVersion = await this.packageRepository.findPackageVersion(packageId, version);
|
||||
const packageVersion = await this.packageRepository.findPackageVersion(
|
||||
packageId,
|
||||
version
|
||||
);
|
||||
if (!packageVersion) return;
|
||||
const manifest = await this.distRepository.findPackageVersionManifest(packageId, version);
|
||||
const manifest = await this.distRepository.findPackageVersionManifest(
|
||||
packageId,
|
||||
version
|
||||
);
|
||||
if (!manifest) return;
|
||||
const entity = PackageVersionManifestEntity.create({
|
||||
packageId,
|
||||
@@ -36,7 +44,7 @@ class StoreManifestEvent {
|
||||
}
|
||||
|
||||
@Event(PACKAGE_VERSION_ADDED)
|
||||
export class PackageVersionAdded extends StoreManifestEvent {
|
||||
export class PackageVersionAddedStoreManifestEvent extends StoreManifestEvent {
|
||||
async handle(fullname: string, version: string) {
|
||||
await this.savePackageVersionManifest(fullname, version);
|
||||
}
|
||||
|
||||
53
app/core/event/SyncESPackage.ts
Normal file
53
app/core/event/SyncESPackage.ts
Normal file
@@ -0,0 +1,53 @@
|
||||
// TODO sync event
|
||||
import { Config, Event, Inject } from 'egg';
|
||||
import {
|
||||
PACKAGE_BLOCKED,
|
||||
PACKAGE_MAINTAINER_CHANGED,
|
||||
PACKAGE_MAINTAINER_REMOVED,
|
||||
PACKAGE_META_CHANGED,
|
||||
PACKAGE_TAG_ADDED,
|
||||
PACKAGE_TAG_CHANGED,
|
||||
PACKAGE_TAG_REMOVED,
|
||||
PACKAGE_UNBLOCKED,
|
||||
PACKAGE_UNPUBLISHED,
|
||||
PACKAGE_VERSION_ADDED,
|
||||
PACKAGE_VERSION_REMOVED,
|
||||
} from './index.ts';
|
||||
import type { PackageSearchService } from '../service/PackageSearchService.ts';
|
||||
|
||||
class SyncESPackage {
|
||||
@Inject()
|
||||
protected readonly packageSearchService: PackageSearchService;
|
||||
|
||||
@Inject()
|
||||
protected readonly config: Config;
|
||||
|
||||
protected async syncPackage(fullname: string) {
|
||||
if (!this.config.cnpmcore.enableElasticsearch) return;
|
||||
await this.packageSearchService.syncPackage(fullname, true);
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_UNPUBLISHED)
|
||||
@Event(PACKAGE_BLOCKED)
|
||||
export class PackageUnpublishedSyncESEvent extends SyncESPackage {
|
||||
async handle(fullname: string) {
|
||||
if (!this.config.cnpmcore.enableElasticsearch) return;
|
||||
await this.packageSearchService.removePackage(fullname);
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_VERSION_ADDED)
|
||||
@Event(PACKAGE_META_CHANGED)
|
||||
@Event(PACKAGE_VERSION_REMOVED)
|
||||
@Event(PACKAGE_TAG_ADDED)
|
||||
@Event(PACKAGE_TAG_CHANGED)
|
||||
@Event(PACKAGE_TAG_REMOVED)
|
||||
@Event(PACKAGE_MAINTAINER_CHANGED)
|
||||
@Event(PACKAGE_MAINTAINER_REMOVED)
|
||||
@Event(PACKAGE_UNBLOCKED)
|
||||
export class PackageVersionAddedSyncESEvent extends SyncESPackage {
|
||||
async handle(fullname: string) {
|
||||
await this.syncPackage(fullname);
|
||||
}
|
||||
}
|
||||
@@ -1,49 +1,81 @@
|
||||
import { Event, Inject } from '@eggjs/tegg';
|
||||
import { Event, Inject, Config, Logger } from 'egg';
|
||||
import { ForbiddenError } from 'egg/errors';
|
||||
|
||||
import {
|
||||
EggAppConfig,
|
||||
} from 'egg';
|
||||
import { PACKAGE_VERSION_ADDED, PACKAGE_TAG_ADDED, PACKAGE_TAG_CHANGED } from './index';
|
||||
import { getScopeAndName } from '../../common/PackageUtil';
|
||||
import { PackageManagerService } from '../service/PackageManagerService';
|
||||
import { PackageVersionFileService } from '../service/PackageVersionFileService';
|
||||
PACKAGE_TAG_ADDED,
|
||||
PACKAGE_TAG_CHANGED,
|
||||
PACKAGE_VERSION_ADDED,
|
||||
} from './index.ts';
|
||||
import { getScopeAndName } from '../../common/PackageUtil.ts';
|
||||
import type { PackageManagerService } from '../service/PackageManagerService.ts';
|
||||
import type { PackageVersionFileService } from '../service/PackageVersionFileService.ts';
|
||||
|
||||
class SyncPackageVersionFileEvent {
|
||||
@Inject()
|
||||
protected readonly config: EggAppConfig;
|
||||
protected readonly config: Config;
|
||||
@Inject()
|
||||
protected readonly logger: Logger;
|
||||
@Inject()
|
||||
private readonly packageManagerService: PackageManagerService;
|
||||
@Inject()
|
||||
private readonly packageVersionFileService: PackageVersionFileService;
|
||||
|
||||
protected async syncPackageVersionFile(fullname: string, version: string) {
|
||||
// must set enableUnpkg and enableSyncUnpkgFiles = true both
|
||||
if (!this.config.cnpmcore.enableUnpkg) return;
|
||||
if (!this.config.cnpmcore.enableSyncUnpkgFiles) return;
|
||||
// ignore sync on unittest
|
||||
if (this.config.env === 'unittest' && fullname !== '@cnpm/unittest-unpkg-demo') return;
|
||||
const [ scope, name ] = getScopeAndName(fullname);
|
||||
const { packageVersion } = await this.packageManagerService.showPackageVersionByVersionOrTag(
|
||||
scope, name, version);
|
||||
if (
|
||||
this.config.env === 'unittest' &&
|
||||
fullname !== '@cnpm/unittest-unpkg-demo'
|
||||
)
|
||||
return;
|
||||
const [scope, name] = getScopeAndName(fullname);
|
||||
const { packageVersion } =
|
||||
await this.packageManagerService.showPackageVersionByVersionOrTag(
|
||||
scope,
|
||||
name,
|
||||
version
|
||||
);
|
||||
if (!packageVersion) return;
|
||||
await this.packageVersionFileService.syncPackageVersionFiles(packageVersion);
|
||||
try {
|
||||
await this.packageVersionFileService.syncPackageVersionFiles(
|
||||
packageVersion
|
||||
);
|
||||
} catch (err) {
|
||||
if (err instanceof ForbiddenError) {
|
||||
this.logger.info(
|
||||
'[SyncPackageVersionFileEvent.syncPackageVersionFile] ignore sync files, cause: %s',
|
||||
err.message
|
||||
);
|
||||
return;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
protected async syncPackageReadmeToLatestVersion(fullname: string) {
|
||||
const [ scope, name ] = getScopeAndName(fullname);
|
||||
const { pkg, packageVersion } = await this.packageManagerService.showPackageVersionByVersionOrTag(
|
||||
scope, name, 'latest');
|
||||
const [scope, name] = getScopeAndName(fullname);
|
||||
const { pkg, packageVersion } =
|
||||
await this.packageManagerService.showPackageVersionByVersionOrTag(
|
||||
scope,
|
||||
name,
|
||||
'latest'
|
||||
);
|
||||
if (!pkg || !packageVersion) return;
|
||||
await this.packageVersionFileService.syncPackageReadme(pkg, packageVersion);
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_VERSION_ADDED)
|
||||
export class PackageVersionAdded extends SyncPackageVersionFileEvent {
|
||||
export class PackageVersionAddedSyncPackageVersionFileEvent extends SyncPackageVersionFileEvent {
|
||||
async handle(fullname: string, version: string) {
|
||||
await this.syncPackageVersionFile(fullname, version);
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_TAG_ADDED)
|
||||
export class PackageTagAdded extends SyncPackageVersionFileEvent {
|
||||
export class PackageTagAddedSyncPackageVersionFileEvent extends SyncPackageVersionFileEvent {
|
||||
async handle(fullname: string, tag: string) {
|
||||
if (tag !== 'latest') return;
|
||||
await this.syncPackageReadmeToLatestVersion(fullname);
|
||||
@@ -51,7 +83,7 @@ export class PackageTagAdded extends SyncPackageVersionFileEvent {
|
||||
}
|
||||
|
||||
@Event(PACKAGE_TAG_CHANGED)
|
||||
export class PackageTagChanged extends SyncPackageVersionFileEvent {
|
||||
export class PackageTagChangedSyncPackageVersionFileEvent extends SyncPackageVersionFileEvent {
|
||||
async handle(fullname: string, tag: string) {
|
||||
if (tag !== 'latest') return;
|
||||
await this.syncPackageReadmeToLatestVersion(fullname);
|
||||
|
||||
23
app/core/event/TotalHandler.ts
Normal file
23
app/core/event/TotalHandler.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import { Event, Inject } from 'egg';
|
||||
|
||||
import { PACKAGE_ADDED, PACKAGE_VERSION_ADDED } from './index.ts';
|
||||
import type { TotalRepository } from '../../repository/TotalRepository.ts';
|
||||
|
||||
class TotalHandlerEvent {
|
||||
@Inject()
|
||||
protected readonly totalRepository: TotalRepository;
|
||||
}
|
||||
|
||||
@Event(PACKAGE_ADDED)
|
||||
export class PackageAddedTotalHandlerEvent extends TotalHandlerEvent {
|
||||
async handle() {
|
||||
await this.totalRepository.incrementPackageCount();
|
||||
}
|
||||
}
|
||||
|
||||
@Event(PACKAGE_VERSION_ADDED)
|
||||
export class PackageVersionAddedTotalHandlerEvent extends TotalHandlerEvent {
|
||||
async handle() {
|
||||
await this.totalRepository.incrementPackageVersionCount();
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
import '@eggjs/tegg';
|
||||
import { User } from '../entity/User';
|
||||
import type { User } from '../entity/User.ts';
|
||||
|
||||
export const PACKAGE_ADDED = 'PACKAGE_ADDED';
|
||||
export const PACKAGE_UNPUBLISHED = 'PACKAGE_UNPUBLISHED';
|
||||
export const PACKAGE_BLOCKED = 'PACKAGE_BLOCKED';
|
||||
export const PACKAGE_UNBLOCKED = 'PACKAGE_UNBLOCKED';
|
||||
@@ -15,26 +15,43 @@ export const PACKAGE_META_CHANGED = 'PACKAGE_META_CHANGED';
|
||||
|
||||
export interface PackageDeprecated {
|
||||
version: string;
|
||||
deprecated: string;
|
||||
deprecated?: string;
|
||||
}
|
||||
|
||||
export interface PackageMetaChange {
|
||||
deprecateds?: Array<PackageDeprecated>;
|
||||
deprecateds?: PackageDeprecated[];
|
||||
}
|
||||
|
||||
|
||||
declare module '@eggjs/tegg' {
|
||||
declare module 'egg' {
|
||||
interface Events {
|
||||
[PACKAGE_ADDED]: (fullname: string) => Promise<void>;
|
||||
[PACKAGE_UNPUBLISHED]: (fullname: string) => Promise<void>;
|
||||
[PACKAGE_BLOCKED]: (fullname: string) => Promise<void>;
|
||||
[PACKAGE_UNBLOCKED]: (fullname: string) => Promise<void>;
|
||||
[PACKAGE_VERSION_ADDED]: (fullname: string, version: string, tag?: string) => Promise<void>;
|
||||
[PACKAGE_VERSION_REMOVED]: (fullname: string, version: string, tag?: string) => Promise<void>;
|
||||
[PACKAGE_VERSION_ADDED]: (
|
||||
fullname: string,
|
||||
version: string,
|
||||
tag?: string
|
||||
) => Promise<void>;
|
||||
[PACKAGE_VERSION_REMOVED]: (
|
||||
fullname: string,
|
||||
version: string,
|
||||
tag?: string
|
||||
) => Promise<void>;
|
||||
[PACKAGE_TAG_ADDED]: (fullname: string, tag: string) => Promise<void>;
|
||||
[PACKAGE_TAG_CHANGED]: (fullname: string, tag: string) => Promise<void>;
|
||||
[PACKAGE_TAG_REMOVED]: (fullname: string, tag: string) => Promise<void>;
|
||||
[PACKAGE_MAINTAINER_CHANGED]: (fullname: string, maintainers: User[]) => Promise<void>;
|
||||
[PACKAGE_MAINTAINER_REMOVED]: (fullname: string, maintainer: string) => Promise<void>;
|
||||
[PACKAGE_META_CHANGED]: (fullname: string, meta: PackageMetaChange) => Promise<void>;
|
||||
[PACKAGE_MAINTAINER_CHANGED]: (
|
||||
fullname: string,
|
||||
maintainers: User[]
|
||||
) => Promise<void>;
|
||||
[PACKAGE_MAINTAINER_REMOVED]: (
|
||||
fullname: string,
|
||||
maintainer: string
|
||||
) => Promise<void>;
|
||||
[PACKAGE_META_CHANGED]: (
|
||||
fullname: string,
|
||||
meta: PackageMetaChange
|
||||
) => Promise<void>;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,5 +2,6 @@
|
||||
"name": "cnpmcore-core",
|
||||
"eggModule": {
|
||||
"name": "cnpmcoreCore"
|
||||
}
|
||||
},
|
||||
"type": "module"
|
||||
}
|
||||
|
||||
@@ -1,26 +1,33 @@
|
||||
import { rm } from 'fs/promises';
|
||||
import fs from 'node:fs/promises';
|
||||
|
||||
import {
|
||||
AccessLevel,
|
||||
SingletonProto,
|
||||
Inject,
|
||||
EggObjectFactory,
|
||||
} from '@eggjs/tegg';
|
||||
import {
|
||||
EggHttpClient,
|
||||
SingletonProto,
|
||||
type EggObjectFactory,
|
||||
HttpClient,
|
||||
} from 'egg';
|
||||
import fs from 'fs/promises';
|
||||
import binaries, { BinaryName, CategoryName } from '../../../config/binaries';
|
||||
import { NFSAdapter } from '../../common/adapter/NFSAdapter';
|
||||
import { TaskType, TaskState } from '../../common/enum/Task';
|
||||
import { downloadToTempfile } from '../../common/FileUtil';
|
||||
import { BinaryRepository } from '../../repository/BinaryRepository';
|
||||
import { Task } from '../entity/Task';
|
||||
import { Binary } from '../entity/Binary';
|
||||
import { TaskService } from './TaskService';
|
||||
import { AbstractBinary, BinaryItem } from '../../common/adapter/binary/AbstractBinary';
|
||||
import { AbstractService } from '../../common/AbstractService';
|
||||
import { TaskRepository } from '../../repository/TaskRepository';
|
||||
import { BinaryType } from '../../common/enum/Binary';
|
||||
import { sortBy } from 'lodash-es';
|
||||
|
||||
import binaries, {
|
||||
type BinaryName,
|
||||
type CategoryName,
|
||||
} from '../../../config/binaries.ts';
|
||||
import type { BinaryRepository } from '../../repository/BinaryRepository.ts';
|
||||
import { Task, type SyncBinaryTask } from '../entity/Task.ts';
|
||||
import { Binary } from '../entity/Binary.ts';
|
||||
import type { TaskService } from './TaskService.ts';
|
||||
import type { NFSAdapter } from '../../common/adapter/NFSAdapter.ts';
|
||||
import { downloadToTempfile } from '../../common/FileUtil.ts';
|
||||
import { isTimeoutError } from '../../common/ErrorUtil.ts';
|
||||
import {
|
||||
AbstractBinary,
|
||||
type BinaryItem,
|
||||
} from '../../common/adapter/binary/AbstractBinary.ts';
|
||||
import { AbstractService } from '../../common/AbstractService.ts';
|
||||
import { BinaryType } from '../../common/enum/Binary.ts';
|
||||
import { TaskState, TaskType } from '../../common/enum/Task.ts';
|
||||
import { platforms } from '../../common/adapter/binary/PuppeteerBinary.ts';
|
||||
|
||||
function isoNow() {
|
||||
return new Date().toISOString();
|
||||
@@ -35,9 +42,7 @@ export class BinarySyncerService extends AbstractService {
|
||||
@Inject()
|
||||
private readonly taskService: TaskService;
|
||||
@Inject()
|
||||
private readonly taskRepository: TaskRepository;
|
||||
@Inject()
|
||||
private readonly httpclient: EggHttpClient;
|
||||
private readonly httpClient: HttpClient;
|
||||
@Inject()
|
||||
private readonly nfsAdapter: NFSAdapter;
|
||||
@Inject()
|
||||
@@ -46,40 +51,49 @@ export class BinarySyncerService extends AbstractService {
|
||||
// canvas/v2.6.1/canvas-v2.6.1-node-v57-linux-glibc-x64.tar.gz
|
||||
// -> node-canvas-prebuilt/v2.6.1/node-canvas-prebuilt-v2.6.1-node-v57-linux-glibc-x64.tar.gz
|
||||
// canvas 历史版本的 targetName 可能是 category 需要兼容
|
||||
public async findBinary(targetName: BinaryName | CategoryName, parent: string, name: string) {
|
||||
public async findBinary(
|
||||
targetName: BinaryName | CategoryName,
|
||||
parent: string,
|
||||
name: string
|
||||
) {
|
||||
return await this.binaryRepository.findBinary(targetName, parent, name);
|
||||
}
|
||||
|
||||
public async listDirBinaries(binary: Binary) {
|
||||
return await this.binaryRepository.listBinaries(binary.category, `${binary.parent}${binary.name}`);
|
||||
public async listDirBinaries(
|
||||
binary: Binary,
|
||||
options?: {
|
||||
limit: number;
|
||||
since: string;
|
||||
}
|
||||
) {
|
||||
return await this.binaryRepository.listBinaries(
|
||||
binary.category,
|
||||
`${binary.parent}${binary.name}`,
|
||||
options
|
||||
);
|
||||
}
|
||||
|
||||
public async listRootBinaries(binaryName: BinaryName) {
|
||||
// 通常 binaryName 和 category 是一样的,但是有些特殊的 binaryName 会有多个 category,比如 canvas
|
||||
// 所以查询 canvas 的时候,需要将 binaryName 和 category 的数据都查出来
|
||||
const {
|
||||
category,
|
||||
} = binaries[binaryName];
|
||||
const reqs = [
|
||||
this.binaryRepository.listBinaries(binaryName, '/'),
|
||||
];
|
||||
const { category } = binaries[binaryName];
|
||||
const reqs = [this.binaryRepository.listBinaries(binaryName, '/')];
|
||||
if (category && category !== binaryName) {
|
||||
reqs.push(this.binaryRepository.listBinaries(category, '/'));
|
||||
}
|
||||
|
||||
const [
|
||||
rootBinary,
|
||||
categoryBinary,
|
||||
] = await Promise.all(reqs);
|
||||
const [rootBinary, categoryBinary] = await Promise.all(reqs);
|
||||
|
||||
const versions = rootBinary.map(b => b.name);
|
||||
categoryBinary?.forEach(b => {
|
||||
const version = b.name;
|
||||
// 只将没有的版本添加进去
|
||||
if (!versions.includes(version)) {
|
||||
rootBinary.push(b);
|
||||
const versions = new Set(rootBinary.map(b => b.name));
|
||||
if (categoryBinary) {
|
||||
for (const b of categoryBinary) {
|
||||
const version = b.name;
|
||||
// 只将没有的版本添加进去
|
||||
if (!versions.has(version)) {
|
||||
rootBinary.push(b);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return rootBinary;
|
||||
}
|
||||
@@ -88,135 +102,250 @@ export class BinarySyncerService extends AbstractService {
|
||||
return await this.nfsAdapter.getDownloadUrlOrStream(binary.storePath);
|
||||
}
|
||||
|
||||
// SyncBinary 由定时任务每台单机定时触发,手动去重
|
||||
// 添加 bizId 在 db 防止重复,记录 id 错误
|
||||
public async createTask(binaryName: BinaryName, lastData?: any) {
|
||||
const existsTask = await this.taskRepository.findTaskByTargetName(binaryName, TaskType.SyncBinary);
|
||||
if (existsTask) {
|
||||
return existsTask;
|
||||
public async createTask(
|
||||
binaryName: BinaryName,
|
||||
lastData?: Record<string, unknown>
|
||||
) {
|
||||
// chromium-browser-snapshots 产物极大,完整遍历 s3 bucket 耗时会太长
|
||||
// 必须从上次同步的 revision 之后开始遍历
|
||||
// 如果需要补偿数据,可以
|
||||
if (binaryName === 'chromium-browser-snapshots') {
|
||||
lastData = lastData || {};
|
||||
for (const platform of platforms) {
|
||||
if (lastData[platform]) continue;
|
||||
const binaryDir = await this.binaryRepository.findLatestBinaryDir(
|
||||
'chromium-browser-snapshots',
|
||||
`/${platform}/`
|
||||
);
|
||||
if (binaryDir) {
|
||||
lastData[platform] = binaryDir.name.slice(0, -1);
|
||||
}
|
||||
}
|
||||
const latestBinary = await this.binaryRepository.findLatestBinary(
|
||||
'chromium-browser-snapshots'
|
||||
);
|
||||
if (latestBinary && !lastData.lastSyncTime) {
|
||||
lastData.lastSyncTime = latestBinary.date;
|
||||
}
|
||||
}
|
||||
try {
|
||||
return await this.taskService.createTask(Task.createSyncBinary(binaryName, lastData), false);
|
||||
return await this.taskService.createTask(
|
||||
Task.createSyncBinary(binaryName, lastData),
|
||||
false
|
||||
);
|
||||
} catch (e) {
|
||||
this.logger.error('[BinarySyncerService.createTask] binaryName: %s, error: %s', binaryName, e);
|
||||
this.logger.error(
|
||||
'[BinarySyncerService.createTask] binaryName: %s, error: %s',
|
||||
binaryName,
|
||||
e
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
public async findTask(taskId: string) {
|
||||
return await this.taskService.findTask(taskId);
|
||||
public async findTask(taskId: string): Promise<SyncBinaryTask | null> {
|
||||
return (await this.taskService.findTask(taskId)) as SyncBinaryTask;
|
||||
}
|
||||
|
||||
public async findTaskLog(task: Task) {
|
||||
public async findTaskLog(task: SyncBinaryTask) {
|
||||
return await this.taskService.findTaskLog(task);
|
||||
}
|
||||
|
||||
public async findExecuteTask() {
|
||||
return await this.taskService.findExecuteTask(TaskType.SyncBinary);
|
||||
public async findExecuteTask(): Promise<SyncBinaryTask | null> {
|
||||
return (await this.taskService.findExecuteTask(
|
||||
TaskType.SyncBinary
|
||||
)) as SyncBinaryTask;
|
||||
}
|
||||
|
||||
public async executeTask(task: Task) {
|
||||
public async executeTask(task: SyncBinaryTask) {
|
||||
const binaryName = task.targetName as BinaryName;
|
||||
const binaryAdapter = await this.getBinaryAdapter(binaryName);
|
||||
const logUrl = `${this.config.cnpmcore.registry}/-/binary/${binaryName}/syncs/${task.taskId}/log`;
|
||||
let logs: string[] = [];
|
||||
logs.push(`[${isoNow()}] 🚧🚧🚧🚧🚧 Start sync binary "${binaryName}" 🚧🚧🚧🚧🚧`);
|
||||
logs.push(
|
||||
`[${isoNow()}] 🚧🚧🚧🚧🚧 Start sync binary "${binaryName}" 🚧🚧🚧🚧🚧`
|
||||
);
|
||||
if (!binaryAdapter) {
|
||||
task.error = 'unknow binaryName';
|
||||
logs.push(`[${isoNow()}] ❌ Synced "${binaryName}" fail, ${task.error}, log: ${logUrl}`);
|
||||
logs.push(
|
||||
`[${isoNow()}] ❌ Synced "${binaryName}" fail, ${task.error}, log: ${logUrl}`
|
||||
);
|
||||
logs.push(`[${isoNow()}] ❌❌❌❌❌ "${binaryName}" ❌❌❌❌❌`);
|
||||
this.logger.error('[BinarySyncerService.executeTask:fail] taskId: %s, targetName: %s, %s',
|
||||
task.taskId, task.targetName, task.error);
|
||||
this.logger.error(
|
||||
'[BinarySyncerService.executeTask:fail] taskId: %s, targetName: %s, %s',
|
||||
task.taskId,
|
||||
task.targetName,
|
||||
task.error
|
||||
);
|
||||
await this.taskService.finishTask(task, TaskState.Fail, logs.join('\n'));
|
||||
return;
|
||||
}
|
||||
|
||||
await this.taskService.appendTaskLog(task, logs.join('\n'));
|
||||
logs = [];
|
||||
this.logger.info('[BinarySyncerService.executeTask:start] taskId: %s, targetName: %s, log: %s',
|
||||
task.taskId, task.targetName, logUrl);
|
||||
this.logger.info(
|
||||
'[BinarySyncerService.executeTask:start] taskId: %s, targetName: %s, log: %s',
|
||||
task.taskId,
|
||||
task.targetName,
|
||||
logUrl
|
||||
);
|
||||
try {
|
||||
await this.syncDir(binaryAdapter, task, '/');
|
||||
const [hasDownloadError] = await this.syncDir(binaryAdapter, task, '/');
|
||||
logs.push(`[${isoNow()}] 🟢 log: ${logUrl}`);
|
||||
logs.push(`[${isoNow()}] 🟢🟢🟢🟢🟢 "${binaryName}" 🟢🟢🟢🟢🟢`);
|
||||
await this.taskService.finishTask(task, TaskState.Success, logs.join('\n'));
|
||||
this.logger.info('[BinarySyncerService.executeTask:success] taskId: %s, targetName: %s, log: %s',
|
||||
task.taskId, task.targetName, logUrl);
|
||||
} catch (err: any) {
|
||||
task.error = err.message;
|
||||
logs.push(`[${isoNow()}] ❌ Synced "${binaryName}" fail, ${task.error}, log: ${logUrl}`);
|
||||
await this.taskService.finishTask(
|
||||
task,
|
||||
TaskState.Success,
|
||||
logs.join('\n')
|
||||
);
|
||||
// 确保没有下载异常才算 success
|
||||
await binaryAdapter.finishFetch(!hasDownloadError, binaryName);
|
||||
this.logger.info(
|
||||
'[BinarySyncerService.executeTask:success] taskId: %s, targetName: %s, log: %s, hasDownloadError: %s',
|
||||
task.taskId,
|
||||
task.targetName,
|
||||
logUrl,
|
||||
hasDownloadError
|
||||
);
|
||||
} catch (err) {
|
||||
task.error = `${err.name}: ${err.message}`;
|
||||
logs.push(
|
||||
`[${isoNow()}] ❌ Synced "${binaryName}" fail, ${task.error}, log: ${logUrl}`
|
||||
);
|
||||
logs.push(`[${isoNow()}] ❌❌❌❌❌ "${binaryName}" ❌❌❌❌❌`);
|
||||
this.logger.error('[BinarySyncerService.executeTask:fail] taskId: %s, targetName: %s, %s',
|
||||
task.taskId, task.targetName, task.error);
|
||||
this.logger.error(err);
|
||||
if (isTimeoutError(err)) {
|
||||
this.logger.warn(
|
||||
'[BinarySyncerService.executeTask:fail] taskId: %s, targetName: %s, %s',
|
||||
task.taskId,
|
||||
task.targetName,
|
||||
task.error
|
||||
);
|
||||
this.logger.warn(err);
|
||||
} else {
|
||||
this.logger.error(
|
||||
'[BinarySyncerService.executeTask:fail] taskId: %s, targetName: %s, %s',
|
||||
task.taskId,
|
||||
task.targetName,
|
||||
task.error
|
||||
);
|
||||
this.logger.error(err);
|
||||
}
|
||||
await binaryAdapter.finishFetch(false, binaryName);
|
||||
await this.taskService.finishTask(task, TaskState.Fail, logs.join('\n'));
|
||||
}
|
||||
}
|
||||
|
||||
private async syncDir(binaryAdapter: AbstractBinary, task: Task, dir: string, parentIndex = '') {
|
||||
private async syncDir(
|
||||
binaryAdapter: AbstractBinary,
|
||||
task: SyncBinaryTask,
|
||||
dir: string,
|
||||
parentIndex = '',
|
||||
latestVersionParent = '/'
|
||||
) {
|
||||
const binaryName = task.targetName as BinaryName;
|
||||
const result = await binaryAdapter.fetch(dir, binaryName);
|
||||
const result = await binaryAdapter.fetch(dir, binaryName, task.data);
|
||||
let hasDownloadError = false;
|
||||
let hasItems = false;
|
||||
if (result && result.items.length > 0) {
|
||||
hasItems = true;
|
||||
let logs: string[] = [];
|
||||
const newItems = await this.diff(binaryName, dir, result.items);
|
||||
logs.push(`[${isoNow()}][${dir}] 🚧 Syncing diff: ${result.items.length} => ${newItems.length}, Binary class: ${binaryAdapter.constructor.name}`);
|
||||
for (const [ index, { item, reason }] of newItems.entries()) {
|
||||
const { newItems, latestVersionDir } = await this.diff(
|
||||
binaryName,
|
||||
dir,
|
||||
result.items,
|
||||
latestVersionParent
|
||||
);
|
||||
logs.push(
|
||||
`[${isoNow()}][${dir}] 🚧 Syncing diff: ${result.items.length} => ${newItems.length}, Binary class: ${binaryAdapter.constructor.name}`
|
||||
);
|
||||
// re-check latest version
|
||||
for (const [index, { item, reason }] of newItems.entries()) {
|
||||
if (item.isDir) {
|
||||
logs.push(`[${isoNow()}][${dir}] 🚧 [${parentIndex}${index}] Start sync dir ${JSON.stringify(item)}, reason: ${reason}`);
|
||||
logs.push(
|
||||
`[${isoNow()}][${dir}] 🚧 [${parentIndex}${index}] Start sync dir ${JSON.stringify(item)}, reason: ${reason}`
|
||||
);
|
||||
await this.taskService.appendTaskLog(task, logs.join('\n'));
|
||||
logs = [];
|
||||
const [ hasError, hasSubItems ] = await this.syncDir(binaryAdapter, task, `${dir}${item.name}`, `${parentIndex}${index}.`);
|
||||
const [hasError, hasSubItems] = await this.syncDir(
|
||||
binaryAdapter,
|
||||
task,
|
||||
`${dir}${item.name}`,
|
||||
`${parentIndex}${index}.`,
|
||||
latestVersionDir
|
||||
);
|
||||
if (hasError) {
|
||||
hasDownloadError = true;
|
||||
} else {
|
||||
} else if (hasSubItems) {
|
||||
// if any file download error, let dir sync again next time
|
||||
// if empty dir, don't save it
|
||||
if (hasSubItems) {
|
||||
await this.saveBinaryItem(item);
|
||||
}
|
||||
await this.saveBinaryItem(item);
|
||||
}
|
||||
} else {
|
||||
// download to nfs
|
||||
logs.push(`[${isoNow()}][${dir}] 🚧 [${parentIndex}${index}] Downloading ${JSON.stringify(item)}, reason: ${reason}`);
|
||||
logs.push(
|
||||
`[${isoNow()}][${dir}] 🚧 [${parentIndex}${index}] Downloading ${JSON.stringify(item)}, reason: ${reason}`
|
||||
);
|
||||
// skip exists binary file
|
||||
const existsBinary = await this.binaryRepository.findBinary(item.category, item.parent, item.name);
|
||||
const existsBinary = await this.binaryRepository.findBinary(
|
||||
item.category,
|
||||
item.parent,
|
||||
item.name
|
||||
);
|
||||
if (existsBinary && existsBinary.date === item.date) {
|
||||
logs.push(`[${isoNow()}][${dir}] 🟢 [${parentIndex}${index}] binary file exists, skip download, binaryId: ${existsBinary.binaryId}`);
|
||||
this.logger.info('[BinarySyncerService.syncDir:skipDownload] binaryId: %s exists, storePath: %s',
|
||||
existsBinary.binaryId, existsBinary.storePath);
|
||||
logs.push(
|
||||
`[${isoNow()}][${dir}] 🟢 [${parentIndex}${index}] binary file exists, skip download, binaryId: ${existsBinary.binaryId}`
|
||||
);
|
||||
this.logger.info(
|
||||
'[BinarySyncerService.syncDir:skipDownload] binaryId: %s exists, storePath: %s',
|
||||
existsBinary.binaryId,
|
||||
existsBinary.storePath
|
||||
);
|
||||
continue;
|
||||
}
|
||||
await this.taskService.appendTaskLog(task, logs.join('\n'));
|
||||
logs = [];
|
||||
let localFile = '';
|
||||
try {
|
||||
const { tmpfile, headers, timing } =
|
||||
await downloadToTempfile(
|
||||
this.httpclient, this.config.dataDir, item.sourceUrl!, { ignoreDownloadStatuses: item.ignoreDownloadStatuses });
|
||||
const { tmpfile, headers, timing } = await downloadToTempfile(
|
||||
this.httpClient,
|
||||
this.config.dataDir,
|
||||
item.sourceUrl,
|
||||
{ ignoreDownloadStatuses: item.ignoreDownloadStatuses }
|
||||
);
|
||||
const log = `[${isoNow()}][${dir}] 🟢 [${parentIndex}${index}] HTTP content-length: ${headers['content-length']}, timing: ${JSON.stringify(timing)}, ${item.sourceUrl} => ${tmpfile}`;
|
||||
logs.push(log);
|
||||
this.logger.info('[BinarySyncerService.syncDir:downloadToTempfile] %s', log);
|
||||
this.logger.info(
|
||||
'[BinarySyncerService.syncDir:downloadToTempfile] %s',
|
||||
log
|
||||
);
|
||||
localFile = tmpfile;
|
||||
const binary = await this.saveBinaryItem(item, tmpfile);
|
||||
logs.push(`[${isoNow()}][${dir}] 🟢 [${parentIndex}${index}] Synced file success, binaryId: ${binary.binaryId}`);
|
||||
logs.push(
|
||||
`[${isoNow()}][${dir}] 🟢 [${parentIndex}${index}] Synced file success, binaryId: ${binary.binaryId}`
|
||||
);
|
||||
await this.taskService.appendTaskLog(task, logs.join('\n'));
|
||||
logs = [];
|
||||
} catch (err: any) {
|
||||
} catch (err) {
|
||||
if (err.name === 'DownloadNotFoundError') {
|
||||
this.logger.info('Not found %s, skip it', item.sourceUrl);
|
||||
logs.push(`[${isoNow()}][${dir}] 🧪️ [${parentIndex}${index}] Download ${item.sourceUrl} not found, skip it`);
|
||||
logs.push(
|
||||
`[${isoNow()}][${dir}] 🧪️ [${parentIndex}${index}] Download ${item.sourceUrl} not found, skip it`
|
||||
);
|
||||
} else {
|
||||
this.logger.error('Download binary %s %s', item.sourceUrl, err);
|
||||
if (err.name === 'DownloadStatusInvalidError') {
|
||||
this.logger.warn('Download binary %s %s', item.sourceUrl, err);
|
||||
} else {
|
||||
this.logger.error('Download binary %s %s', item.sourceUrl, err);
|
||||
}
|
||||
hasDownloadError = true;
|
||||
logs.push(`[${isoNow()}][${dir}] ❌ [${parentIndex}${index}] Download ${item.sourceUrl} error: ${err}`);
|
||||
logs.push(
|
||||
`[${isoNow()}][${dir}] ❌ [${parentIndex}${index}] Download ${item.sourceUrl} error: ${err}`
|
||||
);
|
||||
}
|
||||
await this.taskService.appendTaskLog(task, logs.join('\n'));
|
||||
logs = [];
|
||||
} finally {
|
||||
if (localFile) {
|
||||
await rm(localFile, { force: true });
|
||||
await fs.rm(localFile, { force: true });
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -224,20 +353,35 @@ export class BinarySyncerService extends AbstractService {
|
||||
if (hasDownloadError) {
|
||||
logs.push(`[${isoNow()}][${dir}] ❌ Synced dir fail`);
|
||||
} else {
|
||||
logs.push(`[${isoNow()}][${dir}] 🟢 Synced dir success`);
|
||||
logs.push(
|
||||
`[${isoNow()}][${dir}] 🟢 Synced dir success, hasItems: ${hasItems}`
|
||||
);
|
||||
}
|
||||
await this.taskService.appendTaskLog(task, logs.join('\n'));
|
||||
}
|
||||
return [ hasDownloadError, hasItems ];
|
||||
return [hasDownloadError, hasItems];
|
||||
}
|
||||
|
||||
private async diff(binaryName: BinaryName, dir: string, fetchItems: BinaryItem[]) {
|
||||
const existsItems = await this.binaryRepository.listBinaries(binaryName, dir);
|
||||
// see https://github.com/cnpm/cnpmcore/issues/556
|
||||
// 上游可能正在发布新版本、同步流程中断,导致同步的时候,文件列表不一致
|
||||
// 如果的当前目录命中 latestVersionParent 父目录,那么就再校验一下当前目录
|
||||
// 如果 existsItems 为空或者经过修改,那么就不需要 revalidate 了
|
||||
private async diff(
|
||||
binaryName: BinaryName,
|
||||
dir: string,
|
||||
fetchItems: BinaryItem[],
|
||||
latestVersionParent = '/'
|
||||
) {
|
||||
const existsItems = await this.binaryRepository.listBinaries(
|
||||
binaryName,
|
||||
dir
|
||||
);
|
||||
const existsMap = new Map<string, Binary>();
|
||||
for (const item of existsItems) {
|
||||
existsMap.set(item.name, item);
|
||||
}
|
||||
const diffItems: { item: Binary; reason: string }[] = [];
|
||||
let latestItem: BinaryItem | undefined;
|
||||
for (const item of fetchItems) {
|
||||
const existsItem = existsMap.get(item.name);
|
||||
if (!existsItem) {
|
||||
@@ -262,9 +406,25 @@ export class BinarySyncerService extends AbstractService {
|
||||
existsItem.sourceUrl = item.url;
|
||||
existsItem.ignoreDownloadStatuses = item.ignoreDownloadStatuses;
|
||||
existsItem.date = item.date;
|
||||
} else if (dir.endsWith(latestVersionParent)) {
|
||||
if (!latestItem) {
|
||||
latestItem = sortBy(fetchItems, ['date']).pop();
|
||||
}
|
||||
const isLatestItem = latestItem?.name === item.name;
|
||||
if (isLatestItem && existsItem.isDir) {
|
||||
diffItems.push({
|
||||
item: existsItem,
|
||||
reason: `revalidate latest version, latest parent dir is ${latestVersionParent}, current dir is ${dir}, current name is ${existsItem.name}`,
|
||||
});
|
||||
latestVersionParent = `${latestVersionParent}${existsItem.name}`;
|
||||
}
|
||||
}
|
||||
}
|
||||
return diffItems;
|
||||
|
||||
return {
|
||||
newItems: diffItems,
|
||||
latestVersionDir: latestVersionParent,
|
||||
};
|
||||
}
|
||||
|
||||
private async saveBinaryItem(binary: Binary, tmpfile?: string) {
|
||||
@@ -272,22 +432,35 @@ export class BinarySyncerService extends AbstractService {
|
||||
const stat = await fs.stat(tmpfile);
|
||||
binary.size = stat.size;
|
||||
await this.nfsAdapter.uploadFile(binary.storePath, tmpfile);
|
||||
this.logger.info('[BinarySyncerService.saveBinaryItem:uploadFile] binaryId: %s, size: %d, %s => %s',
|
||||
binary.binaryId, stat.size, tmpfile, binary.storePath);
|
||||
this.logger.info(
|
||||
'[BinarySyncerService.saveBinaryItem:uploadFile] binaryId: %s, size: %d, %s => %s',
|
||||
binary.binaryId,
|
||||
stat.size,
|
||||
tmpfile,
|
||||
binary.storePath
|
||||
);
|
||||
}
|
||||
await this.binaryRepository.saveBinary(binary);
|
||||
return binary;
|
||||
}
|
||||
|
||||
private async getBinaryAdapter(binaryName: BinaryName): Promise<AbstractBinary | undefined> {
|
||||
private async getBinaryAdapter(
|
||||
binaryName: BinaryName
|
||||
): Promise<AbstractBinary | undefined> {
|
||||
const config = this.config.cnpmcore;
|
||||
const binaryConfig = binaries[binaryName];
|
||||
|
||||
let binaryAdapter: AbstractBinary;
|
||||
if (config.sourceRegistryIsCNpm) {
|
||||
binaryAdapter = await this.eggObjectFactory.getEggObject(AbstractBinary, BinaryType.Api);
|
||||
binaryAdapter = await this.eggObjectFactory.getEggObject(
|
||||
AbstractBinary,
|
||||
BinaryType.Api
|
||||
);
|
||||
} else {
|
||||
binaryAdapter = await this.eggObjectFactory.getEggObject(AbstractBinary, binaryConfig.type);
|
||||
binaryAdapter = await this.eggObjectFactory.getEggObject(
|
||||
AbstractBinary,
|
||||
binaryConfig.type
|
||||
);
|
||||
}
|
||||
await binaryAdapter.initFetch(binaryName);
|
||||
return binaryAdapter;
|
||||
|
||||
@@ -1,13 +1,15 @@
|
||||
import { AccessLevel, SingletonProto, Inject } from '@eggjs/tegg';
|
||||
import { EggLogger } from 'egg';
|
||||
import { AccessLevel, Inject, SingletonProto, Logger } from 'egg';
|
||||
import pMap from 'p-map';
|
||||
import { BugVersion } from '../entity/BugVersion';
|
||||
import { PackageJSONType, PackageRepository } from '../../repository/PackageRepository';
|
||||
import { DistRepository } from '../../repository/DistRepository';
|
||||
import { getScopeAndName } from '../../common/PackageUtil';
|
||||
import { CacheService } from './CacheService';
|
||||
import { BUG_VERSIONS, LATEST_TAG } from '../../common/constants';
|
||||
import { BugVersionStore } from '../../common/adapter/BugVersionStore';
|
||||
import { BugVersion } from '../entity/BugVersion.ts';
|
||||
import type {
|
||||
PackageJSONType,
|
||||
PackageRepository,
|
||||
} from '../../repository/PackageRepository.ts';
|
||||
import type { DistRepository } from '../../repository/DistRepository.ts';
|
||||
import { getScopeAndName } from '../../common/PackageUtil.ts';
|
||||
import type { CacheService } from './CacheService.ts';
|
||||
import { BUG_VERSIONS, LATEST_TAG } from '../../common/constants.ts';
|
||||
import type { BugVersionStore } from '../../common/adapter/BugVersionStore.ts';
|
||||
|
||||
@SingletonProto({
|
||||
accessLevel: AccessLevel.PUBLIC,
|
||||
@@ -20,7 +22,7 @@ export class BugVersionService {
|
||||
private readonly distRepository: DistRepository;
|
||||
|
||||
@Inject()
|
||||
private readonly logger: EggLogger;
|
||||
private readonly logger: Logger;
|
||||
|
||||
@Inject()
|
||||
private readonly cacheService: CacheService;
|
||||
@@ -33,66 +35,109 @@ export class BugVersionService {
|
||||
const pkg = await this.packageRepository.findPackage('', BUG_VERSIONS);
|
||||
if (!pkg) return;
|
||||
/* c8 ignore next 10 */
|
||||
const tag = await this.packageRepository.findPackageTag(pkg!.packageId, LATEST_TAG);
|
||||
const tag = await this.packageRepository.findPackageTag(
|
||||
pkg.packageId,
|
||||
LATEST_TAG
|
||||
);
|
||||
if (!tag) return;
|
||||
let bugVersion = this.bugVersionStore.getBugVersion(tag!.version);
|
||||
let bugVersion = this.bugVersionStore.getBugVersion(tag.version);
|
||||
if (!bugVersion) {
|
||||
const packageVersionJson = (await this.distRepository.findPackageVersionManifest(pkg!.packageId, tag!.version)) as PackageJSONType;
|
||||
const packageVersionJson =
|
||||
(await this.distRepository.findPackageVersionManifest(
|
||||
pkg.packageId,
|
||||
tag.version
|
||||
)) as PackageJSONType;
|
||||
if (!packageVersionJson) return;
|
||||
const data = packageVersionJson.config?.['bug-versions'];
|
||||
bugVersion = new BugVersion(data || {});
|
||||
this.bugVersionStore.setBugVersion(bugVersion, tag!.version);
|
||||
this.bugVersionStore.setBugVersion(bugVersion, tag.version);
|
||||
}
|
||||
return bugVersion;
|
||||
}
|
||||
|
||||
async cleanBugVersionPackageCaches(bugVersion: BugVersion) {
|
||||
const fullnames = bugVersion.listAllPackagesHasBugs();
|
||||
await pMap(fullnames, async fullname => {
|
||||
await this.cacheService.removeCache(fullname);
|
||||
}, {
|
||||
concurrency: 50,
|
||||
stopOnError: false,
|
||||
});
|
||||
await pMap(
|
||||
fullnames,
|
||||
async fullname => {
|
||||
await this.cacheService.removeCache(fullname);
|
||||
},
|
||||
{
|
||||
concurrency: 50,
|
||||
stopOnError: false,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
async fixPackageBugVersions(bugVersion: BugVersion, fullname: string, manifests: Record<string, any>) {
|
||||
async fixPackageBugVersions(
|
||||
bugVersion: BugVersion,
|
||||
fullname: string,
|
||||
// oxlint-disable-next-line typescript-eslint/no-explicit-any
|
||||
manifests: Record<string, any>
|
||||
) {
|
||||
// If package all version unpublished(like pinyin-tool), versions is undefined
|
||||
if (!manifests) return;
|
||||
for (const manifest of Object.values(manifests)) {
|
||||
this.fixPackageBugVersionWithAllVersions(fullname, bugVersion, manifest, manifests);
|
||||
this.fixPackageBugVersionWithAllVersions(
|
||||
fullname,
|
||||
bugVersion,
|
||||
manifest,
|
||||
manifests
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
async fixPackageBugVersion(bugVersion: BugVersion, fullname: string, manifest: any) {
|
||||
async fixPackageBugVersion(
|
||||
bugVersion: BugVersion,
|
||||
fullname: string,
|
||||
// oxlint-disable-next-line typescript-eslint/no-explicit-any
|
||||
manifest: any
|
||||
) {
|
||||
const advice = bugVersion.fixVersion(fullname, manifest.version);
|
||||
if (!advice) {
|
||||
return manifest;
|
||||
}
|
||||
const [ scope, name ] = getScopeAndName(fullname);
|
||||
const [scope, name] = getScopeAndName(fullname);
|
||||
const pkg = await this.packageRepository.findPackage(scope, name);
|
||||
if (!pkg) {
|
||||
return manifest;
|
||||
}
|
||||
const packageVersion = await this.packageRepository.findPackageVersion(pkg.packageId, advice.version);
|
||||
const packageVersion = await this.packageRepository.findPackageVersion(
|
||||
pkg.packageId,
|
||||
advice.version
|
||||
);
|
||||
if (!packageVersion) {
|
||||
return manifest;
|
||||
}
|
||||
const fixedManifest = await this.distRepository.findPackageVersionManifest(packageVersion.packageId, advice.version);
|
||||
const fixedManifest = await this.distRepository.findPackageVersionManifest(
|
||||
packageVersion.packageId,
|
||||
advice.version
|
||||
);
|
||||
if (!fixedManifest) {
|
||||
return manifest;
|
||||
}
|
||||
return bugVersion.fixManifest(manifest, fixedManifest);
|
||||
}
|
||||
|
||||
private fixPackageBugVersionWithAllVersions(fullname: string, bugVersion: BugVersion, manifest: any, manifests: Record<string, any>) {
|
||||
private fixPackageBugVersionWithAllVersions(
|
||||
fullname: string,
|
||||
bugVersion: BugVersion,
|
||||
// oxlint-disable-next-line typescript-eslint/no-explicit-any
|
||||
manifest: any,
|
||||
// oxlint-disable-next-line typescript-eslint/no-explicit-any
|
||||
manifests: Record<string, any>
|
||||
) {
|
||||
const advice = bugVersion.fixVersion(fullname, manifest.version);
|
||||
if (!advice) {
|
||||
return;
|
||||
}
|
||||
const fixedManifest = manifests[advice.version];
|
||||
if (!fixedManifest) {
|
||||
this.logger.warn('[BugVersionService] not found pkg for %s@%s manifest', fullname, advice.version);
|
||||
this.logger.warn(
|
||||
'[BugVersionService] not found pkg for %s@%s manifest',
|
||||
fullname,
|
||||
advice.version
|
||||
);
|
||||
return;
|
||||
}
|
||||
const newManifest = bugVersion.fixManifest(manifest, fixedManifest);
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user