Compare commits
137 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 95543b1f9d | |
| | d7de1cded8 | |
| | 0f11e7730a | |
| | 0d32146562 | |
| | 6c29f084b2 | |
| | 01385f4954 | |
| | 098c75a4ca | |
| | c3059c7138 | |
| | 0c987a7225 | |
| | 89f1250927 | |
| | e1848c71ec | |
| | e8a3ee5208 | |
| | 3671c52513 | |
| | 35a7be34e8 | |
| | 957f43dab1 | |
| | 4213bd8823 | |
| | f3f3584d62 | |
| | 80663505cb | |
| | e5162f20aa | |
| | b6c96defa4 | |
| | 3e1dbd819c | |
| | faee3b96f1 | |
| | 606c983363 | |
| | cb931417c1 | |
| | 324511d159 | |
| | 2e51399db1 | |
| | 5b1da74746 | |
| | c7df471c0e | |
| | c16ec7bad0 | |
| | 17b1b81eea | |
| | bb0bdef070 | |
| | 87da4c359c | |
| | 8587d27d85 | |
| | 13b2da0411 | |
| | 3aa4c688b5 | |
| | 0a5500bafd | |
| | f94531cf35 | |
| | c3a22fd319 | |
| | b23f3fe22e | |
| | 3dbff542ba | |
| | 1a4a1c5cf5 | |
| | 2cc0f361fb | |
| | 722a5d70b9 | |
| | 64951041eb | |
| | 1001e1ffa2 | |
| | b4cf36e289 | |
| | bed4778cbc | |
| | 5b96443cf1 | |
| | e4cd535486 | |
| | 5b98c9dd2b | |
| | e932624c6f | |
| | 69ef574527 | |
| | 5a42764806 | |
| | eaed6fe478 | |
| | 8b341d2299 | |
| | 87b6cbedef | |
| | 3e395405c1 | |
| | b8c7c06536 | |
| | 76e3f267c1 | |
| | 12aa425c26 | |
| | 01f393eb94 | |
| | 490dce3ad2 | |
| | cb3768c82f | |
| | 54e3335abd | |
| | a1a3859d75 | |
| | 1239dc9f49 | |
| | 9efd2932e8 | |
| | f6f53149a1 | |
| | 2e90486454 | |
| | 297bd7a745 | |
| | b8f2ac5f85 | |
| | c5f1642a1f | |
| | 2d8bad3ca2 | |
| | 81620e3ed5 | |
| | de3a6153b0 | |
| | 3203c64c48 | |
| | d6c243cf6b | |
| | 0a6eab325e | |
| | a17aed8fb1 | |
| | 770fc7fdaf | |
| | de97428ffd | |
| | 22da5c7f70 | |
| | f4f09c2d66 | |
| | 3dce867d3a | |
| | af3672ebdb | |
| | e89d8528df | |
| | 58a1a9c2af | |
| | ce0fd9d9c2 | |
| | 080035f7bc | |
| | ffd90473f7 | |
| | facb26d192 | |
| | 53060c817a | |
| | 9f4b8ebb00 | |
| | 5c132b882b | |
| | a1e1072d1a | |
| | 5d5f40ba26 | |
| | b322f2c9ec | |
| | dd5ee4ff30 | |
| | 0c9a515874 | |
| | 884ff50a2f | |
| | 1517750513 | |
| | 64beee1ba1 | |
| | acb988b553 | |
| | ffe723e65f | |
| | ed4d5d07ad | |
| | b92354d280 | |
| | f51f6028ac | |
| | 3f3751b2d5 | |
| | 7938919d81 | |
| | 52c2494355 | |
| | 0d4fec90dd | |
| | b8d055a74e | |
| | 5c8b024a0a | |
| | 914aee2560 | |
| | bc068d165c | |
| | 9ffb09eaa8 | |
| | cbefb5c6d0 | |
| | 0a64698ec0 | |
| | 1922bf2f76 | |
| | 47da2f40cf | |
| | 305457777e | |
| | ae88145317 | |
| | 069afb98cc | |
| | b6c781ec25 | |
| | 05b3b798b6 | |
| | e72e396e3c | |
| | d095d3f48c | |
| | b0f7bf0967 | |
| | 02a1ee35d7 | |
| | 1e9d710b0f | |
| | d18981e658 | |
| | 5103409f40 | |
| | ea4823c017 | |
| | 4427a4fca5 | |
| | 455fc3a444 | |
| | 386974272d | |
| | 9f4f1f1e28 | |
.docker/alpine/Dockerfile (Normal file, +36)

@@ -0,0 +1,36 @@
FROM node:22-alpine

# Create app directory
WORKDIR /usr/src/app

# Install app dependencies
COPY . .

RUN .docker/build.sh

ENV NODE_ENV=production \
    EGG_SERVER_ENV=prod \
    CNPMCORE_CONFIG_REGISTRY= \
    CNPMCORE_CONFIG_SOURCE_REGISTRY=https://registry.npmmirror.com \
    CNPMCORE_CONFIG_SOURCE_REGISTRY_IS_CNPM=true \
    CNPMCORE_DATABASE_TYPE= \
    CNPMCORE_DATABASE_NAME= \
    CNPMCORE_DATABASE_HOST= \
    CNPMCORE_DATABASE_PORT=3306 \
    CNPMCORE_DATABASE_USER= \
    CNPMCORE_DATABASE_PASSWORD= \
    CNPMCORE_REDIS_HOST= \
    CNPMCORE_REDIS_PORT=6379 \
    CNPMCORE_REDIS_PASSWORD= \
    CNPMCORE_REDIS_DB= \
    CNPMCORE_NFS_TYPE=s3 \
    CNPMCORE_NFS_S3_CLIENT_ENDPOINT= \
    CNPMCORE_NFS_S3_CLIENT_BUCKET= \
    CNPMCORE_NFS_S3_CLIENT_ID= \
    CNPMCORE_NFS_S3_CLIENT_SECRET= \
    CNPMCORE_NFS_S3_CLIENT_FORCE_PATH_STYLE=true \
    CNPMCORE_NFS_S3_CLIENT_DISABLE_URL=true \
    TZ=Asia/Shanghai

EXPOSE 7001
CMD ["npm", "run", "start:foreground"]
.docker/build.sh (Executable file, +7)

@@ -0,0 +1,7 @@
#!/bin/sh

node -v && npm -v \
  && npm install -g npminstall --registry=https://registry.npmmirror.com \
  && npminstall -c \
  && npm run tsc \
  && npmupdate -c --production
.docker/debian/Dockerfile (Normal file, +36)

@@ -0,0 +1,36 @@
FROM node:22-bookworm-slim

# Create app directory
WORKDIR /usr/src/app

# Install app dependencies
COPY . .

RUN .docker/build.sh

ENV NODE_ENV=production \
    EGG_SERVER_ENV=prod \
    CNPMCORE_CONFIG_REGISTRY= \
    CNPMCORE_CONFIG_SOURCE_REGISTRY=https://registry.npmmirror.com \
    CNPMCORE_CONFIG_SOURCE_REGISTRY_IS_CNPM=true \
    CNPMCORE_DATABASE_TYPE= \
    CNPMCORE_DATABASE_NAME= \
    CNPMCORE_DATABASE_HOST= \
    CNPMCORE_DATABASE_PORT=3306 \
    CNPMCORE_DATABASE_USER= \
    CNPMCORE_DATABASE_PASSWORD= \
    CNPMCORE_REDIS_HOST= \
    CNPMCORE_REDIS_PORT=6379 \
    CNPMCORE_REDIS_PASSWORD= \
    CNPMCORE_REDIS_DB= \
    CNPMCORE_NFS_TYPE=s3 \
    CNPMCORE_NFS_S3_CLIENT_ENDPOINT= \
    CNPMCORE_NFS_S3_CLIENT_BUCKET= \
    CNPMCORE_NFS_S3_CLIENT_ID= \
    CNPMCORE_NFS_S3_CLIENT_SECRET= \
    CNPMCORE_NFS_S3_CLIENT_FORCE_PATH_STYLE=true \
    CNPMCORE_NFS_S3_CLIENT_DISABLE_URL=true \
    TZ=Asia/Shanghai

EXPOSE 7001
CMD ["npm", "run", "start:foreground"]
.env.example (Normal file, +50)

@@ -0,0 +1,50 @@
# CNPMCORE_DATABASE_TYPE=MySQL
# CNPMCORE_DATABASE_USER=root
# CNPMCORE_DATABASE_PASSWORD=
# CNPMCORE_DATABASE_NAME=cnpmcore

# CNPMCORE_DATABASE_TYPE=PostgreSQL
# CNPMCORE_DATABASE_USER=postgres
# CNPMCORE_DATABASE_PASSWORD=postgres
# CNPMCORE_DATABASE_NAME=cnpmcore

# CNPMCORE_CONFIG_ENABLE_ES=true
# CNPMCORE_CONFIG_ES_CLIENT_NODE=http://localhost:9200
# CNPMCORE_CONFIG_ES_CLIENT_AUTH_USERNAME=elastic
# CNPMCORE_CONFIG_ES_CLIENT_AUTH_PASSWORD=abcdef

# https://github.com/cnpm/cnpmcore/blob/next/docs/elasticsearch-setup.md#%E6%96%B0%E5%BB%BA-env-%E6%96%87%E4%BB%B6
# Password for the 'elastic' user (at least 6 characters)
ELASTIC_PASSWORD="abcdef"

# Password for the 'kibana_system' user (at least 6 characters)
KIBANA_PASSWORD="abcdef"

# Version of Elastic products
STACK_VERSION=8.7.1
# enable for arm64
# STACK_VERSION_ARM64=-arm64
# STACK_PLATFORM=linux/arm64

# Set the cluster name
CLUSTER_NAME=docker-cluster

# Set to 'basic' or 'trial' to automatically start the 30-day trial
LICENSE=basic
#LICENSE=trial

# Port to expose Elasticsearch HTTP API to the host
ES_PORT=9200
#ES_PORT=127.0.0.1:9200

# Port to expose Kibana to the host
KIBANA_PORT=5601
#KIBANA_PORT=80

# Increase or decrease based on the available host memory (in bytes)
ES_MEM_LIMIT=1073741824
KB_MEM_LIMIT=1073741824
LS_MEM_LIMIT=1073741824

# SAMPLE Predefined Key only to be used in POC environments
ENCRYPTION_KEY=c34d38b3a14956121ff2170e5030b471551370178f43e5626eec58b04a30fae2
@@ -1,7 +0,0 @@
app/proxy*
**/*.d.ts
node_modules/
dist/
coverage/
mocks/
.react_entries/
.github/copilot-instructions.md (vendored, Normal file, +564)

@@ -0,0 +1,564 @@
# cnpmcore - Private NPM Registry for Enterprise

cnpmcore is a TypeScript-based private NPM registry implementation built with the Egg.js framework. It provides enterprise-grade package management with support for MySQL/PostgreSQL databases, Redis caching, and optional Elasticsearch.

**ALWAYS reference these instructions first** and fall back to search or bash commands only when you encounter unexpected information that does not match the information here.

## Code Style and Conventions

### Linting and Formatting

- **Linter**: Oxlint (fast Rust-based linter)
- **Formatter**: Prettier with specific configuration
- **Pre-commit hooks**: Husky + lint-staged automatically format and lint on commit

**Code Style Rules:**

```javascript
// From .prettierrc
{
  "singleQuote": true,     // Use single quotes
  "trailingComma": "es5",  // ES5 trailing commas
  "tabWidth": 2,           // 2-space indentation
  "printWidth": 120,       // 120 character line width
  "arrowParens": "avoid"   // Avoid parens when possible
}

// From .oxlintrc.json
{
  "max-params": 6,  // Maximum 6 function parameters
  "no-console": "warn",  // Warn on console usage
  "import/no-anonymous-default-export": "error"
}
```

**Linting Commands:**

```bash
npm run lint       # Check for linting errors
npm run lint:fix   # Auto-fix linting issues
npm run typecheck  # TypeScript type checking without build
```

### TypeScript Conventions

- Use strict TypeScript with comprehensive type definitions
- Avoid `any` types - use proper typing or `unknown`
- Export types and interfaces for reusability
- Use ES modules (`import/export`) syntax throughout
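
As a quick illustration of the `unknown`-over-`any` rule, a hypothetical helper can narrow an untyped value step by step before using it (the function and payload shape below are invented for illustration):

```typescript
// Sketch only: prefer `unknown` plus explicit narrowing over `any`.
function readPackageName(payload: unknown): string {
  if (typeof payload === 'object' && payload !== null && 'name' in payload) {
    const name = (payload as { name: unknown }).name;
    if (typeof name === 'string') return name;
  }
  throw new TypeError('payload.name must be a string');
}
```
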
### Testing Conventions

- Test files use `.test.ts` suffix
- Use `@eggjs/mock` for mocking and testing
- Tests organized to mirror source structure in `test/` directory
- Use `assert` from `node:assert/strict` for assertions
- Mock external dependencies using `mock()` from `@eggjs/mock`

**Test Naming Pattern:**

```typescript
describe('test/path/to/SourceFile.test.ts', () => {
  describe('[HTTP_METHOD /api/path] functionName()', () => {
    it('should handle expected behavior', async () => {
      // Test implementation
    });
  });
});
```

## Domain-Driven Design (DDD) Architecture

cnpmcore follows **Domain-Driven Design** principles with clear separation of concerns:

### Layer Architecture (Dependency Flow)

```
Controller (HTTP Interface Layer)
    ↓ depends on
Service (Business Logic Layer)
    ↓ depends on
Repository (Data Access Layer)
    ↓ depends on
Model (ORM/Database Layer)

Entity (Domain Models - no dependencies, pure business logic)
Common (Utilities and Adapters - available to all layers)
```

### Layer Responsibilities

**Controller Layer** (`app/port/controller/`):
- HTTP request/response handling
- Request validation using `@eggjs/typebox-validate`
- User authentication and authorization
- **NO business logic** - delegate to Services
- Inheritance: `YourController extends AbstractController extends MiddlewareController`

**Service Layer** (`app/core/service/`):
- Core business logic implementation
- Orchestration of multiple repositories and entities
- Transaction management
- Event publishing
- NO HTTP concerns, NO direct database access

**Repository Layer** (`app/repository/`):
- Data access and persistence
- CRUD operations on Models
- Query building and optimization
- NO business logic

**Entity Layer** (`app/core/entity/`):
- Domain models with business behavior
- Pure business logic (no infrastructure dependencies)
- Immutable data structures where possible
- Rich domain objects (not anemic models)

**Model Layer** (`app/repository/model/`):
- ORM definitions using Leoric
- Database schema mapping
- Table and column definitions
- NO business logic

### Repository Method Naming Convention

**ALWAYS follow these naming patterns:**

- `findSomething` - Query a single model/entity
- `saveSomething` - Save (create or update) a model
- `removeSomething` - Delete a model
- `listSomethings` - Query multiple models (use plural)
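
A minimal repository skeleton following this convention might look like the sketch below; the `Thing` entity and the empty method bodies are illustrative placeholders, not code from this repository:

```typescript
// Sketch only: naming-convention illustration with a made-up entity.
import { AccessLevel, SingletonProto } from '@eggjs/tegg';

interface Thing {
  thingId: string;
  name: string;
}

@SingletonProto({ accessLevel: AccessLevel.PUBLIC })
export class ThingRepository {
  // findSomething: query a single entity, null when absent
  async findThing(thingId: string): Promise<Thing | null> {
    return null; // query the Thing model here
  }

  // saveSomething: create or update
  async saveThing(thing: Thing): Promise<void> {}

  // removeSomething: delete
  async removeThing(thingId: string): Promise<void> {}

  // listSomethings: query multiple entities (plural)
  async listThings(name: string): Promise<Thing[]> {
    return []; // query multiple Thing models here
  }
}
```
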
### Request Validation Trilogy

**ALWAYS validate requests in this exact order:**

1. **Request Parameter Validation** - First line of defense

   ```typescript
   // Use @eggjs/typebox-validate for type-safe validation
   // See app/port/typebox.ts for examples
   ```

2. **User Authentication & Token Permissions**

   ```typescript
   // Token roles: 'read' | 'publish' | 'setting'
   const authorizedUser = await this.userRoleManager.requiredAuthorizedUser(ctx, 'publish');
   ```

3. **Resource Authorization** - Prevent horizontal privilege escalation

   ```typescript
   // Example: Ensure user is package maintainer
   await this.userRoleManager.requiredPackageMaintainer(pkg, authorizedUser);
   // Or use convenience method
   const { pkg } = await this.ensurePublishAccess(ctx, fullname);
   ```

### Modifying Database Models

When changing a Model, update **all 3 locations**:
1. SQL migration files: `sql/mysql/*.sql` AND `sql/postgresql/*.sql`
2. ORM Model: `app/repository/model/*.ts`
3. Domain Entity: `app/core/entity/*.ts`

**NEVER auto-generate SQL migrations** - manual review is required for safety.
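
For orientation, an ORM model in this style might look roughly like the sketch below; the `Thing` model and its columns are invented for illustration, and the real models live in `app/repository/model/`:

```typescript
// Hypothetical Leoric model sketch; table and column names are made up.
import { Attribute, Model } from '@eggjs/tegg/orm';
import { Bone, DataTypes } from 'leoric';

@Model()
export class Thing extends Bone {
  @Attribute(DataTypes.BIGINT, { primary: true, autoIncrement: true })
  id: bigint;

  @Attribute(DataTypes.STRING(214))
  name: string;

  @Attribute(DataTypes.DATE, { name: 'gmt_create' })
  createdAt: Date;
}
```

Any such change still needs matching `sql/mysql/*.sql` and `sql/postgresql/*.sql` migrations written by hand.
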
## Prerequisites and Environment Setup

- **Node.js**: Version 20.18.0 or higher (required by engines field in package.json)
- **Database**: MySQL 5.7+ or PostgreSQL 17+
- **Cache**: Redis 6+
- **Optional**: Elasticsearch 8.x for enhanced search capabilities

## Working Effectively

### Bootstrap and Build

```bash
# Install dependencies (takes ~2 minutes)
npm install

# Copy environment configuration
cp .env.example .env

# Lint code (very fast, <1 second)
npm run lint

# Fix linting issues
npm run lint:fix

# Build TypeScript (takes ~6 seconds)
npm run tsc

# Production build (takes ~6 seconds)
npm run tsc:prod
```

### Database Setup - MySQL (Recommended for Development)

```bash
# Start MySQL + Redis services via Docker (takes ~1 minute to pull images initially)
docker compose -f docker-compose.yml up -d

# Verify services are running
docker compose ps

# Initialize database (takes <2 seconds)
CNPMCORE_DATABASE_NAME=cnpmcore bash ./prepare-database-mysql.sh

# For tests, create test database
mysql -h 127.0.0.1 -P 3306 -u root -e "CREATE DATABASE cnpmcore_unittest;"
```

### Database Setup - PostgreSQL (Alternative)

```bash
# Start PostgreSQL + Redis services via Docker
docker compose -f docker-compose-postgres.yml up -d

# Initialize database (takes <1 second)
CNPMCORE_DATABASE_NAME=cnpmcore bash ./prepare-database-postgresql.sh
```

### Development Server

```bash
# MySQL development server (starts in ~20 seconds)
npm run dev
# Server runs on http://127.0.0.1:7001

# PostgreSQL development server
npm run dev:postgresql
# Server runs on http://127.0.0.1:7001
```

### Testing

```bash
# Run full test suite with MySQL - NEVER CANCEL: Takes 4+ minutes. Set timeout to 10+ minutes.
npm run test

# Run full test suite with PostgreSQL - NEVER CANCEL: Takes 4+ minutes. Set timeout to 10+ minutes.
npm run test:postgresql

# Run single test file (for faster iteration, takes ~12 seconds)
npm run test:local test/common/CryptoUtil.test.ts

# Test coverage with MySQL - NEVER CANCEL: Takes 5+ minutes. Set timeout to 15+ minutes.
npm run cov

# Test coverage with PostgreSQL - NEVER CANCEL: Takes 5+ minutes. Set timeout to 15+ minutes.
npm run cov:postgresql
```

**CRITICAL TESTING NOTES:**
- **NEVER CANCEL** build or test commands - they may take 4-15 minutes to complete
- Individual test files run much faster (~12 seconds) for development iteration
- Full test suite processes 100+ test files and requires database initialization
- Test failures may occur in CI environment; use individual test files for validation

**Testing Philosophy:**
- **Write tests for all new features** - No feature is complete without tests
- **Test at the right layer** - Controller tests for HTTP, Service tests for business logic
- **Mock external dependencies** - Use `mock()` from `@eggjs/mock`
- **Use realistic test data** - Create through `TestUtil` helper methods
- **Clean up after tests** - Database is reset between test files
- **Test both success and failure cases** - Error paths are equally important

**Common Test Patterns:**

```typescript
import { app, mock } from '@eggjs/mock/bootstrap';
import { TestUtil } from '../../../test/TestUtil';

describe('test/path/to/YourController.test.ts', () => {
  describe('[GET /api/endpoint] methodName()', () => {
    it('should return expected result', async () => {
      // Setup
      const { authorization } = await TestUtil.createUser();

      // Execute
      const res = await app
        .httpRequest()
        .get('/api/endpoint')
        .set('authorization', authorization)
        .expect(200);

      // Assert
      assert.equal(res.body.someField, expectedValue);
    });

    it('should handle unauthorized access', async () => {
      const res = await app
        .httpRequest()
        .get('/api/endpoint')
        .expect(401);

      assert.equal(res.body.error, '[UNAUTHORIZED] Login first');
    });
  });
});
```

### Production Commands

```bash
# CI pipeline commands - NEVER CANCEL: Takes 5+ minutes. Set timeout to 15+ minutes.
npm run ci             # MySQL CI (includes lint, test, coverage, build)
npm run ci:postgresql  # PostgreSQL CI

# Production start/stop
npm run start             # Start as daemon
npm run stop              # Stop daemon
npm run start:foreground  # Start in foreground for debugging
```

## Validation Scenarios

**ALWAYS manually validate changes** by running through these scenarios:

### Basic API Validation

```bash
# Start development server
npm run dev

# Test registry root endpoint
curl http://127.0.0.1:7001
# Should return JSON with app metadata and stats

# Test authentication endpoint
curl http://127.0.0.1:7001/-/whoami
# Should return authentication error (expected when not logged in)

# Test package listing (initially empty)
curl http://127.0.0.1:7001/-/all
```

### Admin User Setup and Package Publishing

```bash
# Register admin user (cnpmcore_admin) - requires allowPublicRegistration=true in config
npm login --registry=http://127.0.0.1:7001

# Verify login
npm whoami --registry=http://127.0.0.1:7001

# Test package publishing
npm publish --registry=http://127.0.0.1:7001
```

## Architecture and Navigation

### Project Structure

```
app/
├── common/       # Global utilities and adapters
│   ├── adapter/  # External service adapters (NpmRegistry, Binary, etc.)
│   └── enum/     # Shared enumerations
├── core/         # Business logic layer
│   ├── entity/   # Core domain models
│   ├── event/    # Event handlers and async processing
│   ├── service/  # Core business services
│   └── util/     # Internal utilities
├── port/         # Interface layer
│   ├── controller/  # HTTP controllers
│   ├── middleware/  # Express middleware
│   ├── schedule/    # Background job schedulers
│   └── webauth/     # WebAuth integration
├── repository/   # Data access layer
│   ├── model/    # ORM models
│   └── util/     # Repository utilities
└── infra/        # Infrastructure adapters
```

### Key Services and Controllers

- **PackageController**: Main package CRUD operations
- **PackageManagerService**: Core package management business logic
- **BinarySyncerService**: Binary package synchronization
- **ChangesStreamService**: NPM registry change stream processing
- **UserController**: User authentication and profile management

### Infrastructure Adapters (`app/infra/`)

Enterprise customization layer for PaaS integration. cnpmcore provides default implementations, but enterprises should implement their own based on their infrastructure:

- **NFSClientAdapter**: File storage abstraction (local/S3/OSS)
- **QueueAdapter**: Message queue integration
- **AuthAdapter**: Authentication system integration
- **BinaryAdapter**: Binary package storage adapter

These adapters allow cnpmcore to integrate with different cloud providers and enterprise systems without modifying core business logic.
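
To make the adapter idea concrete, a custom storage adapter might, as a rough sketch, expose an interface along these lines (the method names here are hypothetical, not cnpmcore's actual contract):

```typescript
// Hypothetical shape of a file-storage adapter; the real
// NFSClientAdapter contract in app/infra/ may differ.
import { Readable } from 'node:stream';

interface NFSClient {
  upload(localFile: string, options: { key: string }): Promise<void>;
  download(key: string, localFile: string): Promise<void>;
  createDownloadStream(key: string): Promise<Readable>;
  remove(key: string): Promise<void>;
}

// An enterprise implementation wraps its internal storage SDK
// behind the same interface and is wired up in app/infra/.
export class MyCompanyStorageClient implements NFSClient {
  async upload(localFile: string, options: { key: string }): Promise<void> {
    // call the internal SDK here
  }
  async download(key: string, localFile: string): Promise<void> {}
  async createDownloadStream(key: string): Promise<Readable> {
    return Readable.from([]);
  }
  async remove(key: string): Promise<void> {}
}
```
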
### Configuration Files

- `config/config.default.ts`: Main application configuration
- `config/database.ts`: Database connection settings
- `config/binaries.ts`: Binary package mirror configurations
- `.env`: Environment-specific variables
- `tsconfig.json`: TypeScript compilation settings
- `tsconfig.prod.json`: Production build settings

## Common Development Tasks

### Adding New Features

**ALWAYS follow this workflow:**

1. **Plan the change** - Identify which layers need modification
2. **Run linter** - `npm run lint:fix` to establish clean baseline
3. **Bottom-up implementation** - Build from data layer up to controller:

   a. **Model Layer** (if new data structure needed):
      - Add SQL migrations: `sql/mysql/*.sql` AND `sql/postgresql/*.sql`
      - Create Model: `app/repository/model/YourModel.ts`
      - Run database migration scripts

   b. **Entity Layer** (domain models):
      - Create Entity: `app/core/entity/YourEntity.ts`
      - Implement business logic and behavior
      - Keep entities pure (no infrastructure dependencies)

   c. **Repository Layer** (data access):
      - Create Repository: `app/repository/YourRepository.ts`
      - Follow naming: `findX`, `saveX`, `removeX`, `listXs`
      - Inject dependencies using `@Inject()`

   d. **Service Layer** (business logic):
      - Create Service: `app/core/service/YourService.ts`
      - Orchestrate repositories and entities
      - Use `@SingletonProto()` for service lifecycle

   e. **Controller Layer** (HTTP endpoints):
      - Create Controller: `app/port/controller/YourController.ts`
      - Extend `AbstractController`
      - Add HTTP method decorators: `@HTTPMethod()`, `@HTTPBody()`, etc.
      - Implement 3-step validation (params → auth → authorization)

4. **Add tests** - Create test file: `test/path/matching/source/YourFile.test.ts`
5. **Lint and test** - `npm run lint:fix && npm run test:local test/your/test.test.ts`
6. **Type check** - `npm run typecheck`
7. **Commit** - Use semantic commit messages (feat/fix/chore/docs/test)

**Example Controller Implementation:**

```typescript
import { AbstractController } from './AbstractController';
import { HTTPController, HTTPMethod, HTTPQuery, Inject } from 'egg';

@HTTPController()
export class YourController extends AbstractController {
  @Inject()
  private readonly yourService: YourService;

  @HTTPMethod({ path: '/api/path', method: 'GET' })
  async yourMethod(@HTTPQuery() params: YourQueryType) {
    // 1. Validate params (done by @HTTPQuery with typebox)
    // 2. Authenticate user
    const user = await this.userRoleManager.requiredAuthorizedUser(this.ctx, 'read');
    // 3. Authorize resource access (if needed)
    // 4. Delegate to service
    return await this.yourService.doSomething(params);
  }
}
```

### Database Migrations

- SQL files are in `sql/mysql/` and `sql/postgresql/`
- Migration scripts automatically run during database preparation
- **NEVER** modify existing migration files - only add new ones

### Background Jobs

- Schedulers are in `app/port/schedule/`
- Include sync workers, cleanup tasks, and stream processors
- Jobs run automatically when development server starts
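
As a rough sketch of what such a job can look like (the worker below and its interval are invented; the real schedulers in `app/port/schedule/` may use different decorators and parameters):

```typescript
// Hypothetical background job sketch, tegg-style scheduling assumed.
import { IntervalParams, Schedule, ScheduleType } from '@eggjs/tegg/schedule';
import { Inject } from '@eggjs/tegg';
import type { EggLogger } from 'egg';

@Schedule<IntervalParams>({
  type: ScheduleType.WORKER, // run on a single worker process
  scheduleData: { interval: 60000 }, // every 60 seconds
})
export class CleanupExampleWorker {
  @Inject()
  private readonly logger: EggLogger;

  async subscribe() {
    this.logger.info('[CleanupExampleWorker] tick');
    // delegate the real work to a core service
  }
}
```
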
## Troubleshooting

### Database Connection Issues

```bash
# Check if services are running
docker compose ps

# Reset MySQL environment
docker compose -f docker-compose.yml down
docker compose -f docker-compose.yml up -d
CNPMCORE_DATABASE_NAME=cnpmcore bash ./prepare-database-mysql.sh

# Reset PostgreSQL environment
docker compose -f docker-compose-postgres.yml down
docker compose -f docker-compose-postgres.yml up -d
CNPMCORE_DATABASE_NAME=cnpmcore bash ./prepare-database-postgresql.sh
```

### Build Issues

```bash
# Clean and rebuild
npm run clean
npm run tsc

# Check TypeScript configuration
npx tsc --noEmit
```

### Test Issues

```bash
# Create missing test database
mysql -h 127.0.0.1 -P 3306 -u root -e "CREATE DATABASE cnpmcore_unittest;"

# Run single test for debugging
npm run test:local test/common/CryptoUtil.test.ts
```

## CI/CD Integration

The project uses GitHub Actions with workflows in `.github/workflows/`:
- `nodejs.yml`: Main CI pipeline with MySQL, PostgreSQL, and Elasticsearch testing
- Multiple Node.js versions tested: 20, 22, 24
- **CRITICAL**: CI jobs include long-running tests that can take 15+ minutes per database type

### Pre-commit Validation

**ALWAYS run before committing:**

```bash
npm run lint:fix  # Fix linting issues
npm run tsc       # Verify TypeScript compilation
npm run test:local test/path/to/relevant.test.ts  # Run relevant tests
```

## Docker Support

### Development Environments

- `docker-compose.yml`: MySQL + Redis + phpMyAdmin
- `docker-compose-postgres.yml`: PostgreSQL + Redis + pgAdmin
- `docker-compose-es.yml`: Elasticsearch integration

### Production Images

```bash
# Build Alpine image
npm run images:alpine

# Build Debian image
npm run images:debian
```

## External Dependencies

- **Database**: MySQL 9.x or PostgreSQL 17+
- **Cache**: Redis 6+
- **Search**: Elasticsearch 8.x (optional)
- **Storage**: Local filesystem or S3-compatible storage
- **Framework**: Egg.js with extensive TypeScript integration

## Performance Notes

Command execution times (for timeout planning):

- **Startup Time**: ~20 seconds for development server
- **Build Time**: ~6 seconds for TypeScript compilation
- **Test Time**: 4-15 minutes for full suite (database dependent)
- **Individual Test**: ~12 seconds for single test file
- **Package Installation**: ~2 minutes for npm install
- **Database Init**: <2 seconds for either MySQL or PostgreSQL
- **Linting**: <1 second (oxlint is very fast)

Always account for these timings when setting timeouts for automated processes.

## Semantic Commit Messages

Use conventional commit format for all commits:

- `feat:` - New features
- `fix:` - Bug fixes
- `docs:` - Documentation changes
- `chore:` - Maintenance tasks
- `test:` - Test additions or modifications
- `refactor:` - Code refactoring
- `perf:` - Performance improvements

Examples:

```bash
feat: add support for GitHub binary mirroring
fix: resolve authentication token expiration issue
docs: update API documentation for sync endpoints
test: add tests for package publication workflow
```
.github/workflows/codeql-analysis.yml (vendored, 67)

@@ -1,67 +0,0 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"

on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ 'javascript', 'typescript' ]
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
        # Learn more about CodeQL language support at https://git.io/codeql-language-support

    steps:
      - name: Checkout repository
        uses: actions/checkout@v2

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v1
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.
          # queries: ./path/to/local/query, your-org/your-repo/queries@main

      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        uses: github/codeql-action/autobuild@v1

      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 https://git.io/JvXDl

      # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
      #    and modify them (or add more) to build your code if your project
      #    uses a compiled language

      #- run: |
      #    make bootstrap
      #    make release

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v1
.github/workflows/greetings.yml (vendored, 2)

@@ -9,7 +9,7 @@ jobs:
      issues: write
      pull-requests: write
    steps:
      - uses: actions/first-interaction@v1
      - uses: actions/first-interaction@v3
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          issue-message: 'We have received your feedback. If this is a bug, please provide a way to reproduce the issue; if it is a feature request, we will bring it into discussion as soon as possible. We also very much look forward to you joining our contributors, so the project can develop sustainably over the long term.'
.github/workflows/nodejs.yml (vendored, 326)

@@ -5,12 +5,128 @@ name: Node.js CI

on:
  push:
    branches: [ master ]
    branches: [master]
  pull_request:
    branches: [ master ]
    branches: [master]
  merge_group:

jobs:
  typecheck:
    runs-on: ubuntu-latest

    concurrency:
      group: typecheck-${{ github.workflow }}-#${{ github.event.pull_request.number || github.head_ref || github.ref }}
      cancel-in-progress: true

    steps:
      - name: Checkout Git Source
        uses: actions/checkout@v5

      - name: Use Node.js
        uses: actions/setup-node@v6
        with:
          node-version: 22

      - name: Install Dependencies
        run: npm i

      - name: Lint
        run: npm run lint

      - name: Typecheck
        run: npm run typecheck

      - name: Build
        run: npm run tsc && npm run tsc:prod

  test-deployment:
    runs-on: ubuntu-latest

    concurrency:
      group: test-deployment-${{ github.workflow }}-#${{ github.event.pull_request.number || github.head_ref || github.ref }}
      cancel-in-progress: true

    services:
      mysql:
        image: mysql:5.7
        env:
          MYSQL_ALLOW_EMPTY_PASSWORD: true
          MYSQL_DATABASE: cnpmcore
        ports:
          - 3306:3306
        options: --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=5
      redis:
        # https://docs.github.com/en/actions/using-containerized-services/about-service-containers#example-mapping-redis-ports
        image: redis
        ports:
          # Opens tcp port 6379 on the host and service container
          - 6379:6379

    steps:
      - name: Checkout Git Source
        uses: actions/checkout@v5

      - name: Use Node.js
        uses: actions/setup-node@v6
        with:
          node-version: 22

      - name: Install Dependencies
        run: npm i

      - name: Test Deployment
        run: |
          npm run build
          echo "Preparing database..."
          CNPMCORE_DATABASE_NAME=cnpmcore bash ./prepare-database-mysql.sh
          echo "Starting cnpmcore..."
          CNPMCORE_FORCE_LOCAL_FS=true npm run start:foreground &
          sleep 5
          echo "Checking cnpmcore is ready..."

          set -Eeuo pipefail
          URL="http://127.0.0.1:7001"
          PATTERN="instance_start_time"
          TIMEOUT=60
          TMP="$(mktemp)"
          echo "🔎 Health check $URL, expect 200 & body contains: $PATTERN"
          deadline=$((SECONDS + TIMEOUT))
          last_status=""

          while (( SECONDS < deadline )); do
            last_status="$(curl -sS -o "$TMP" -w '%{http_code}' "$URL" || true)"
            echo "last_status=$last_status"
            echo "body=$(cat $TMP)"
            if [[ "$last_status" == "200" ]] && grep -q "$PATTERN" "$TMP"; then
              echo "✅ OK"
              rm -f "$TMP"
              npx eggctl stop
              exit 0
            fi
            sleep 1
          done

          echo "::error::❌ Health check failed: status=$last_status"
          echo "---- Response body (last try) ----"
          cat "$TMP" || true
          rm -f "$TMP"
          exit 1

  test-postgresql-fs-nfs:
    strategy:
      fail-fast: false
      matrix:
        node-version: [20, 22, 24]
        os: [ubuntu-latest]
        # 0-based index
        shardIndex: [0, 1, 2]
        shardTotal: [3]

    name: test on postgresql (node@${{ matrix.node-version }}, shard@${{ matrix.shardIndex }}/${{ matrix.shardTotal }})
    concurrency:
      group: test-postgresql-fs-nfs-${{ github.workflow }}-#${{ github.event.pull_request.number || github.head_ref || github.ref }}-${{ matrix.node-version }}-${{ matrix.shardIndex }}/${{ matrix.shardTotal }}
      cancel-in-progress: true

    runs-on: ${{ matrix.os }}

    services:
@@ -38,40 +154,75 @@ jobs:
          # Opens tcp port 6379 on the host and service container
          - 6379:6379

    strategy:
      fail-fast: false
      matrix:
        node-version: [18.20.0, 18, 20, 22]
        os: [ubuntu-latest]

    steps:
      - name: Checkout Git Source
        uses: actions/checkout@v4
      - name: Checkout Git Source
        uses: actions/checkout@v5

      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v4
        with:
          node-version: ${{ matrix.node-version }}
      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v6
        with:
          node-version: ${{ matrix.node-version }}

      - name: Install Dependencies
        run: npm i -g npminstall && npminstall
      - name: Install Dependencies
        run: npm i

      - name: Continuous Integration
        run: npm run ci:postgresql
        env:
          # The hostname used to communicate with the PostgreSQL service container
          POSTGRES_HOST: localhost
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: postgres
          # The default PostgreSQL port
          POSTGRES_PORT: 5432
      # https://github.com/elastic/elastic-github-actions/blob/master/elasticsearch/README.md
      - name: Configure sysctl limits
        run: |
          sudo swapoff -a
          sudo sysctl -w vm.swappiness=1
          sudo sysctl -w fs.file-max=262144
          sudo sysctl -w vm.max_map_count=262144

      - name: Code Coverage
        uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
      - name: Runs Elasticsearch
        uses: elastic/elastic-github-actions/elasticsearch@master
        with:
          stack-version: 8.18.0
          security-enabled: false

      - name: Wait for Elasticsearch to be ready
        run: |
          curl -v http://localhost:9200
          while ! curl -s http://localhost:9200 | grep -q "elasticsearch"; do
            echo "Waiting for Elasticsearch to be ready..."
            sleep 1
          done

      - name: Continuous Integration
        run: npm run ci:postgresql
        env:
          # The hostname used to communicate with the PostgreSQL service container
          POSTGRES_HOST: localhost
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: postgres
          # The default PostgreSQL port
          POSTGRES_PORT: 5432
          CNPMCORE_CONFIG_ENABLE_ES: true
          CNPMCORE_CONFIG_ES_CLIENT_NODES: http://localhost:9200
          # https://github.com/jamiebuilds/ci-parallel-vars
          CI_NODE_INDEX: ${{ matrix.shardIndex }}
          CI_NODE_TOTAL: ${{ matrix.shardTotal }}

      - name: Code Coverage
        uses: codecov/codecov-action@v5
        with:
          token: ${{ secrets.CODECOV_TOKEN }}

  test-mysql57-fs-nfs:
    strategy:
      fail-fast: false
      matrix:
        node-version: [20, 22, 24]
        os: [ubuntu-latest]
        # 0-based index
        shardIndex: [0, 1, 2]
        shardTotal: [3]

    name: test on mysql (node@${{ matrix.node-version }}, shard@${{ matrix.shardIndex }}/${{ matrix.shardTotal }})
    concurrency:
      group: test-mysql57-fs-nfs-${{ github.workflow }}-#${{ github.event.pull_request.number || github.head_ref || github.ref }}-${{ matrix.node-version }}-${{ matrix.shardIndex }}/${{ matrix.shardTotal }}
      cancel-in-progress: true

    runs-on: ${{ matrix.os }}

    services:
@@ -90,39 +241,43 @@ jobs:
          # Opens tcp port 6379 on the host and service container
          - 6379:6379

    steps:
      - name: Checkout Git Source
        uses: actions/checkout@v5

      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v6
        with:
          node-version: ${{ matrix.node-version }}

      - name: Install Dependencies
        run: npm i

      - name: Continuous Integration
        run: npm run ci
        env:
          # https://github.com/jamiebuilds/ci-parallel-vars
          CI_NODE_INDEX: ${{ matrix.shardIndex }}
          CI_NODE_TOTAL: ${{ matrix.shardTotal }}

      - name: Code Coverage
        uses: codecov/codecov-action@v5
        with:
          token: ${{ secrets.CODECOV_TOKEN }}

  test-mysql57-s3-nfs:
    if: ${{ github.ref_name == 'master' }}
    strategy:
      fail-fast: false
      matrix:
        node-version: [18.20.0, 18, 20, 22]
        node-version: [20, 22]
        os: [ubuntu-latest]

    steps:
      - name: Checkout Git Source
        uses: actions/checkout@v4
    concurrency:
      group: test-mysql57-s3-nfs-${{ github.workflow }}-#${{ github.event.pull_request.number || github.head_ref || github.ref }}-${{ matrix.node-version }}
      cancel-in-progress: true

      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v4
        with:
          node-version: ${{ matrix.node-version }}

      - name: Install Dependencies
        run: npm i -g npminstall && npminstall

      - name: Continuous Integration
        run: npm run ci

      - name: Code Coverage
        uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}

  test-mysql57-oss-nfs:
    runs-on: ${{ matrix.os }}
    if: |
      contains('
        refs/heads/master
        refs/heads/dev
      ', github.ref)

    services:
      mysql:
@@ -135,40 +290,45 @@ jobs:
        options: --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=5

      redis:
        # https://docs.github.com/en/actions/using-containerized-services/about-service-containers#example-mapping-redis-ports
        image: redis
        ports:
          # Opens tcp port 6379 on the host and service container
          - 6379:6379

    strategy:
      fail-fast: false
      matrix:
        node-version: [18.20.0, 18, 20, 22]
        os: [ubuntu-latest]

    steps:
      - name: Checkout Git Source
        uses: actions/checkout@v4
      - name: Checkout Git Source
        uses: actions/checkout@v5

      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v4
        with:
          node-version: ${{ matrix.node-version }}
      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v6
        with:
          node-version: ${{ matrix.node-version }}

      - name: Install Dependencies
        run: npm i
      - name: Install Dependencies
        run: npm i

      - name: Continuous Integration
        run: npm run ci
        env:
          CNPMCORE_NFS_TYPE: oss
          CNPMCORE_NFS_OSS_BUCKET: cnpmcore-unittest-github-nodejs-${{ matrix.node-version }}
          CNPMCORE_NFS_OSS_ENDPOINT: https://oss-us-west-1.aliyuncs.com
          CNPMCORE_NFS_OSS_ID: ${{ secrets.CNPMCORE_NFS_OSS_ID }}
          CNPMCORE_NFS_OSS_SECRET: ${{ secrets.CNPMCORE_NFS_OSS_SECRET }}
      - name: Continuous Integration
        run: npm run ci "test/cli/npm/install.test.ts"
        env:
          CNPMCORE_NFS_TYPE: s3
          CNPMCORE_NFS_REMOVE_BEFORE_UPLOAD: true
          CNPMCORE_NFS_S3_CLIENT_BUCKET: cnpmcore-unittest-github-nodejs-${{ matrix.node-version }}
          CNPMCORE_NFS_S3_CLIENT_ENDPOINT: ${{ secrets.CNPMCORE_NFS_S3_ENDPOINT }}
          CNPMCORE_NFS_S3_CLIENT_ID: ${{ secrets.CNPMCORE_NFS_S3_ID }}
          CNPMCORE_NFS_S3_CLIENT_SECRET: ${{ secrets.CNPMCORE_NFS_S3_SECRET }}
          CNPMCORE_NFS_S3_CLIENT_FORCE_PATH_STYLE: true
          # CNPMCORE_NFS_S3_CLIENT_DISABLE_URL: true

      - name: Code Coverage
        uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
      - name: Code Coverage
        uses: codecov/codecov-action@v5
        with:
          token: ${{ secrets.CODECOV_TOKEN }}

  done:
    runs-on: ubuntu-latest
    needs:
      - test-postgresql-fs-nfs
      - test-mysql57-fs-nfs
      - typecheck
    steps:
      - run: exit 1
        if: ${{ always() && (contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled')) }}
.github/workflows/release-image.yml (vendored, Normal file, +75)

@@ -0,0 +1,75 @@
# https://docs.github.com/en/actions/tutorials/publish-packages/publish-docker-images#publishing-images-to-github-packages
name: Create and publish a Docker image

# Configures this workflow to run manually
on:
  workflow_dispatch:

  pull_request:
    branches: [master]

# Defines two custom environment variables for the workflow. These are used for the Container registry domain, and a name for the Docker image that this workflow builds.
env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}

# There is a single job in this workflow. It's configured to run on the latest available version of Ubuntu.
jobs:
  build-and-push-image:
    runs-on: ubuntu-latest
    concurrency:
      group: build-and-push-image-${{ github.workflow }}-#${{ github.event.pull_request.number || github.head_ref || github.ref }}
      cancel-in-progress: true
    # Sets the permissions granted to the `GITHUB_TOKEN` for the actions in this job.
    permissions:
      contents: read
      packages: write
      attestations: write
      id-token: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v5
      # Uses the `docker/login-action` action to log in to the Container registry using the account and password that will publish the packages. Once published, the packages are scoped to the account defined here.
      - name: Log in to the Container registry
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      # This step uses [docker/metadata-action](https://github.com/docker/metadata-action#about) to extract tags and labels that will be applied to the specified image. The `id` "meta" allows the output of this step to be referenced in a subsequent step. The `images` value provides the base name for the tags and labels.
      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
      # This step uses the `docker/build-push-action` action to build the image, based on your repository's `Dockerfile`. If the build succeeds, it pushes the image to GitHub Packages.
      # It uses the `context` parameter to define the build's context as the set of files located in the specified path. For more information, see [Usage](https://github.com/docker/build-push-action#usage) in the README of the `docker/build-push-action` repository.
      # It uses the `tags` and `labels` parameters to tag and label the image with the output from the "meta" step.
      - name: Build and push Docker image
        id: push
        uses: docker/build-push-action@v6
        with:
          context: .
          file: .docker/debian/Dockerfile
          platforms: linux/amd64,linux/arm64
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}

      # This step generates an artifact attestation for the image, which is a tamper-proof statement about where and how it was built. It increases supply chain security for people who consume the image. For more information, see [Using artifact attestations to establish provenance for builds](/actions/security-guides/using-artifact-attestations-to-establish-provenance-for-builds).
      - name: Generate artifact attestation
        if: github.event_name != 'pull_request'
        uses: actions/attest-build-provenance@v3
        with:
          subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME}}
          subject-digest: ${{ steps.push.outputs.digest }}
          push-to-registry: true
.github/workflows/release.yml (vendored, 14)

@@ -1,12 +1,18 @@
name: Release
on:
  push:
    branches: [ master ]
    branches: [master]

permissions:
  contents: write
  deployments: write
  issues: write
  pull-requests: write
  id-token: write

jobs:
  release:
    name: Node.js
    uses: cnpm/github-actions/.github/workflows/node-release.yml@master
    name: NPM
    uses: cnpm/github-actions/.github/workflows/npm-release.yml@master
    secrets:
      NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
      GIT_TOKEN: ${{ secrets.GIT_TOKEN }}
.husky/pre-commit (Normal file, +1)

@@ -0,0 +1 @@
npx lint-staged
.oxlintrc.json (Normal file, +24)

@@ -0,0 +1,24 @@
{
  "$schema": "./node_modules/oxlint/configuration_schema.json",
  // FIXME: @eggjs/oxlint-config too strict, disable it for now, will fix it later
  // "extends": ["./node_modules/@eggjs/oxlint-config/.oxlintrc.json"],
  "env": {
    "node": true,
    "mocha": true
  },
  "rules": {
    // Project-specific overrides
    "max-params": ["error", 6],
    "no-console": "warn",
    "import/no-anonymous-default-export": "error",
    "no-unassigned-import": "allow",
    "new-cap": "allow",
    "class-methods-use-this": "allow",
    "import/no-named-export": "allow",
    "unicorn/no-array-sort": "allow",
    "no-param-reassign": "allow",
    "unicorn/prefer-at": "allow",
    "no-process-env": "allow"
  },
  "ignorePatterns": ["index.d.ts"]
}
.prettierignore (Normal file, +4)

@@ -0,0 +1,4 @@
CHANGELOG.md
__snapshots__
pnpm-lock.yaml
node_modules
.prettierrc (Normal file, +7)

@@ -0,0 +1,7 @@
{
  "singleQuote": true,
  "trailingComma": "es5",
  "tabWidth": 2,
  "printWidth": 120,
  "arrowParens": "avoid"
}
.vscode/launch.json (vendored, 14)

@@ -9,12 +9,7 @@
      "request": "launch",
      "name": "Egg Debug",
      "runtimeExecutable": "npm",
      "runtimeArgs": [
        "run",
        "dev",
        "--",
        "--inspect-brk"
      ],
      "runtimeArgs": ["run", "dev", "--", "--inspect-brk"],
      "console": "integratedTerminal",
      "restart": true,
      "autoAttachChildProcesses": true
@@ -24,12 +19,7 @@
      "request": "launch",
      "name": "Egg Test",
      "runtimeExecutable": "npm",
      "runtimeArgs": [
        "run",
        "test-local",
        "--",
        "--inspect-brk"
      ],
      "runtimeArgs": ["run", "test:local", "--", "--inspect-brk"],
      "autoAttachChildProcesses": true
    }
  ]
CHANGELOG.md (2670)

File diff suppressed because it is too large.
CLAUDE.md (Normal file, +270)

@@ -0,0 +1,270 @@
# CLAUDE.md

This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.

## Project Overview

cnpmcore is a TypeScript-based private NPM registry implementation for enterprise use. It's built on the Egg.js framework using Domain-Driven Design (DDD) architecture principles and supports both MySQL and PostgreSQL databases.

## Essential Commands

### Development

```bash
# Start development server (MySQL)
npm run dev

# Start development server (PostgreSQL)
npm run dev:postgresql

# Lint code
npm run lint

# Fix linting issues
npm run lint:fix

# TypeScript type checking
npm run typecheck
```

### Testing

```bash
# Run all tests with MySQL (takes 4+ minutes)
npm run test

# Run all tests with PostgreSQL (takes 4+ minutes)
npm run test:postgresql

# Run single test file (faster iteration, ~12 seconds)
npm run test:local test/path/to/file.test.ts

# Generate coverage report
npm run cov
```

### Database Setup

```bash
# MySQL setup
docker compose -f docker-compose.yml up -d
CNPMCORE_DATABASE_NAME=cnpmcore bash ./prepare-database-mysql.sh

# PostgreSQL setup
docker compose -f docker-compose-postgres.yml up -d
CNPMCORE_DATABASE_NAME=cnpmcore bash ./prepare-database-postgresql.sh
```

### Build

```bash
# Clean build artifacts
npm run clean

# Development build
npm run tsc

# Production build
npm run tsc:prod
```

## Architecture - Domain-Driven Design (DDD)

The codebase follows strict DDD layering with clear separation of concerns:

```
Controller (app/port/controller/)  ← HTTP interface, validation, auth
    ↓ depends on
Service (app/core/service/)        ← Business logic orchestration
    ↓ depends on
Repository (app/repository/)       ← Data access layer
    ↓ depends on
Model (app/repository/model/)      ← ORM/Database mapping

Entity (app/core/entity/)          ← Pure domain models (no dependencies)
Common (app/common/)               ← Utilities and adapters (all layers)
```

### Layer Responsibilities

**Controller Layer** (`app/port/controller/`):
- Handle HTTP requests/responses
- Validate inputs using `@eggjs/typebox-validate`
- Authenticate users and verify authorization
- Delegate business logic to Services
- All controllers extend `AbstractController`

**Service Layer** (`app/core/service/`):
- Implement core business logic
- Orchestrate multiple repositories
- Publish domain events
- Manage transactions

**Repository Layer** (`app/repository/`):
- CRUD operations on Models
- Data access and persistence
- Query building and optimization
- Methods named: `findX`, `saveX`, `removeX`, `listXs`

**Entity Layer** (`app/core/entity/`):
- Pure domain models with business behavior
- No infrastructure dependencies
- Immutable data structures preferred

**Model Layer** (`app/repository/model/`):
- ORM definitions using Leoric
- Database schema mapping
- No business logic
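
For flavor, a "rich" (non-anemic) entity in the sense described above might look like this sketch; `PackageTag` here is a made-up example, not the real entity in `app/core/entity/`:

```typescript
// Hypothetical rich domain entity: data plus behavior, no infrastructure.
export class PackageTag {
  constructor(
    readonly tag: string,
    private version: string
  ) {}

  get currentVersion(): string {
    return this.version;
  }

  // Business rules live on the entity itself.
  moveTo(newVersion: string): void {
    if (!/^\d+\.\d+\.\d+/.test(newVersion)) {
      throw new Error(`invalid semver: ${newVersion}`);
    }
    this.version = newVersion;
  }
}
```
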
### Infrastructure Adapters (`app/infra/`)
|
||||
Enterprise customization layer for PaaS integration:
|
||||
- **NFSClientAdapter**: File storage (local/S3/OSS)
|
||||
- **QueueAdapter**: Message queue integration
|
||||
- **AuthAdapter**: Authentication system
|
||||
- **BinaryAdapter**: Binary package storage
|
||||
|
||||
## Key Development Patterns
|
||||
|
||||
### Request Validation Trilogy
|
||||
Always validate requests in this exact order:
|
||||
1. **Parameter Validation** - Use `@eggjs/typebox-validate` for type-safe validation
|
||||
2. **Authentication** - Get authorized user with token role verification
|
||||
3. **Authorization** - Check resource-level permissions to prevent privilege escalation
|
||||
|
||||
```typescript
|
||||
// Example controller method
|
||||
async someMethod(@HTTPQuery() params: QueryType) {
|
||||
// 1. Params already validated by @HTTPQuery with typebox
|
||||
// 2. Authenticate
|
||||
const user = await this.userRoleManager.requiredAuthorizedUser(this.ctx, 'publish');
|
||||
// 3. Authorize (if needed)
|
||||
const { pkg } = await this.ensurePublishAccess(this.ctx, fullname);
|
||||
// 4. Execute business logic
|
||||
return await this.service.doSomething(params);
|
||||
}
|
||||
```
|
||||
|
||||
### Repository Method Naming
|
||||
- `findSomething` - Query single entity
|
||||
- `saveSomething` - Create or update entity
|
||||
- `removeSomething` - Delete entity
|
||||
- `listSomethings` - Query multiple entities (plural)
|
||||
|
||||
### Modifying Database Models
When changing a Model, update all 3 locations:
1. SQL migrations: `sql/mysql/*.sql` AND `sql/postgresql/*.sql`
2. ORM Model: `app/repository/model/*.ts`
3. Domain Entity: `app/core/entity/*.ts`

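For instance, adding a hypothetical `description` field to a `Package` model would touch all three locations; the column, attribute, and entity below are illustrative sketches, not actual cnpmcore code:

```typescript
// 1. sql/mysql/*.sql AND sql/postgresql/*.sql — add the column in both dialects:
//      ALTER TABLE packages ADD COLUMN description VARCHAR(10240) NULL;

// 2. app/repository/model/Package.ts — mirror the column on the Leoric model
//    (attribute shown as a sketch):
//      @Attribute(DataTypes.STRING(10240), { allowNull: true })
//      description: string | null;

// 3. app/core/entity/Package.ts — expose the field on the pure domain entity:
export class PackageEntity {
  constructor(
    readonly name: string,
    readonly description: string | null
  ) {}
}
```
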
## Code Style

### Linting
- **Linter**: Oxlint (Rust-based, very fast)
- **Formatter**: Prettier
- **Pre-commit**: Husky + lint-staged (auto-format on commit)

Style rules:
- Single quotes (`'`)
- 2-space indentation
- 120 character line width
- ES5 trailing commas
- Max 6 function parameters
- No console statements (use logger)

### TypeScript
- Strict TypeScript enabled
- Avoid `any` types - use proper typing or `unknown` (see the sketch after this list)
- ES modules (`import/export`) throughout
- Comprehensive type definitions in all files

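A minimal sketch of the `unknown`-over-`any` rule; the JSON shape and the narrowing check here are illustrative:

```typescript
// Prefer `unknown` for untyped input and narrow explicitly before use.
function parseJSON(input: string): unknown {
  return JSON.parse(input);
}

const value = parseJSON('{"name":"cnpmcore"}');
if (typeof value === 'object' && value !== null && 'name' in value) {
  // After narrowing, the access is type-safe.
  const name = (value as { name: string }).name;
}
```
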
### Testing
- Test files use `.test.ts` suffix
- Tests mirror source structure in `test/` directory
- Use `@eggjs/mock` for mocking
- Use `assert` from `node:assert/strict`
- Test both success and error cases

Pattern:
```typescript
describe('test/path/to/SourceFile.test.ts', () => {
  describe('[HTTP_METHOD /api/path] functionName()', () => {
    it('should handle expected behavior', async () => {
      // Test implementation
    });
  });
});
```

## Project Structure

```
app/
├── common/          # Global utilities and adapters
│   ├── adapter/     # External service adapters
│   └── enum/        # Shared enumerations
├── core/            # Business logic layer
│   ├── entity/      # Domain models
│   ├── event/       # Event handlers
│   ├── service/     # Business services
│   └── util/        # Internal utilities
├── port/            # Interface layer
│   ├── controller/  # HTTP controllers
│   ├── middleware/  # Middleware
│   └── schedule/    # Background jobs
├── repository/      # Data access layer
│   └── model/       # ORM models
└── infra/           # Infrastructure adapters

config/              # Configuration files
sql/                 # Database migrations
├── mysql/           # MySQL migrations
└── postgresql/      # PostgreSQL migrations
test/                # Test files (mirrors app/ structure)
```

## Important Configuration

- `config/config.default.ts` - Main application configuration
- `config/database.ts` - Database connection settings
- `config/binaries.ts` - Binary package mirror configurations
- `.env` - Environment-specific variables (copy from `.env.example`)
- `tsconfig.json` - TypeScript settings (target: ES2021 for Leoric compatibility)

## Development Workflow

1. **Setup**: Copy `.env.example` to `.env`, start Docker services, initialize database
2. **Feature Development**: Follow bottom-up approach (Model → Entity → Repository → Service → Controller)
3. **Testing**: Write tests at appropriate layer, run individual tests for fast iteration
4. **Validation**: Run linter, typecheck, relevant tests before committing
5. **Commit**: Use semantic commit messages (feat/fix/docs/test/chore)

## Integration as NPM Package

cnpmcore can be integrated into Egg.js/Tegg applications as an NPM package, allowing enterprises to:
- Customize infrastructure adapters (storage, auth, queue)
- Override default behavior while receiving updates
- Integrate with existing enterprise systems

See INTEGRATE.md for detailed integration guide.

## Performance Notes

Typical command execution times:
- Development server startup: ~20 seconds
- TypeScript build: ~6 seconds
- Full test suite: 4-15 minutes
- Single test file: ~12 seconds
- Linting: <1 second
- Database initialization: <2 seconds

## Prerequisites

- Node.js: 20.18.0+ or 22.18.0+
- Database: MySQL 5.7+ or PostgreSQL 17+
- Cache: Redis 6+
- Optional: Elasticsearch 8.x

## Key Services & Controllers

Core components to understand:
- **PackageController**: Package CRUD operations
- **PackageManagerService**: Core package management logic
- **BinarySyncerService**: Binary package synchronization
- **ChangesStreamService**: NPM registry change stream processing
- **UserController**: User authentication and profiles

48
DEVELOPER.md
@@ -4,14 +4,32 @@

This project's external service dependencies are: a MySQL or PostgreSQL database, and a Redis cache service.

You can quickly start a local development environment with Docker:
Generate the local development environment config file:

```bash
# Start local dependency services
docker-compose up -d
cp .env.example .env
```

You can quickly start a local development environment with Docker:

MySQL development environment:

```bash
# Start local dependency services - MySQL + Redis
docker-compose -f docker-compose.yml up -d

# Stop local dependency services
docker-compose down
docker-compose -f docker-compose.yml down
```

PostgreSQL development environment:

```bash
# Start local dependency services - PostgreSQL + Redis
docker-compose -f docker-compose-postgres.yml up -d

# Stop local dependency services
docker-compose -f docker-compose-postgres.yml down
```

> For manual initialization of the dependency services, see [Local development environment - MySQL](./docs/setup.md) or [Local development environment - PostgreSQL](./docs/setup-with-postgresql.md)

@@ -50,6 +68,26 @@ npm run dev:postgresql

```bash
curl -v http://127.0.0.1:7001
```

### Logging in and test publishing

> cnpmcore does not allow public registration by default; it can be enabled via the `allowPublicRegistration` config in `config.default.ts`, otherwise only administrators can log in

Register the cnpmcore_admin administrator

```bash
npm login --registry=http://127.0.0.1:7001

# Verify login
npm whoami --registry=http://127.0.0.1:7001
```

Publish a package

```bash
npm publish --registry=http://127.0.0.1:7001
```

### Unit tests

MySQL

@@ -198,7 +236,7 @@ private async getPackageEntity(scope: string, name: string) {

#### 1. Request parameter validation

Use [egg-typebox-validate](https://github.com/xiekw2010/egg-typebox-validate) for request parameter validation: define the parameter types and rules once, and you get both runtime validation and static type definitions.
Use [@eggjs/typebox-validate](https://github.com/eggjs/egg/tree/next/plugins/typebox-validate) for request parameter validation: define the parameter types and rules once, and you get both runtime validation and static type definitions.
See [PR#12](https://github.com/cnpm/cnpmcore/pull/12) for detailed usage.

For usage, refer directly to the code in `app/port/typebox.ts`.

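A minimal sketch of this define-once pattern (the schema below is illustrative; the real definitions live in `app/port/typebox.ts`, and the validator plugin wraps [TypeBox](https://github.com/sinclairzx81/typebox)):

```typescript
import { Type, type Static } from '@sinclair/typebox';

// Define the parameter rules once...
export const QueryPageOptions = Type.Object({
  pageSize: Type.Integer({ minimum: 1, maximum: 100, default: 20 }),
  pageIndex: Type.Integer({ minimum: 0, default: 0 }),
});

// ...and derive the static type from the same schema.
export type QueryPageOptions = Static<typeof QueryPageOptions>;
```
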
17
Dockerfile
@@ -1,17 +0,0 @@
FROM node:20

# Create app directory
WORKDIR /usr/src/app

# Install app dependencies
COPY . .

RUN npm install -g npminstall --registry=https://registry.npmmirror.com \
    && npminstall -c \
    && npm run tsc

ENV NODE_ENV=production \
    EGG_SERVER_ENV=prod

EXPOSE 7001
CMD ["npm", "run", "start:foreground"]

203
INTEGRATE.md
@@ -1,14 +1,14 @@
# 🥚 How to integrate cnpmcore in [tegg](https://github.com/eggjs/tegg)
# 🥚 How to integrate cnpmcore in [tegg](https://github.com/eggjs/egg/blob/next/tegg)

> The example project for this document can be viewed [here](https://github.com/eggjs/examples/commit/bed580fe053ae573f8b63f6788002ff9c6e7a142). Before starting, make sure you have read the relevant documentation in [DEVELOPER.md](DEVELOPER.md) and completed the local development environment setup.

In production, we can also deploy the cnpmcore system directly to provide full Registry mirror functionality.
Usually, however, enterprises have internal middleware services or constraints, such as file storage, cache services, and login/authentication flows.

Besides source-code deployment and secondary development, we also provide an npm package, which makes it easy to integrate into [tegg](https://github.com/eggjs/tegg) applications.
Besides source-code deployment and secondary development, we also provide an npm package, which makes it easy to integrate into [tegg](https://github.com/eggjs/egg/blob/next/tegg) applications.
This way you get rich customization and extension capabilities while also benefiting from cnpmcore's continuously evolving feature set.

Below, taking an application initialized with [tegg](https://github.com/eggjs/tegg) as an example, we integrate cnpmcore as an npm package and extend the login feature to support enterprise [SSO](https://en.wikipedia.org/wiki/Single_sign-on) login.
Below, taking an application initialized with [tegg](https://github.com/eggjs/egg/blob/next/tegg) as an example, we integrate cnpmcore as an npm package and extend the login feature to support enterprise [SSO](https://en.wikipedia.org/wiki/Single_sign-on) login.

## 🚀 Quick start

@@ -34,51 +34,36 @@

### 📦︎ Install cnpmcore and modify the corresponding configuration

```shell
npm i cnpmcore -S
```
```shell
npm i cnpmcore
```

1. Modify the `tsconfig.json` configuration, because cnpmcore uses [subPath](https://nodejs.org/api/packages.html#subpath-exports)

```json
{
  "extends": "@eggjs/tsconfig",
  "compilerOptions": {
    "baseUrl": "./",
    "moduleResolution": "NodeNext",
    "target": "ES2020",
    "module": "Node16"
  }
}
```
```json
{
  "extends": "@eggjs/tsconfig",
  "compilerOptions": {
    "baseUrl": "./",
    "target": "ES2021"
  }
}
```

2. Modify the `config/plugin.ts` file to enable the plugins cnpmcore depends on

```typescript
// Enable the following plugins
{
  redis: {
    enable: true,
    package: 'egg-redis',
  },
  teggOrm: {
    enable: true,
    package: '@eggjs/tegg-orm-plugin',
  },
  eventbusModule: {
    enable: true,
    package: '@eggjs/tegg-eventbus-plugin',
  },
  tracer: {
    enable: true,
    package: 'egg-tracer',
  },
  typeboxValidate: {
    enable: true,
    package: 'egg-typebox-validate',
  },
}
```
```typescript
import tracerPlugin from '@eggjs/tracer';
import typeboxValidatePlugin from '@eggjs/typebox-validate';
import redisPlugin from '@eggjs/redis';

// Enable the following plugins
export default {
  ...redisPlugin(),
  ...tracerPlugin(),
  ...typeboxValidatePlugin(),
}
```

3. Modify the `config.default.ts` file; the default configuration can be overridden directly

@@ -92,6 +77,8 @@ export default () => {

```typescript
    ...cnpmcoreConfig,
    enableChangesStream: false,
    syncMode: SyncMode.all,
    allowPublicRegistration: true,
    // Enable public registration
  };
  return config;
}
```

@@ -101,63 +88,63 @@ export default () => {

1. Create a folder to store the custom infra module, here using app/infra as an example

```shell
├── infra
│   ├── AuthAdapter.ts
│   ├── NFSAdapter.ts
│   ├── QueueAdapter.ts
│   └── package.json
```

* Add `package.json`, declaring infra as an eggModule unit

```JSON
{
  "name": "infra",
  "eggModule": {
    "name": "infra"
  }
}
```

* Add `XXXAdapter.ts`; in each Adapter, extend cnpmcore's default Adapter. Taking AuthAdapter as an example:

```typescript
import { AccessLevel, SingletonProto } from '@eggjs/tegg';
import { AccessLevel, SingletonProto } from 'egg';
import { AuthAdapter } from 'cnpmcore/infra/AuthAdapter';

@SingletonProto({
  name: 'authAdapter',
  accessLevel: AccessLevel.PUBLIC,
})
export class MyAuthAdapter extends AuthAdapter {
}
```

2. Add `config/module.json` to integrate cnpmcore as a module into our new tegg application

```json
[
  {
    "path": "../app/biz"
  },
  {
    "path": "../app/infra"
  },
  {
    "package": "cnpmcore/common"
  },
  {
    "package": "cnpmcore/core"
  },
  {
    "package": "cnpmcore/port"
  },
  {
    "package": "cnpmcore/repository"
  }
]
```

### ✍🏻 Override the AuthAdapter implementation

@@ -173,10 +160,10 @@ export default () => {

Modify the AuthAdapter.ts file

```typescript
import { AccessLevel, EggContext, SingletonProto } from '@eggjs/tegg';
import { AccessLevel, Context, SingletonProto } from 'egg';
import { AuthAdapter } from 'cnpmcore/infra/AuthAdapter';
import { randomUUID } from 'crypto';
import { AuthUrlResult, userResult } from 'node_modules/cnpmcore/dist/app/common/typing';
import { AuthUrlResult, userResult } from 'cnpmcore/dist/app/common/typing';

const ONE_DAY = 3600 * 24;
```

@@ -185,7 +172,7 @@ const ONE_DAY = 3600 * 24;

```typescript
  accessLevel: AccessLevel.PUBLIC,
})
export class MyAuthAdapter extends AuthAdapter {
  async getAuthUrl(ctx: EggContext): Promise<AuthUrlResult> {
  async getAuthUrl(ctx: Context): Promise<AuthUrlResult> {
    const sessionId = randomUUID();
    await this.redis.setex(sessionId, ONE_DAY, '');
    return {
```

@@ -208,33 +195,33 @@ export class MyAuthAdapter extends AuthAdapter {

Modify the HelloController implementation; in practice this can also be done via a login-center callback, page confirmation, etc.

```typescript
// Trigger the callback endpoint; user creation completes automatically
await this.httpclient.request(`${ctx.origin}/-/v1/login/sso/${name}`, { method: 'POST' });
```

## 🎉 Verifying the feature

1. Run `npm login --registry=http://127.0.0.1:7001` on the command line

```shell
npm login --registry=http://127.0.0.1:7001
npm notice Log in on http://127.0.0.1:7001/
Login at:
http://127.0.0.1:7001/hello?name=e44e8c43-211a-4bcd-ae78-c4cbb1a78ae7
Press ENTER to open in the browser...
```

2. The prompt asks you to press Enter to open the browser and visit the login center, i.e. the loginUrl we returned from getAuthUrl

3. Since we mocked the corresponding implementation, the page will directly show a successful login

```shell
Logged in on http://127.0.0.1:7001/.
```

4. Run `npm whoami --registry=http://127.0.0.1:7001` on the command line to verify

```shell
npm whoami --registry=http://127.0.0.1:7001
hello
```

15
README.md
@@ -1,9 +1,14 @@
# Private NPM Registry for Enterprise

[](https://github.com/cnpm/cnpmcore/actions/workflows/nodejs.yml)
[](https://codecov.io/gh/cnpm/cnpmcore)
[](https://github.com/cnpm/cnpmcore/actions/workflows/codeql-analysis.yml)
[](https://github.com/cnpm/cnpmcore/actions/workflows/nodejs.yml)
[](https://app.codecov.io/gh/cnpm/cnpmcore/tree/master)
[](https://app.fossa.com/projects/git%2Bgithub.com%2Fcnpm%2Fcnpmcore?ref=badge_shield)
[](https://nodejs.org/en/download/)
[](https://makeapullrequest.com)
[](https://www.npmjs.com/package/cnpmcore)
[](https://www.npmjs.com/package/cnpmcore)
[](https://github.com/cnpm/cnpmcore/blob/master/LICENSE)

Reimplement based on [cnpmjs.org](https://github.com/cnpm/cnpmjs.org) with TypeScript.

@@ -11,6 +16,10 @@ Reimplement based on [cnpmjs.org](https://github.com/cnpm/cnpmjs.org) with TypeS

See [registry-api.md](docs/registry-api.md)

## Internal API for Direct HTTP Requests

See [internal-api.md](docs/internal-api.md) for comprehensive documentation of cnpmcore's internal APIs that allow direct HTTP requests for package synchronization, administration, and other advanced operations.

## How to contribute

See [DEVELOPER.md](DEVELOPER.md)

25
app.ts
@@ -1,7 +1,8 @@
import path from 'path';
import { readFile } from 'fs/promises';
import { Application } from 'egg';
import { ChangesStreamService } from './app/core/service/ChangesStreamService';
import path from 'node:path';
import { readFile } from 'node:fs/promises';
import type { Application, ILifecycleBoot } from 'egg';

import { ChangesStreamService } from './app/core/service/ChangesStreamService.ts';

declare module 'egg' {
  interface Application {
@@ -9,7 +10,7 @@ declare module 'egg' {
  }
}

export default class CnpmcoreAppHook {
export default class CnpmcoreAppHook implements ILifecycleBoot {
  private readonly app: Application;

  constructor(app: Application) {
@@ -17,9 +18,9 @@ export default class CnpmcoreAppHook {
    this.app.binaryHTML = '';
  }

  async configWillLoad() {
  configWillLoad() {
    const app = this.app;
    // https://github.com/eggjs/tegg/blob/master/plugin/orm/app.ts#L37
    // https://github.com/eggjs/egg/blob/next/tegg/plugin/orm/src/app.ts#L37
    // store query sql to log
    app.config.orm.logger = {
      ...app.config.orm.logger,
@@ -33,14 +34,18 @@ export default class CnpmcoreAppHook {
  async didReady() {
    // ready binary.html and replace registry
    const filepath = path.join(this.app.baseDir, 'app/port/binary.html');
    const text = await readFile(filepath, 'utf-8');
    this.app.binaryHTML = text.replace('{{registry}}', this.app.config.cnpmcore.registry);
    const text = await readFile(filepath, 'utf8');
    this.app.binaryHTML = text.replace(
      '{{registry}}',
      this.app.config.cnpmcore.registry
    );
  }

  // Executed when the application exits
  // The currently running changesStream task needs to be suspended
  async beforeClose() {
    const changesStreamService = await this.app.getEggObject(ChangesStreamService);
    const changesStreamService =
      await this.app.getEggObject(ChangesStreamService);
    await changesStreamService.suspendSync(true);
  }
}

@@ -1,14 +1,8 @@
import {
  Inject,
} from '@eggjs/tegg';
import {
  EggAppConfig,
  EggLogger,
} from 'egg';
import { EggAppConfig, Logger, Inject } from 'egg';

export abstract class AbstractService {
  @Inject()
  protected readonly config: EggAppConfig;
  @Inject()
  protected readonly logger: EggLogger;
  protected readonly logger: Logger;
}

@@ -1,4 +1,4 @@
import { generateKeyPairSync } from 'crypto';
import { generateKeyPairSync } from 'node:crypto';
import NodeRSA from 'node-rsa';

// generate rsa key pair

@@ -1,27 +1,29 @@
const TimeoutErrorNames = [
const TimeoutErrorNames = new Set([
  'HttpClientRequestTimeoutError',
  'HttpClientConnectTimeoutError',
  'ConnectionError',
  'ConnectTimeoutError',
  'BodyTimeoutError',
  'ResponseTimeoutError',
];
]);

export function isTimeoutError(err: Error) {
  if (TimeoutErrorNames.includes(err.name)) {
  if (TimeoutErrorNames.has(err.name)) {
    return true;
  }
  if (err instanceof AggregateError && err.errors) {
    for (const subError of err.errors) {
      if (TimeoutErrorNames.includes(subError.name)) {
      if (TimeoutErrorNames.has(subError.name)) {
        return true;
      }
    }
  }
  if ('cause' in err && err.cause instanceof Error) {
    if (TimeoutErrorNames.includes(err.cause.name)) {
      return true;
    }
  if (
    'cause' in err &&
    err.cause instanceof Error &&
    TimeoutErrorNames.has(err.cause.name)
  ) {
    return true;
  }
  return false;
}

@@ -1,64 +1,20 @@
// oxlint-disable import/exports-last
import { mkdir, rm } from 'node:fs/promises';
import { createWriteStream } from 'node:fs';
import { setTimeout } from 'node:timers/promises';
import path from 'node:path';
import url from 'node:url';
import { randomBytes } from 'node:crypto';
import { EggContextHttpClient, HttpClientResponse } from 'egg';
import type { EggContextHttpClient, HttpClientResponse } from 'egg';
import mime from 'mime-types';
import dayjs from './dayjs';
import dayjs from './dayjs.ts';

interface DownloadToTempfileOptionalConfig {
  retries?: number,
  ignoreDownloadStatuses?: number[],
  remoteAuthToken?: string
}

export async function createTempDir(dataDir: string, dirname?: string) {
  // will auto clean on CleanTempDir Schedule
  let tmpdir = path.join(dataDir, 'downloads', dayjs().format('YYYY/MM/DD'));
  if (dirname) {
    tmpdir = path.join(tmpdir, dirname);
  }
  await mkdir(tmpdir, { recursive: true });
  return tmpdir;
}

export async function createTempfile(dataDir: string, filename: string) {
  const tmpdir = await createTempDir(dataDir);
  // The filename is a URL (from dist.tarball), which needs to be truncated, (`getconf NAME_MAX /` # max filename length: 255 bytes)
  // https://github.com/cnpm/cnpmjs.org/pull/1345
  const tmpfile = path.join(tmpdir, `${randomBytes(10).toString('hex')}-${path.basename(url.parse(filename).pathname!)}`);
  return tmpfile;
}

export async function downloadToTempfile(httpclient: EggContextHttpClient,
  dataDir: string, url: string, optionalConfig?: DownloadToTempfileOptionalConfig) {
  let retries = optionalConfig?.retries || 3;
  let lastError: any;
  while (retries > 0) {
    try {
      return await _downloadToTempfile(httpclient, dataDir, url, optionalConfig);
    } catch (err: any) {
      if (err.name === 'DownloadNotFoundError') throw err;
      lastError = err;
    }
    retries--;
    if (retries > 0) {
      // sleep 1s ~ 4s in random
      const delay = process.env.NODE_ENV === 'test' ? 1 : 1000 + Math.random() * 4000;
      await setTimeout(delay);
    }
  }
  throw lastError;
}
export interface Tempfile {
  tmpfile: string;
  headers: HttpClientResponse['res']['headers'];
  timing: HttpClientResponse['res']['timing'];
}
async function _downloadToTempfile(httpclient: EggContextHttpClient,
  dataDir: string, url: string, optionalConfig?: DownloadToTempfileOptionalConfig): Promise<Tempfile> {
async function _downloadToTempfile(
  httpclient: EggContextHttpClient,
  dataDir: string,
  url: string,
  optionalConfig?: DownloadToTempfileOptionalConfig
): Promise<Tempfile> {
  const tmpfile = await createTempfile(dataDir, url);
  const writeStream = createWriteStream(tmpfile);
  try {
@@ -68,14 +24,18 @@ async function _downloadToTempfile(httpclient: EggContextHttpClient,
    if (optionalConfig?.remoteAuthToken) {
      requestHeaders.authorization = `Bearer ${optionalConfig.remoteAuthToken}`;
    }
    const { status, headers, res } = await httpclient.request(url, {
      timeout: 60000 * 10,
    const { status, headers, res } = (await httpclient.request(url, {
      timeout: 60_000 * 10,
      headers: requestHeaders,
      writeStream,
      timing: true,
      followRedirect: true,
    }) as HttpClientResponse;
    if (status === 404 || (optionalConfig?.ignoreDownloadStatuses && optionalConfig.ignoreDownloadStatuses.includes(status))) {
    })) as HttpClientResponse;
    if (
      status === 404 ||
      (optionalConfig?.ignoreDownloadStatuses &&
        optionalConfig.ignoreDownloadStatuses.includes(status))
    ) {
      const err = new Error(`Not found, status(${status})`);
      err.name = 'DownloadNotFoundError';
      throw err;
@@ -96,6 +56,71 @@ async function _downloadToTempfile(httpclient: EggContextHttpClient,
  }
}

export interface DownloadToTempfileOptionalConfig {
  retries?: number;
  ignoreDownloadStatuses?: number[];
  remoteAuthToken?: string;
}

export async function createTempDir(dataDir: string, dirname?: string) {
  // will auto clean on CleanTempDir Schedule
  let tmpdir = path.join(dataDir, 'downloads', dayjs().format('YYYY/MM/DD'));
  if (dirname) {
    tmpdir = path.join(tmpdir, dirname);
  }
  await mkdir(tmpdir, { recursive: true });
  return tmpdir;
}

export async function createTempfile(dataDir: string, filename: string) {
  const tmpdir = await createTempDir(dataDir);
  // The filename is a URL (from dist.tarball), which needs to be truncated, (`getconf NAME_MAX /` # max filename length: 255 bytes)
  // https://github.com/cnpm/cnpmjs.org/pull/1345
  const tmpfile = path.join(
    tmpdir,
    // oxlint-disable-next-line typescript-eslint/no-non-null-assertion
    `${randomBytes(10).toString('hex')}-${path.basename(url.parse(filename).pathname!)}`
  );
  return tmpfile;
}

export async function downloadToTempfile(
  httpclient: EggContextHttpClient,
  dataDir: string,
  url: string,
  optionalConfig?: DownloadToTempfileOptionalConfig
) {
  let retries = optionalConfig?.retries || 3;
  let lastError: Error | undefined;
  while (retries > 0) {
    try {
      return await _downloadToTempfile(
        httpclient,
        dataDir,
        url,
        optionalConfig
      );
    } catch (err) {
      if (err.name === 'DownloadNotFoundError') throw err;
      lastError = err;
    }
    retries--;
    if (retries > 0) {
      // sleep 1s ~ 4s in random
      const delay =
        process.env.NODE_ENV === 'test' ? 1 : 1000 + Math.random() * 4000;
      await setTimeout(delay);
    }
  }
  // oxlint-disable-next-line no-throw-literal
  throw lastError;
}
export interface Tempfile {
  tmpfile: string;
  headers: HttpClientResponse['res']['headers'];
  timing: HttpClientResponse['res']['timing'];
}

const DEFAULT_CONTENT_TYPE = 'application/octet-stream';
const PLAIN_TEXT = 'text/plain';
const WHITE_FILENAME_CONTENT_TYPES = {
@@ -110,11 +135,27 @@ const WHITE_FILENAME_CONTENT_TYPES = {
  '.eslintrc': 'application/json',
} as const;

const CONTENT_TYPE_BLACKLIST = new Set(['application/xml', 'text/html']);

export function ensureContentType(contentType: string) {
  if (CONTENT_TYPE_BLACKLIST.has(contentType)) {
    return 'text/plain';
  }
  return contentType;
}

export function mimeLookup(filepath: string) {
  const filename = path.basename(filepath).toLowerCase();
  if (filename.endsWith('.ts')) return PLAIN_TEXT;
  if (filename.endsWith('.lock')) return PLAIN_TEXT;
  return mime.lookup(filename) ||
    WHITE_FILENAME_CONTENT_TYPES[filename as keyof typeof WHITE_FILENAME_CONTENT_TYPES] ||
  const defaultContentType = mime.lookup(filename);
  // https://github.com/cnpm/cnpmcore/issues/693#issuecomment-2955268229
  const contentType =
    defaultContentType ||
    WHITE_FILENAME_CONTENT_TYPES[
      filename as keyof typeof WHITE_FILENAME_CONTENT_TYPES
    ] ||
    DEFAULT_CONTENT_TYPE;

  return ensureContentType(contentType);
}

@@ -1,10 +1,15 @@
import { createReadStream } from 'node:fs';
import { Readable } from 'node:stream';
import { pipeline } from 'node:stream/promises';
import * as ssri from 'ssri';
import tar from '@fengmk2/tar';
import { AuthorType, PackageJSONType } from '../repository/PackageRepository';

import { fromData, fromStream, type HashLike } from 'ssri';
// @ts-expect-error no types available
import tar from '@fengmk2/tar';

import type {
  AuthorType,
  PackageJSONType,
} from '../repository/PackageRepository.ts';

// /@cnpm%2ffoo
// /@cnpm%2Ffoo
@@ -12,13 +17,14 @@ import { AuthorType, PackageJSONType } from '../repository/PackageRepository';
// /foo
// name max length is 214 chars
// https://www.npmjs.com/package/path-to-regexp#custom-matching-parameters
export const FULLNAME_REG_STRING = '@[^/]{1,220}\/[^/]{1,220}|@[^%]+\%2[fF][^/]{1,220}|[^@/]{1,220}';
export const FULLNAME_REG_STRING =
  '@[^/]{1,220}/[^/]{1,220}|@[^%]+%2[fF][^/]{1,220}|[^@/]{1,220}';

export function getScopeAndName(fullname: string): string[] {
  if (fullname.startsWith('@')) {
    return fullname.split('/', 2);
  }
  return [ '', fullname ];
  return ['', fullname];
}

export function getFullname(scope: string, name: string): string {
@@ -33,15 +39,22 @@ export function getPrefixedName(prefix: string, username: string): string {
  return prefix ? `${prefix}${username}` : username;
}

export async function calculateIntegrity(contentOrFile: Uint8Array | string) {
  let integrityObj;
export interface Integrity {
  integrity: string;
  shasum: string;
}

export async function calculateIntegrity(
  contentOrFile: Uint8Array | string
): Promise<Integrity> {
  let integrityObj: HashLike;
  if (typeof contentOrFile === 'string') {
    integrityObj = await ssri.fromStream(createReadStream(contentOrFile), {
      algorithms: [ 'sha512', 'sha1' ],
    integrityObj = await fromStream(createReadStream(contentOrFile), {
      algorithms: ['sha512', 'sha1'],
    });
  } else {
    integrityObj = ssri.fromData(contentOrFile, {
      algorithms: [ 'sha512', 'sha1' ],
    integrityObj = fromData(contentOrFile, {
      algorithms: ['sha512', 'sha1'],
    });
  }
  const integrity = integrityObj.sha512[0].toString() as string;
@@ -49,26 +62,33 @@ export async function calculateIntegrity(contentOrFile: Uint8Array | string) {
  return { integrity, shasum };
}

export function formatTarball(registry: string, scope: string, name: string, version: string) {
export function formatTarball(
  registry: string,
  scope: string,
  name: string,
  version: string
) {
  const fullname = getFullname(scope, name);
  return `${registry}/${fullname}/-/${name}-${version}.tgz`;
}

export function detectInstallScript(manifest: any) {
export function detectInstallScript(manifest: {
  scripts?: Record<string, string>;
}) {
  // https://github.com/npm/registry/blob/master/docs/responses/package-metadata.md#abbreviated-version-object
  let hasInstallScript = false;
  const scripts = manifest.scripts;
  if (scripts) {
    // https://www.npmjs.com/package/fix-has-install-script
    if (scripts.install || scripts.preinstall || scripts.postinstall) {
      hasInstallScript = true;
    }
  // https://www.npmjs.com/package/fix-has-install-script
  if (scripts?.install || scripts?.preinstall || scripts?.postinstall) {
    hasInstallScript = true;
  }
  return hasInstallScript;
}

/** Check whether a version tarball contains npm-shrinkwrap.json */
export async function hasShrinkWrapInTgz(contentOrFile: Uint8Array | string): Promise<boolean> {
export async function hasShrinkWrapInTgz(
  contentOrFile: Uint8Array | string
): Promise<boolean> {
  let readable: Readable;
  if (typeof contentOrFile === 'string') {
    readable = createReadStream(contentOrFile);
@@ -86,7 +106,8 @@ export async function hasShrinkWrapInTgz(contentOrFile: Uint8Array | string): Pr
  const parser = tar.t({
    // options.strict defaults to false, which ignores recoverable errors such as tar parse failures
    // see https://github.com/isaacs/node-tar#warnings-and-errors
    onentry(entry) {
    // oxlint-disable-next-line typescript-eslint/no-explicit-any
    onentry(entry: any) {
      if (entry.path === 'package/npm-shrinkwrap.json') {
        hasShrinkWrap = true;
        abortController.abort();
@@ -101,12 +122,17 @@ export async function hasShrinkWrapInTgz(contentOrFile: Uint8Array | string): Pr
    if (e.code === 'ABORT_ERR') {
      return hasShrinkWrap;
    }
    throw Object.assign(new Error('[hasShrinkWrapInTgz] Fail to parse input file'), { cause: e });
    throw Object.assign(
      new Error('[hasShrinkWrapInTgz] Fail to parse input file'),
      { cause: e }
    );
  }
}

/** Format author when writing to ES */
export function formatAuthor(author: string | AuthorType | undefined): AuthorType | undefined {
export function formatAuthor(
  author: string | AuthorType | undefined
): AuthorType | undefined {
  if (author === undefined) {
    return author;
  }
@@ -118,12 +144,15 @@ export function formatAuthor(author: string | AuthorType | undefined): AuthorTyp
  return author;
}

export async function extractPackageJSON(tarballBytes: Buffer): Promise<PackageJSONType> {
export async function extractPackageJSON(
  tarballBytes: Buffer
): Promise<PackageJSONType> {
  // oxlint-disable-next-line promise/avoid-new
  return new Promise((resolve, reject) => {
    Readable.from(tarballBytes)
      .pipe(tar.t({
        filter: name => name === 'package/package.json',
        onentry: async entry => {
    Readable.from(tarballBytes).pipe(
      tar.t({
        filter: (name: string) => name === 'package/package.json',
        onentry: async (entry: Readable) => {
          const chunks: Buffer[] = [];
          for await (const chunk of entry) {
            chunks.push(chunk);
@@ -131,10 +160,11 @@ export async function extractPackageJSON(tarballBytes: Buffer): Promise<PackageJ
          try {
            const data = Buffer.concat(chunks);
            return resolve(JSON.parse(data.toString()));
          } catch (err) {
          } catch {
            reject(new Error('Error parsing package.json'));
          }
        },
      }));
      })
    );
  });
}

@@ -1,6 +1,6 @@
import { EggContext } from '@eggjs/tegg';
import type { Context } from 'egg';

export function isSyncWorkerRequest(ctx: EggContext) {
export function isSyncWorkerRequest(ctx: Context) {
  // sync request will contain this query params
  let isSyncWorkerRequest = ctx.query.cache === '0';
  if (!isSyncWorkerRequest) {

@@ -1,7 +1,7 @@
import crypto from 'crypto';
import crypto from 'node:crypto';
import base from 'base-x';
import { crc32 } from '@node-rs/crc32';
import * as ssri from 'ssri';
import { checkData, create } from 'ssri';
import UAParser from 'ua-parser-js';

const base62 = base('0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ');
@@ -29,12 +29,12 @@ export function checkToken(token: string, prefix: string): boolean {
}

export function integrity(plain: string): string {
  return ssri.create().update(plain).digest()
  return create().update(plain).digest()
    .toString();
}

export function checkIntegrity(plain: string, expectedIntegrity: string): boolean {
  return !!ssri.checkData(plain, expectedIntegrity);
  return !!checkData(plain, expectedIntegrity);
}

export function sha512(plain: string): string {

@@ -1,5 +1,6 @@
import { AccessLevel, SingletonProto } from '@eggjs/tegg';
import { BugVersion } from '../../core/entity/BugVersion';
import { AccessLevel, SingletonProto } from 'egg';

import type { BugVersion } from '../../core/entity/BugVersion.ts';

@SingletonProto({
  accessLevel: AccessLevel.PUBLIC,

@@ -1,10 +1,6 @@
import {
  SingletonProto,
  AccessLevel,
  Inject,
} from '@eggjs/tegg';
// FIXME: egg-redis should use ioredis v5
// https://github.com/eggjs/egg-redis/issues/35
import { AccessLevel, Inject, SingletonProto } from 'egg';
// FIXME: @eggjs/redis should use ioredis v5
// https://github.com/eggjs/redis/issues/35
import type { Redis } from 'ioredis';

const ONE_DAY = 3600 * 24;
@@ -40,7 +36,7 @@ export class CacheAdapter {
    const lockName = this.getLockName(key);
    const existsTimestamp = await this.redis.get(lockName);
    if (existsTimestamp) {
      if (Date.now() - parseInt(existsTimestamp) < seconds * 1000) {
      if (Date.now() - Number.parseInt(existsTimestamp) < seconds * 1000) {
        return null;
      }
      // lock timeout, delete it

@@ -1,14 +1,11 @@
import { Readable } from 'stream';
import {
  SingletonProto,
  AccessLevel,
  Inject,
} from '@eggjs/tegg';
import { Pointcut } from '@eggjs/tegg/aop';
import { EggLogger } from 'egg';
import { AsyncTimer } from '../aop/AsyncTimer';
import { NFSClient } from '../typing';
import { IncomingHttpHeaders } from 'http';
import type { Readable } from 'node:stream';
import type { IncomingHttpHeaders } from 'node:http';

import { AccessLevel, Inject, SingletonProto, Logger } from 'egg';
import { Pointcut } from 'egg/aop';

import { AsyncTimer } from '../aop/AsyncTimer.ts';
import type { NFSClient } from '../typing.ts';

const INSTANCE_NAME = 'nfsAdapter';

@@ -21,17 +18,27 @@ export class NFSAdapter {
  private readonly nfsClient: NFSClient;

  @Inject()
  private readonly logger: EggLogger;
  private readonly logger: Logger;

  @Pointcut(AsyncTimer)
  async uploadBytes(storeKey: string, bytes: Uint8Array) {
    this.logger.info('[%s:uploadBytes] key: %s, bytes: %d', INSTANCE_NAME, storeKey, bytes.length);
    this.logger.info(
      '[%s:uploadBytes] key: %s, bytes: %d',
      INSTANCE_NAME,
      storeKey,
      bytes.length
    );
    await this.nfsClient.uploadBytes(bytes, { key: storeKey });
  }

  // will return next store position
  @Pointcut(AsyncTimer)
  async appendBytes(storeKey: string, bytes: Uint8Array, position?: string, headers?: IncomingHttpHeaders) {
  async appendBytes(
    storeKey: string,
    bytes: Uint8Array,
    position?: string,
    headers?: IncomingHttpHeaders
  ) {
    // make sure position is undefined by the first time
    if (!position) position = undefined;
    const options = {
@@ -45,14 +52,24 @@ export class NFSAdapter {

  @Pointcut(AsyncTimer)
  async uploadFile(storeKey: string, file: string) {
    this.logger.info('[%s:uploadFile] key: %s, file: %s', INSTANCE_NAME, storeKey, file);
    this.logger.info(
      '[%s:uploadFile] key: %s, file: %s',
      INSTANCE_NAME,
      storeKey,
      file
    );
    await this.nfsClient.upload(file, { key: storeKey });
  }

  @Pointcut(AsyncTimer)
  async downloadFile(storeKey: string, file: string, timeout: number) {
    this.logger.info('[%s:downloadFile] key: %s, file: %s, timeout: %s',
      INSTANCE_NAME, storeKey, file, timeout);
    this.logger.info(
      '[%s:downloadFile] key: %s, file: %s, timeout: %s',
      INSTANCE_NAME,
      storeKey,
      file,
      timeout
    );
    await this.nfsClient.download(storeKey, file, { timeout });
  }

@@ -79,7 +96,9 @@ export class NFSAdapter {
    }
  }

  async getDownloadUrlOrStream(storeKey: string): Promise<string | Readable | undefined> {
  async getDownloadUrlOrStream(
    storeKey: string
  ): Promise<string | Readable | undefined> {
    const downloadUrl = await this.getDownloadUrl(storeKey);
    if (downloadUrl) {
      return downloadUrl;

@@ -1,18 +1,17 @@
import { setTimeout } from 'timers/promises';
import { setTimeout } from 'node:timers/promises';

import {
  ContextProto,
  AccessLevel,
  Inject,
} from '@eggjs/tegg';
import {
  EggLogger,
  EggContextHttpClient,
  AccessLevel, Inject,
  EggAppConfig,
  HttpClient,
  Logger,
  HttpClientRequestOptions,
  HttpClientResponse,
} from 'egg';
import { PackageManifestType } from '../../repository/PackageRepository';
import { isTimeoutError } from '../ErrorUtil';

import type { PackageManifestType } from '../../repository/PackageRepository.ts';
import { isTimeoutError } from '../ErrorUtil.ts';

type HttpMethod = HttpClientRequestOptions['method'];

@@ -26,12 +25,12 @@ export type RegistryResponse = { method: HttpMethod } & HttpClientResponse;
})
export class NPMRegistry {
  @Inject()
  private readonly logger: EggLogger;
  private readonly logger: Logger;
  @Inject()
  private readonly httpclient: EggContextHttpClient;
  private readonly httpClient: HttpClient;
  @Inject()
  private config: EggAppConfig;
  private timeout = 10000;
  private timeout = 10_000;
  public registryHost: string;

  get registry(): string {
@@ -42,22 +41,27 @@ export class NPMRegistry {
    this.registryHost = registryHost;
  }

  public async getFullManifests(fullname: string, optionalConfig?: { retries?: number, remoteAuthToken?: string }): Promise<{ method: HttpMethod } & HttpClientResponse<PackageManifestType>> {
  public async getFullManifests(
    fullname: string,
    optionalConfig?: { retries?: number; remoteAuthToken?: string }
  ): Promise<{ method: HttpMethod } & HttpClientResponse<PackageManifestType>> {
    let retries = optionalConfig?.retries || 3;
    // set query t=timestamp, make sure CDN cache disable
    // cache=0 is sync worker request flag
    const url = `${this.registry}/${encodeURIComponent(fullname)}?t=${Date.now()}&cache=0`;
    let lastError: any;
    let lastError: Error | undefined;
    while (retries > 0) {
      try {
        // large package: https://r.cnpmjs.org/%40procore%2Fcore-icons
        // https://r.cnpmjs.org/intraactive-sdk-ui 44s
        const authorization = this.genAuthorizationHeader(optionalConfig?.remoteAuthToken);
        const authorization = this.genAuthorizationHeader(
          optionalConfig?.remoteAuthToken
        );
        return await this.request('GET', url, undefined, {
          timeout: 120000,
          timeout: 120_000,
          headers: { authorization },
        });
      } catch (err: any) {
      } catch (err) {
        if (isTimeoutError(err)) {
          throw err;
        }
@@ -66,16 +70,23 @@ export class NPMRegistry {
      retries--;
      if (retries > 0) {
        // sleep 1s ~ 4s in random
        const delay = process.env.NODE_ENV === 'test' ? 1 : 1000 + Math.random() * 4000;
        const delay =
          process.env.NODE_ENV === 'test' ? 1 : 1000 + Math.random() * 4000;
        await setTimeout(delay);
      }
    }
    // oxlint-disable-next-line no-throw-literal
    throw lastError;
  }

  // app.put('/:name/sync', sync.sync);
  public async createSyncTask(fullname: string, optionalConfig?: { remoteAuthToken?:string}): Promise<RegistryResponse> {
    const authorization = this.genAuthorizationHeader(optionalConfig?.remoteAuthToken);
  public async createSyncTask(
    fullname: string,
    optionalConfig?: { remoteAuthToken?: string }
  ): Promise<RegistryResponse> {
    const authorization = this.genAuthorizationHeader(
      optionalConfig?.remoteAuthToken
    );
    const url = `${this.registry}/${encodeURIComponent(fullname)}/sync?sync_upstream=true&nodeps=true`;
    // {
    //   ok: true,
@@ -85,21 +96,41 @@ export class NPMRegistry {
  }

  // app.get('/:name/sync/log/:id', sync.getSyncLog);
  public async getSyncTask(fullname: string, id: string, offset: number, optionalConfig?:{ remoteAuthToken?:string }): Promise<RegistryResponse> {
    const authorization = this.genAuthorizationHeader(optionalConfig?.remoteAuthToken);
  public async getSyncTask(
    fullname: string,
    id: string,
    offset: number,
    optionalConfig?: { remoteAuthToken?: string }
  ): Promise<RegistryResponse> {
    const authorization = this.genAuthorizationHeader(
      optionalConfig?.remoteAuthToken
    );
    const url = `${this.registry}/${encodeURIComponent(fullname)}/sync/log/${id}?offset=${offset}`;
    // { ok: true, syncDone: syncDone, log: log }
    return await this.request('GET', url, undefined, { authorization });
  }

  public async getDownloadRanges(registry: string, fullname: string, start: string, end: string, optionalConfig?:{ remoteAuthToken?:string }): Promise<RegistryResponse> {
    const authorization = this.genAuthorizationHeader(optionalConfig?.remoteAuthToken);
  public async getDownloadRanges(
    registry: string,
    fullname: string,
    start: string,
    end: string,
    optionalConfig?: { remoteAuthToken?: string }
  ): Promise<RegistryResponse> {
    const authorization = this.genAuthorizationHeader(
      optionalConfig?.remoteAuthToken
    );
    const url = `${registry}/downloads/range/${start}:${end}/${encodeURIComponent(fullname)}`;
    return await this.request('GET', url, undefined, { authorization });
  }

  private async request(method: HttpMethod, url: string, params?: object, options?: object): Promise<RegistryResponse> {
    const res = await this.httpclient.request(url, {
  private async request(
    method: HttpMethod,
    url: string,
    params?: object,
    options?: object
  ): Promise<RegistryResponse> {
    const res = (await this.httpClient.request(url, {
      method,
      data: params,
      dataType: 'json',
@@ -109,15 +140,20 @@ export class NPMRegistry {
      followRedirect: true,
      gzip: true,
      ...options,
    }) as HttpClientResponse;
    this.logger.info('[NPMRegistry:request] %s %s, status: %s', method, url, res.status);
    })) as HttpClientResponse;
    this.logger.info(
      '[NPMRegistry:request] %s %s, status: %s',
      method,
      url,
      res.status
    );
    return {
      method,
      ...res,
    };
  }

  public genAuthorizationHeader(remoteAuthToken?:string) {
  public genAuthorizationHeader(remoteAuthToken?: string) {
    return remoteAuthToken ? `Bearer ${remoteAuthToken}` : '';
  }
}

@@ -1,35 +1,48 @@
import { ImplDecorator, Inject, QualifierImplDecoratorUtil } from '@eggjs/tegg';
import { BinaryType } from '../../enum/Binary';
import { EggHttpClient, EggLogger } from 'egg';
import { BinaryName, BinaryTaskConfig } from '../../../../config/binaries';
import {
  Inject,
  QualifierImplDecoratorUtil,
  type ImplDecorator,
  HttpClient,
  Logger,
} from 'egg';

export type BinaryItem = {
import type { BinaryType } from '../../enum/Binary.ts';
import type {
  BinaryName,
  BinaryTaskConfig,
} from '../../../../config/binaries.ts';

const platforms = ['darwin', 'linux', 'win32'] as const;
export interface BinaryItem {
  name: string;
  isDir: boolean;
  url: string;
  size: string | number;
  date: string;
  ignoreDownloadStatuses?: number[];
};
}

export type FetchResult = {
export interface FetchResult {
  items: BinaryItem[];
  // oxlint-disable-next-line typescript-eslint/no-explicit-any
  nextParams?: any;
};

const platforms = [ 'darwin', 'linux', 'win32' ] as const;
}

export const BINARY_ADAPTER_ATTRIBUTE = Symbol('BINARY_ADAPTER_ATTRIBUTE');

export abstract class AbstractBinary {
  @Inject()
  protected logger: EggLogger;
  protected logger: Logger;

  @Inject()
  protected httpclient: EggHttpClient;
  protected httpclient: HttpClient;

  abstract initFetch(binaryName: BinaryName): Promise<void>;
  abstract fetch(dir: string, binaryName: BinaryName): Promise<FetchResult | undefined>;
  abstract fetch(
    dir: string,
    binaryName: BinaryName,
    lastData?: Record<string, unknown>
  ): Promise<FetchResult | undefined>;

  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  async finishFetch(_success: boolean, _binaryName: BinaryName): Promise<void> {
@@ -38,40 +51,57 @@ export abstract class AbstractBinary {

  protected async requestXml(url: string) {
    const { status, data, headers } = await this.httpclient.request(url, {
      timeout: 30000,
      timeout: 30_000,
      followRedirect: true,
      gzip: true,
    });
    const xml = data.toString() as string;
    if (status !== 200) {
      this.logger.warn('[AbstractBinary.requestXml:non-200-status] url: %s, status: %s, headers: %j, xml: %j', url, status, headers, xml);
      this.logger.warn(
        '[AbstractBinary.requestXml:non-200-status] url: %s, status: %s, headers: %j, xml: %j',
        url,
        status,
        headers,
        xml
      );
      return '';
    }
    return xml;
  }

  protected async requestJSON(url: string, requestHeaders?: Record<string, string>) {
  // oxlint-disable-next-line typescript-eslint/no-explicit-any
  protected async requestJSON<T = any>(
    url: string,
    requestHeaders?: Record<string, string>
  ): Promise<T> {
    const { status, data, headers } = await this.httpclient.request(url, {
      timeout: 30000,
      timeout: 30_000,
      dataType: 'json',
      followRedirect: true,
      gzip: true,
      headers: requestHeaders,
    });
    if (status !== 200) {
      this.logger.warn('[AbstractBinary.requestJSON:non-200-status] url: %s, status: %s, headers: %j', url, status, headers);
      return data;
      this.logger.warn(
        '[AbstractBinary.requestJSON:non-200-status] url: %s, status: %s, headers: %j',
        url,
        status,
        headers
      );
      return data as T;
    }
    return data;
    return data as T;
  }

  // https://nodejs.org/api/n-api.html#n_api_node_api_version_matrix
  protected async listNodeABIVersions() {
    const nodeABIVersions: number[] = [];
    const versions = await this.requestJSON('https://nodejs.org/dist/index.json');
    const versions = await this.requestJSON(
      'https://nodejs.org/dist/index.json'
    );
    for (const version of versions) {
      if (!version.modules) continue;
      const modulesVersion = parseInt(version.modules);
      const modulesVersion = Number.parseInt(version.modules);
      // node v6.0.0 modules 48 min
      if (modulesVersion >= 48 && !nodeABIVersions.includes(modulesVersion)) {
        nodeABIVersions.push(modulesVersion);
@@ -89,21 +119,24 @@ export abstract class AbstractBinary {
    if (binaryConfig?.options?.nodeArchs) return binaryConfig.options.nodeArchs;
    // https://nodejs.org/api/os.html#osarch
    return {
      linux: [ 'arm', 'arm64', 's390x', 'ia32', 'x64' ],
      darwin: [ 'arm64', 'ia32', 'x64' ],
      win32: [ 'ia32', 'x64' ],
      linux: ['arm', 'arm64', 's390x', 'ia32', 'x64'],
      darwin: ['arm64', 'ia32', 'x64'],
      win32: ['ia32', 'x64'],
    };
  }

  protected listNodeLibcs(): Record<typeof platforms[number], string[]> {
  protected listNodeLibcs(): Record<(typeof platforms)[number], string[]> {
    // https://github.com/lovell/detect-libc/blob/master/lib/detect-libc.js#L42
    return {
      darwin: [ 'unknown' ],
      linux: [ 'glibc', 'musl' ],
      win32: [ 'unknown' ],
      darwin: ['unknown'],
      linux: ['glibc', 'musl'],
      win32: ['unknown'],
    };
  }
}

export const BinaryAdapter: ImplDecorator<AbstractBinary, typeof BinaryType> =
  QualifierImplDecoratorUtil.generatorDecorator(AbstractBinary, BINARY_ADAPTER_ATTRIBUTE);
  QualifierImplDecoratorUtil.generatorDecorator(
    AbstractBinary,
    BINARY_ADAPTER_ATTRIBUTE
  );

@@ -1,7 +1,12 @@
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
import { Inject, SingletonProto } from '@eggjs/tegg';
import { BinaryType } from '../../enum/Binary';
import { EggAppConfig } from 'egg';
import { Inject, SingletonProto, EggAppConfig } from 'egg';

import { BinaryType } from '../../enum/Binary.ts';
import {
  AbstractBinary,
  BinaryAdapter,
  type BinaryItem,
  type FetchResult,
} from './AbstractBinary.ts';

@SingletonProto()
@BinaryAdapter(BinaryType.Api)
@@ -14,12 +19,25 @@ export class ApiBinary extends AbstractBinary {
    return;
  }

  async fetch(dir: string, binaryName: string): Promise<FetchResult | undefined> {
    const apiUrl = this.config.cnpmcore.syncBinaryFromAPISource || `${this.config.cnpmcore.sourceRegistry}/-/binary`;
    const url = `${apiUrl}/${binaryName}${dir}`;
  async fetch(
    dir: string,
    binaryName: string,
    lastData?: Record<string, unknown>
  ): Promise<FetchResult | undefined> {
    const apiUrl =
      this.config.cnpmcore.syncBinaryFromAPISource ||
      `${this.config.cnpmcore.sourceRegistry}/-/binary`;
    let url = `${apiUrl}/${binaryName}${dir}`;
    if (lastData && lastData.lastSyncTime) {
      url += `?since=${lastData.lastSyncTime}&limit=100`;
    }

    const data = await this.requestJSON(url);
    if (!Array.isArray(data)) {
      this.logger.warn('[ApiBinary.fetch:response-data-not-array] data: %j', data);
      this.logger.warn(
        '[ApiBinary.fetch:response-data-not-array] data: %j',
        data
      );
      return;
    }
    const items: BinaryItem[] = [];
@@ -28,6 +46,7 @@ export class ApiBinary extends AbstractBinary {
        name: item.name,
        isDir: item.type === 'dir',
        url: item.url,
        // oxlint-disable-next-line unicorn/explicit-length-check
        size: item.size || '-',
        date: item.date,
      });

@@ -1,8 +1,18 @@
-import { SingletonProto } from '@eggjs/tegg';
-import { BinaryType } from '../../enum/Binary';
-import binaries, { BinaryName, BinaryTaskConfig } from '../../../../config/binaries';
-import path from 'path';
-import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
+import path from 'node:path';
+
+import { SingletonProto } from 'egg';
+
+import binaries, {
+  type BinaryName,
+  type BinaryTaskConfig,
+} from '../../../../config/binaries.ts';
+import { BinaryType } from '../../enum/Binary.ts';
+import {
+  AbstractBinary,
+  BinaryAdapter,
+  type BinaryItem,
+  type FetchResult,
+} from './AbstractBinary.ts';
 
 @SingletonProto()
 @BinaryAdapter(BinaryType.Bucket)
@@ -12,22 +22,30 @@ export class BucketBinary extends AbstractBinary {
     return;
   }
 
-  async fetch(dir: string, binaryName: BinaryName): Promise<FetchResult | undefined> {
+  async fetch(
+    dir: string,
+    binaryName: BinaryName
+  ): Promise<FetchResult | undefined> {
     // /foo/ => foo/
     const binaryConfig = binaries[binaryName];
-    const subDir = dir.substring(1);
+    const subDir = dir.slice(1);
     const url = `${binaryConfig.distUrl}?delimiter=/&prefix=${encodeURIComponent(subDir)}`;
     const xml = await this.requestXml(url);
     return { items: this.parseItems(xml, dir, binaryConfig), nextParams: null };
   }
 
-  protected parseItems(xml: string, dir: string, binaryConfig: BinaryTaskConfig): BinaryItem[] {
+  protected parseItems(
+    xml: string,
+    dir: string,
+    binaryConfig: BinaryTaskConfig
+  ): BinaryItem[] {
     const items: BinaryItem[] = [];
     // https://nwjs2.s3.amazonaws.com/?prefix=v0.59.0%2Fx64%2F
     // https://chromedriver.storage.googleapis.com/?delimiter=/&prefix=
     // <Contents><Key>2.0/chromedriver_linux32.zip</Key><Generation>1380149859530000</Generation><MetaGeneration>2</MetaGeneration><LastModified>2013-09-25T22:57:39.349Z</LastModified><ETag>"c0d96102715c4916b872f91f5bf9b12c"</ETag><Size>7262134</Size><Owner/></Contents><Contents>
     // <Contents><Key>v0.59.0/nwjs-v0.59.0-linux-ia32.tar.gz</Key><LastModified>2015-11-02T02:34:18.000Z</LastModified><ETag>"b1b7a52928e9f874bad0cabf7f74ba8e"</ETag><Size>22842</Size><StorageClass>STANDARD</StorageClass></Contents>
-    const fileRe = /<Contents><Key>([^<]+?)<\/Key>(?:<Generation>\d+?<\/Generation>)?(?:<MetaGeneration>\d+?<\/MetaGeneration>)?<LastModified>([^<]+?)<\/LastModified><ETag>[^<]+?<\/ETag><Size>(\d+?)<\/Size>/g;
+    const fileRe =
+      /<Contents><Key>([^<]+?)<\/Key>(?:<Generation>\d+?<\/Generation>)?(?:<MetaGeneration>\d+?<\/MetaGeneration>)?<LastModified>([^<]+?)<\/LastModified><ETag>[^<]+?<\/ETag><Size>(\d+?)<\/Size>/g;
     let matchs = xml.matchAll(fileRe);
     for (const m of matchs) {
       const fullname = m[1].trim();
@@ -42,7 +60,7 @@ export class BucketBinary extends AbstractBinary {
 
       const name = path.basename(fullname);
       const date = m[2].trim();
-      const size = parseInt(m[3].trim());
+      const size = Number.parseInt(m[3].trim());
       items.push({
         name,
         isDir: false,
@@ -52,7 +70,8 @@ export class BucketBinary extends AbstractBinary {
       });
     }
     // <CommonPrefixes><Prefix>v0.59.0/x64/</Prefix></CommonPrefixes>
-    const dirRe = /<CommonPrefixes><Prefix>([^<]+?)<\/Prefix><\/CommonPrefixes>/g;
+    const dirRe =
+      /<CommonPrefixes><Prefix>([^<]+?)<\/Prefix><\/CommonPrefixes>/g;
     matchs = xml.matchAll(dirRe);
     for (const m of matchs) {
       // <Prefix>AWSLogs/</Prefix>
@@ -65,7 +84,7 @@ export class BucketBinary extends AbstractBinary {
       let date = '-';
       // root dir children, should set date to '2022-04-19T01:00:00Z', sync per hour
       if (dir === '/') {
-        date = new Date().toISOString().split(':', 1)[0] + ':00:00Z';
+        date = `${new Date().toISOString().split(':', 1)[0]}:00:00Z`;
       }
       items.push({
         name,
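
Note: the reflowed fileRe above is unchanged in behavior. A self-contained check of the regex against the sample <Contents> entry quoted in the code comments:

const fileRe =
  /<Contents><Key>([^<]+?)<\/Key>(?:<Generation>\d+?<\/Generation>)?(?:<MetaGeneration>\d+?<\/MetaGeneration>)?<LastModified>([^<]+?)<\/LastModified><ETag>[^<]+?<\/ETag><Size>(\d+?)<\/Size>/g;
const xml =
  '<Contents><Key>v0.59.0/nwjs-v0.59.0-linux-ia32.tar.gz</Key><LastModified>2015-11-02T02:34:18.000Z</LastModified><ETag>"b1b7a52928e9f874bad0cabf7f74ba8e"</ETag><Size>22842</Size><StorageClass>STANDARD</StorageClass></Contents>';
for (const m of xml.matchAll(fileRe)) {
  // key, last-modified date, and size in bytes
  console.log(m[1], m[2], Number.parseInt(m[3].trim()));
  // v0.59.0/nwjs-v0.59.0-linux-ia32.tar.gz 2015-11-02T02:34:18.000Z 22842
}
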
@@ -1,7 +1,14 @@
-import { basename } from 'path';
-import { SingletonProto } from '@eggjs/tegg';
-import { BinaryType } from '../../enum/Binary';
-import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
+import { basename } from 'node:path';
+
+import { SingletonProto } from 'egg';
+
+import { BinaryType } from '../../enum/Binary.ts';
+import {
+  AbstractBinary,
+  BinaryAdapter,
+  type BinaryItem,
+  type FetchResult,
+} from './AbstractBinary.ts';
 
 @SingletonProto()
 @BinaryAdapter(BinaryType.ChromeForTesting)
@@ -18,7 +25,11 @@ export class ChromeForTestingBinary extends AbstractBinary {
   }
 
   async finishFetch(success: boolean) {
-    if (success && this.#timestamp && ChromeForTestingBinary.lastTimestamp !== this.#timestamp) {
+    if (
+      success &&
+      this.#timestamp &&
+      ChromeForTestingBinary.lastTimestamp !== this.#timestamp
+    ) {
       ChromeForTestingBinary.lastTimestamp = this.#timestamp;
     }
   }
@@ -26,22 +37,35 @@ export class ChromeForTestingBinary extends AbstractBinary {
   async #syncDirItems() {
     this.dirItems = {};
     this.dirItems['/'] = [];
-    const jsonApiEndpoint = 'https://googlechromelabs.github.io/chrome-for-testing/known-good-versions-with-downloads.json';
-    const { data, status, headers } = await this.httpclient.request(jsonApiEndpoint, {
-      dataType: 'json',
-      timeout: 30000,
-      followRedirect: true,
-      gzip: true,
-    });
+    const jsonApiEndpoint =
+      'https://googlechromelabs.github.io/chrome-for-testing/known-good-versions-with-downloads.json';
+    const { data, status, headers } = await this.httpclient.request(
+      jsonApiEndpoint,
+      {
+        dataType: 'json',
+        timeout: 30_000,
+        followRedirect: true,
+        gzip: true,
+      }
+    );
     if (status !== 200) {
-      this.logger.warn('[ChromeForTestingBinary.request:non-200-status] url: %s, status: %s, headers: %j, data: %j',
-        jsonApiEndpoint, status, headers, data);
+      this.logger.warn(
+        '[ChromeForTestingBinary.request:non-200-status] url: %s, status: %s, headers: %j, data: %j',
+        jsonApiEndpoint,
+        status,
+        headers,
+        data
+      );
       return;
     }
     this.#timestamp = data.timestamp;
     const hasNewData = this.#timestamp !== ChromeForTestingBinary.lastTimestamp;
-    this.logger.info('[ChromeForTestingBinary] remote data timestamp: %j, last timestamp: %j, hasNewData: %s',
-      this.#timestamp, ChromeForTestingBinary.lastTimestamp, hasNewData);
+    this.logger.info(
+      '[ChromeForTestingBinary] remote data timestamp: %j, last timestamp: %j, hasNewData: %s',
+      this.#timestamp,
+      ChromeForTestingBinary.lastTimestamp,
+      hasNewData
+    );
     if (!hasNewData) {
       return;
     }
@@ -151,6 +175,6 @@ export class ChromeForTestingBinary extends AbstractBinary {
     if (!this.dirItems) {
      await this.#syncDirItems();
     }
-    return { items: this.dirItems![dir], nextParams: null };
+    return { items: this.dirItems?.[dir] ?? [], nextParams: null };
   }
 }
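
Note: the guard here means a full re-sync only happens when the published timestamp moves. A minimal sketch of the flow, with lastTimestamp/currentTimestamp standing in for ChromeForTestingBinary.lastTimestamp and this.#timestamp (values invented):

let lastTimestamp: string | undefined = '2024-05-01T00:12:00.000Z'; // hypothetical
const currentTimestamp = '2024-05-02T08:00:00.000Z'; // data.timestamp from the JSON feed
const hasNewData = currentTimestamp !== lastTimestamp;
if (hasNewData) {
  // ...rebuild dirItems from the payload...
  lastTimestamp = currentTimestamp; // committed in finishFetch(success) only on success
}
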
@@ -1,6 +1,12 @@
-import { SingletonProto } from '@eggjs/tegg';
-import { BinaryType } from '../../enum/Binary';
-import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
+import { SingletonProto } from 'egg';
+
+import { BinaryType } from '../../enum/Binary.ts';
+import {
+  AbstractBinary,
+  BinaryAdapter,
+  type BinaryItem,
+  type FetchResult,
+} from './AbstractBinary.ts';
 
 @SingletonProto()
 @BinaryAdapter(BinaryType.Cypress)
@@ -20,7 +26,7 @@ export class CypressBinary extends AbstractBinary {
     this.dirItems = {};
     this.dirItems['/'] = [];
     for (const version in data.versions) {
-      const major = parseInt(version.split('.', 1)[0]);
+      const major = Number.parseInt(version.split('.', 1)[0]);
       // need >= 4.0.0
       // https://npmmirror.com/mirrors/cypress/4.0.0/
       if (major < 4) continue;
@@ -53,8 +59,10 @@ export class CypressBinary extends AbstractBinary {
     //   { platform: 'win32', arch: 'x64' },
     // ]
     const platforms = [
-      'darwin-x64', 'darwin-arm64',
-      'linux-x64', 'linux-arm64',
+      'darwin-x64',
+      'darwin-arm64',
+      'linux-x64',
+      'linux-arm64',
       'win32-x64',
     ];
     for (const platform of platforms) {
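
Note: split('.', 1) returns at most one element, so the parse above reads only the major version. A tiny illustration with an invented version string:

const version = '13.6.1'; // example only
console.log(version.split('.', 1)); // [ '13' ]
const major = Number.parseInt(version.split('.', 1)[0]);
console.log(major < 4); // false; versions below 4.0.0 are skipped by the loop above
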
@@ -1,9 +1,14 @@
 import path from 'node:path';
-import { SingletonProto } from '@eggjs/tegg';
+
+import { SingletonProto } from 'egg';
+
 import {
-  AbstractBinary, FetchResult, BinaryItem, BinaryAdapter,
-} from './AbstractBinary';
-import { BinaryType } from '../../enum/Binary';
+  AbstractBinary,
+  BinaryAdapter,
+  type BinaryItem,
+  type FetchResult,
+} from './AbstractBinary.ts';
+import { BinaryType } from '../../enum/Binary.ts';
 
 @SingletonProto()
 @BinaryAdapter(BinaryType.Edgedriver)
@@ -20,15 +25,23 @@ export class EdgedriverBinary extends AbstractBinary {
     this.dirItems = {};
     this.dirItems['/'] = [];
     const jsonApiEndpoint = 'https://edgeupdates.microsoft.com/api/products';
-    const { data, status, headers } = await this.httpclient.request(jsonApiEndpoint, {
-      dataType: 'json',
-      timeout: 30000,
-      followRedirect: true,
-      gzip: true,
-    });
+    const { data, status, headers } = await this.httpclient.request(
+      jsonApiEndpoint,
+      {
+        dataType: 'json',
+        timeout: 30_000,
+        followRedirect: true,
+        gzip: true,
+      }
+    );
     if (status !== 200) {
-      this.logger.warn('[EdgedriverBinary.request:non-200-status] url: %s, status: %s, headers: %j, data: %j',
-        jsonApiEndpoint, status, headers, data);
+      this.logger.warn(
+        '[EdgedriverBinary.request:non-200-status] url: %s, status: %s, headers: %j, data: %j',
+        jsonApiEndpoint,
+        status,
+        headers,
+        data
+      );
       return;
     }
     this.logger.info('[EdgedriverBinary] remote data length: %s', data.length);
@@ -160,12 +173,12 @@ export class EdgedriverBinary extends AbstractBinary {
     }
     // fetch root dir
     if (dir === '/') {
-      return { items: this.dirItems![dir], nextParams: null };
+      return { items: this.dirItems?.[dir] ?? [], nextParams: null };
     }
 
     // fetch sub dir
     // /foo/ => foo/
-    const subDir = dir.substring(1);
+    const subDir = dir.slice(1);
     // https://msedgewebdriverstorage.blob.core.windows.net/edgewebdriver?prefix=124.0.2478.97/&delimiter=/&maxresults=100&restype=container&comp=list
     const url = `https://msedgewebdriverstorage.blob.core.windows.net/edgewebdriver?prefix=${encodeURIComponent(subDir)}&delimiter=/&maxresults=100&restype=container&comp=list`;
     const xml = await this.requestXml(url);
@@ -175,7 +188,8 @@ export class EdgedriverBinary extends AbstractBinary {
   #parseItems(xml: string): BinaryItem[] {
     const items: BinaryItem[] = [];
     // <Blob><Name>124.0.2478.97/edgedriver_arm64.zip</Name><Url>https://msedgewebdriverstorage.blob.core.windows.net/edgewebdriver/124.0.2478.97/edgedriver_arm64.zip</Url><Properties><Last-Modified>Fri, 10 May 2024 18:35:44 GMT</Last-Modified><Etag>0x8DC712000713C13</Etag><Content-Length>9191362</Content-Length><Content-Type>application/octet-stream</Content-Type><Content-Encoding /><Content-Language /><Content-MD5>1tjPTf5JU6KKB06Qf1JOGw==</Content-MD5><Cache-Control /><BlobType>BlockBlob</BlobType><LeaseStatus>unlocked</LeaseStatus></Properties></Blob>
-    const fileRe = /<Blob><Name>([^<]+?)<\/Name><Url>([^<]+?)<\/Url><Properties><Last\-Modified>([^<]+?)<\/Last\-Modified><Etag>(?:[^<]+?)<\/Etag><Content\-Length>(\d+)<\/Content\-Length>/g;
+    const fileRe =
+      /<Blob><Name>([^<]+?)<\/Name><Url>([^<]+?)<\/Url><Properties><Last-Modified>([^<]+?)<\/Last-Modified><Etag>(?:[^<]+?)<\/Etag><Content-Length>(\d+)<\/Content-Length>/g;
     const matchItems = xml.matchAll(fileRe);
     for (const m of matchItems) {
       const fullname = m[1].trim();
@@ -199,7 +213,7 @@ export class EdgedriverBinary extends AbstractBinary {
       const name = path.basename(fullname);
       const url = m[2].trim();
       const date = m[3].trim();
-      const size = parseInt(m[4].trim());
+      const size = Number.parseInt(m[4].trim());
       items.push({
         name,
         isDir: false,
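
Note: a self-contained check of the reflowed Azure blob-listing regex against the sample <Blob> entry quoted in the code comment:

const fileRe =
  /<Blob><Name>([^<]+?)<\/Name><Url>([^<]+?)<\/Url><Properties><Last-Modified>([^<]+?)<\/Last-Modified><Etag>(?:[^<]+?)<\/Etag><Content-Length>(\d+)<\/Content-Length>/g;
const xml =
  '<Blob><Name>124.0.2478.97/edgedriver_arm64.zip</Name><Url>https://msedgewebdriverstorage.blob.core.windows.net/edgewebdriver/124.0.2478.97/edgedriver_arm64.zip</Url><Properties><Last-Modified>Fri, 10 May 2024 18:35:44 GMT</Last-Modified><Etag>0x8DC712000713C13</Etag><Content-Length>9191362</Content-Length></Properties></Blob>';
for (const m of xml.matchAll(fileRe)) {
  // blob name, last-modified date, and size in bytes (m[2] is the full URL)
  console.log(m[1], m[3], Number.parseInt(m[4].trim()));
  // 124.0.2478.97/edgedriver_arm64.zip Fri, 10 May 2024 18:35:44 GMT 9191362
}
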
@@ -1,13 +1,21 @@
|
||||
import { SingletonProto } from '@eggjs/tegg';
|
||||
import { BinaryType } from '../../enum/Binary';
|
||||
import binaries, { BinaryName } from '../../../../config/binaries';
|
||||
import { BinaryAdapter, BinaryItem, FetchResult } from './AbstractBinary';
|
||||
import { GithubBinary } from './GithubBinary';
|
||||
import { SingletonProto } from 'egg';
|
||||
|
||||
import binaries, { type BinaryName } from '../../../../config/binaries.ts';
|
||||
import { BinaryType } from '../../enum/Binary.ts';
|
||||
import { GithubBinary } from './GithubBinary.ts';
|
||||
import {
|
||||
BinaryAdapter,
|
||||
type BinaryItem,
|
||||
type FetchResult,
|
||||
} from './AbstractBinary.ts';
|
||||
|
||||
@SingletonProto()
|
||||
@BinaryAdapter(BinaryType.Electron)
|
||||
export class ElectronBinary extends GithubBinary {
|
||||
async fetch(dir: string, binaryName: BinaryName = 'electron'): Promise<FetchResult | undefined> {
|
||||
async fetch(
|
||||
dir: string,
|
||||
binaryName: BinaryName = 'electron'
|
||||
): Promise<FetchResult | undefined> {
|
||||
const releases = await this.initReleases(binaryName, binaries.electron);
|
||||
if (!releases) return;
|
||||
|
||||
@@ -24,7 +32,7 @@ export class ElectronBinary extends GithubBinary {
|
||||
// v14.2.6 => 14.2.6
|
||||
if (/^v\d+?\./.test(item.tag_name)) {
|
||||
items.push({
|
||||
name: `${item.tag_name.substring(1)}/`,
|
||||
name: `${item.tag_name.slice(1)}/`,
|
||||
isDir: true,
|
||||
url: item.url,
|
||||
size: '-',
|
||||
@@ -34,7 +42,10 @@ export class ElectronBinary extends GithubBinary {
|
||||
}
|
||||
} else {
|
||||
for (const item of releases) {
|
||||
if (dir === `/${item.tag_name}/` || dir === `/${item.tag_name.substring(1)}/`) {
|
||||
if (
|
||||
dir === `/${item.tag_name}/` ||
|
||||
dir === `/${item.tag_name.slice(1)}/`
|
||||
) {
|
||||
items = this.formatItems(item, binaries.electron);
|
||||
break;
|
||||
}
|
||||
|
||||
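
Note: substring(1) and slice(1) are equivalent for this non-negative index, so the rename is purely stylistic. What the dir comparison buys, with an invented tag:

const tagName = 'v14.2.6'; // example release tag
console.log(`${tagName.slice(1)}/`); // '14.2.6/', the listed directory name
const dir = '/14.2.6/';
// both the raw tag and the stripped form resolve to the same release
console.log(dir === `/${tagName}/` || dir === `/${tagName.slice(1)}/`); // true
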
145  app/common/adapter/binary/FirefoxBinary.ts  Normal file
@@ -0,0 +1,145 @@
+import { basename } from 'node:path';
+
+import { SingletonProto } from 'egg';
+
+import binaries, { type BinaryName } from '../../../../config/binaries.ts';
+import { BinaryType } from '../../enum/Binary.ts';
+import {
+  AbstractBinary,
+  BinaryAdapter,
+  type BinaryItem,
+  type FetchResult,
+} from './AbstractBinary.ts';
+
+@SingletonProto()
+@BinaryAdapter(BinaryType.Firefox)
+export class FirefoxBinary extends AbstractBinary {
+  async initFetch() {
+    // do nothing
+    return;
+  }
+
+  // Only fetch Firefox versions >= 100.0.0 to avoid too old versions
+  async fetch(
+    dir: string,
+    binaryName: BinaryName
+  ): Promise<FetchResult | undefined> {
+    const binaryConfig = binaries[binaryName];
+    const url = `${binaryConfig.distUrl}${dir}`;
+    const html = await this.requestXml(url);
+
+    // Mozilla archive has format like:
+    // <tr>
+    //   <td>Dir</td>
+    //   <td><a href="/pub/firefox/releases/131.0.3/update/">update/</a></td>
+    //   <td></td>
+    //   <td></td>
+    // </tr>
+    // <tr>
+    //   <td>File</td>
+    //   <td><a href="/pub/firefox/releases/131.0.3/SHA256SUMS.asc">SHA256SUMS.asc</a></td>
+    //   <td>833</td>
+    //   <td>12-Apr-2025 08:52</td>
+    // </tr>
+
+    // Parse Mozilla directory listing format - handles two different formats:
+    // Format 1 (main index): <td><a href="/path/">name/</a></td>
+    // Format 2 (version dir): <td>Type</td><td><a href="/path/">name</a></td><td>size</td><td>date</td>
+
+    // Try the detailed format first (with Type/Size/Date columns)
+    const detailedRe = /<tr>\s*<td>(Dir|File)<\/td>\s*<td><a href="([^"]+?)"[^>]*?>[^<]+?<\/a><\/td>\s*<td>([^<]*?)<\/td>\s*<td>([^<]*?)<\/td>\s*<\/tr>/gi;
+    const detailedMatches = Array.from(html.matchAll(detailedRe));
+
+    let matchs: RegExpMatchArray[];
+    let useDetailedFormat = false;
+
+    if (detailedMatches.length > 0) {
+      // Use detailed format
+      matchs = detailedMatches;
+      useDetailedFormat = true;
+    } else {
+      // Fallback to simple format
+      const simpleRe = /<td><a href="([^"]+?)"[^>]*?>[^<]+?<\/a><\/td>/gi;
+      matchs = Array.from(html.matchAll(simpleRe));
+    }
+
+    const items: BinaryItem[] = [];
+
+    for (const m of matchs) {
+      let href: string;
+      let isDir: boolean;
+      let size: string;
+      let date: string;
+
+      if (useDetailedFormat) {
+        // Detailed format: [fullMatch, type, href, size, date]
+        const type = m[1]; // "Dir" or "File"
+        href = m[2];
+        size = m[3].trim() || '-';
+        date = m[4].trim() || '-';
+        isDir = type === 'Dir';
+      } else {
+        // Simple format: [fullMatch, href]
+        href = m[1];
+        isDir = href.endsWith('/');
+        size = '-';
+        date = '-';
+      }
+
+      // Extract the name from the href path
+      // href could be "/pub/firefox/releases/130.0/" or just "130.0/"
+      let name = href;
+      if (href.startsWith('/')) {
+        // Extract the last part of the path
+        const parts = href.split('/').filter(Boolean);
+        name = parts[parts.length - 1] ?? '';
+        if (href.endsWith('/')) {
+          name += '/';
+        }
+      }
+
+      if (!isDir) {
+        // Keep the full name for files
+        name = basename(name);
+      }
+
+      // Skip parent directory links
+      if (name === '../' || href === '/pub/firefox/' || href.endsWith('/..') || href === '/pub/firefox/releases/') continue;
+
+      // Filter out old Firefox versions (< 100.0.0) for directories - apply to main index (root directory)
+      if (isDir && name !== '../' && dir === '/') {
+        const versionName = name.slice(0, -1); // Remove trailing '/'
+        // Skip non-version directories that are just special names
+        if (/^\d+\.\d+/.test(versionName)) {
+          try {
+            const major = Number.parseInt(versionName.split('.')[0]);
+            if (major < 100) {
+              continue; // Skip versions < 100.0.0
+            }
+          } catch {
+            // If version parsing fails, skip this directory
+            continue;
+          }
+        }
+        // Also skip named directories that aren't version numbers
+        else if (!['latest', 'latest-beta', 'latest-esr'].includes(versionName)) {
+          continue;
+        }
+      }
+
+      const fileUrl = isDir ? '' : `${url}${name}`;
+      if (binaryConfig.ignoreFiles?.includes(`${dir}${name}`)) continue;
+
+      const item = {
+        name,
+        isDir,
+        url: fileUrl,
+        size,
+        date,
+        ignoreDownloadStatuses: binaryConfig.options?.ignoreDownloadStatuses,
+      };
+      items.push(item);
+    }
+    return { items, nextParams: null };
+  }
+}
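
Note: the root-directory filter above keeps only version directories >= 100 plus the latest* aliases. A runnable condensation of that predicate (sample names invented):

const keep = (name: string): boolean => {
  const versionName = name.slice(0, -1); // strip trailing '/'
  if (/^\d+\.\d+/.test(versionName)) {
    return Number.parseInt(versionName.split('.')[0]) >= 100;
  }
  return ['latest', 'latest-beta', 'latest-esr'].includes(versionName);
};
console.log(['95.0/', '131.0.3/', 'latest-esr/', 'devpreview/'].filter(keep));
// [ '131.0.3/', 'latest-esr/' ]
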
@@ -1,15 +1,17 @@
-import { SingletonProto } from '@eggjs/tegg';
-import { BinaryType } from '../../enum/Binary';
-import binaries, { BinaryName, BinaryTaskConfig } from '../../../../config/binaries';
-import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
+import { SingletonProto } from 'egg';
+
+import binaries, { type BinaryName, type BinaryTaskConfig } from '../../../../config/binaries.ts';
+import { BinaryType } from '../../enum/Binary.ts';
+import { AbstractBinary, BinaryAdapter, type BinaryItem, type FetchResult } from './AbstractBinary.ts';
 
 @SingletonProto()
 @BinaryAdapter(BinaryType.GitHub)
 export class GithubBinary extends AbstractBinary {
-  private releases: Record<string, any[]> = {};
+  // oxlint-disable-next-line typescript-eslint/no-explicit-any
+  private releases: Record<string, any[] | undefined> = {};
 
   async initFetch(binaryName: BinaryName) {
-    delete this.releases[binaryName];
+    this.releases[binaryName] = undefined;
   }
 
   protected async initReleases(binaryName: BinaryName, binaryConfig: BinaryTaskConfig) {
@@ -17,10 +19,12 @@ export class GithubBinary extends AbstractBinary {
     // https://docs.github.com/en/rest/reference/releases get three pages
     // https://api.github.com/repos/electron/electron/releases
     // https://api.github.com/repos/electron/electron/releases?per_page=100&page=3
+    // oxlint-disable-next-line typescript-eslint/no-explicit-any
     let releases: any[] = [];
     const maxPage = binaryConfig.options?.maxPage || 1;
+    const perPage = binaryConfig.options?.perPage || 100;
     for (let i = 0; i < maxPage; i++) {
-      const url = `https://api.github.com/repos/${binaryConfig.repo}/releases?per_page=100&page=${i + 1}`;
+      const url = `https://api.github.com/repos/${binaryConfig.repo}/releases?per_page=${perPage}&page=${i + 1}`;
       const requestHeaders: Record<string, string> = {};
       if (process.env.GITHUB_TOKEN) {
         requestHeaders.Authorization = `token ${process.env.GITHUB_TOKEN}`;
@@ -42,13 +46,17 @@ export class GithubBinary extends AbstractBinary {
     return this.releases[binaryName];
   }
 
+  // oxlint-disable-next-line typescript-eslint/no-explicit-any
   protected formatItems(releaseItem: any, binaryConfig: BinaryTaskConfig) {
     const items: BinaryItem[] = [];
     // 250MB
     const maxFileSize = 1024 * 1024 * 250;
     for (const asset of releaseItem.assets) {
       if (asset.size > maxFileSize) {
-        this.logger.info('[GithubBinary.formatItems] asset reach max file size(> 250MB), ignore download it, asset: %j', asset);
+        this.logger.info(
+          '[GithubBinary.formatItems] asset reach max file size(> 250MB), ignore download it, asset: %j',
+          asset
+        );
         continue;
       }
       items.push({
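
Note: perPage now comes from binaryConfig.options (falling back to 100). A sketch of the paged URL construction, with example repo and paging values:

const repo = 'electron/electron'; // example repo from the code comments
const maxPage = 3; // binaryConfig.options?.maxPage || 1 in the real code
const perPage = 100; // binaryConfig.options?.perPage || 100
for (let i = 0; i < maxPage; i++) {
  const url = `https://api.github.com/repos/${repo}/releases?per_page=${perPage}&page=${i + 1}`;
  const requestHeaders: Record<string, string> = {};
  if (process.env.GITHUB_TOKEN) {
    // authenticated requests get a much higher GitHub API rate limit
    requestHeaders.Authorization = `token ${process.env.GITHUB_TOKEN}`;
  }
  console.log(url);
}
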
@@ -1,7 +1,13 @@
-import { SingletonProto } from '@eggjs/tegg';
-import { BinaryType } from '../../enum/Binary';
-import binaries, { BinaryName } from '../../../../config/binaries';
-import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
+import { SingletonProto } from 'egg';
+
+import binaries, { type BinaryName } from '../../../../config/binaries.ts';
+import { BinaryType } from '../../enum/Binary.ts';
+import {
+  AbstractBinary,
+  BinaryAdapter,
+  type BinaryItem,
+  type FetchResult,
+} from './AbstractBinary.ts';
 
 @SingletonProto()
 @BinaryAdapter(BinaryType.Imagemin)
@@ -11,7 +17,10 @@ export class ImageminBinary extends AbstractBinary {
     return;
   }
 
-  async fetch(dir: string, binaryName: BinaryName): Promise<FetchResult | undefined> {
+  async fetch(
+    dir: string,
+    binaryName: BinaryName
+  ): Promise<FetchResult | undefined> {
     const binaryConfig = binaries[binaryName];
     const dirItems: {
       [key: string]: BinaryItem[];
@@ -24,7 +33,7 @@ export class ImageminBinary extends AbstractBinary {
     // https://github.com/imagemin/jpegtran-bin/blob/v4.0.0/lib/index.js
     // https://github.com/imagemin/pngquant-bin/blob/v4.0.0/lib/index.js
     for (const version in data.versions) {
-      const major = parseInt(version.split('.', 1)[0]);
+      const major = Number.parseInt(version.split('.', 1)[0]);
       if (major < 4) continue;
       // >= 4.0.0
       const date = data.time[version];
@@ -47,7 +56,7 @@ export class ImageminBinary extends AbstractBinary {
       });
       const versionVendorDir = `/v${version}/vendor/`;
       dirItems[versionVendorDir] = [];
-      for (const platform of binaryConfig.options!.nodePlatforms!) {
+      for (const platform of binaryConfig.options?.nodePlatforms ?? []) {
         dirItems[versionVendorDir].push({
           name: `${platform}/`,
           date,
@@ -57,16 +66,16 @@ export class ImageminBinary extends AbstractBinary {
         });
         const platformDir = `/v${version}/vendor/${platform}/`;
         dirItems[platformDir] = [];
-        const archs = binaryConfig.options!.nodeArchs![platform];
+        const archs = binaryConfig.options?.nodeArchs?.[platform] ?? [];
         if (archs.length === 0) {
-          for (const name of binaryConfig.options!.binFiles![platform]) {
+          for (const name of binaryConfig.options?.binFiles?.[platform] ?? []) {
            dirItems[platformDir].push({
               name,
               date,
               size: '-',
               isDir: false,
               url: `${binaryConfig.distUrl}/${binaryConfig.repo}${platformDir}${name}`,
-              ignoreDownloadStatuses: [ 404 ],
+              ignoreDownloadStatuses: [404],
             });
           }
         } else {
@@ -81,14 +90,15 @@ export class ImageminBinary extends AbstractBinary {
             const platformArchDir = `/v${version}/vendor/${platform}/${arch}/`;
             dirItems[platformArchDir] = [];
 
-            for (const name of binaryConfig.options!.binFiles![platform]) {
+            for (const name of binaryConfig.options?.binFiles?.[platform] ??
+              []) {
               dirItems[platformArchDir].push({
                 name,
                 date,
                 size: '-',
                 isDir: false,
                 url: `${binaryConfig.distUrl}/${binaryConfig.repo}${platformArchDir}${name}`,
-                ignoreDownloadStatuses: [ 404 ],
+                ignoreDownloadStatuses: [404],
               });
             }
           }
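
Note: the change from options!.nodeArchs![platform] to optional chaining swaps a potential runtime TypeError for an empty loop. A reduced illustration with a simplified config shape:

type Options = { nodeArchs?: Record<string, string[]> }; // simplified for illustration
const options: Options | undefined = undefined;
const archs = options?.nodeArchs?.['darwin'] ?? [];
console.log(archs.length); // 0; the per-arch loop is skipped instead of throwing
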
@@ -1,8 +1,16 @@
 import { basename } from 'node:path';
-import { SingletonProto } from '@eggjs/tegg';
-import { BinaryType } from '../../enum/Binary';
-import binaries, { BinaryName } from '../../../../config/binaries';
-import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
+
+import { SingletonProto } from 'egg';
+import dayjs from 'dayjs';
+
+import binaries, { type BinaryName } from '../../../../config/binaries.ts';
+import { BinaryType } from '../../enum/Binary.ts';
+import {
+  AbstractBinary,
+  BinaryAdapter,
+  type BinaryItem,
+  type FetchResult,
+} from './AbstractBinary.ts';
 
 @SingletonProto()
 @BinaryAdapter(BinaryType.Node)
@@ -12,10 +20,14 @@ export class NodeBinary extends AbstractBinary {
     return;
   }
 
-  async fetch(dir: string, binaryName: BinaryName): Promise<FetchResult | undefined> {
+  async fetch(
+    dir: string,
+    binaryName: BinaryName
+  ): Promise<FetchResult | undefined> {
     const binaryConfig = binaries[binaryName];
     const url = `${binaryConfig.distUrl}${dir}`;
     const html = await this.requestXml(url);
 
     // <a href="v9.8.0/">v9.8.0/</a> 08-Mar-2018 01:55 -
     // <a href="v9.9.0/">v9.9.0/</a> 21-Mar-2018 15:47 -
     // <a href="index.json">index.json</a> 17-Dec-2021 23:16 219862
@@ -30,7 +42,43 @@ export class NodeBinary extends AbstractBinary {
     // <a href="/dist/v18.15.0/SHASUMS256.txt.asc">SHASUMS256.txt.asc</a> 04-Nov-2024 17:29 3.7 KB
     // <a href="/dist/v18.15.0/SHASUMS256.txt.sig">SHASUMS256.txt.sig</a> 04-Nov-2024 17:29 310 B
     // <a href="/dist/v18.15.0/SHASUMS256.txt">SHASUMS256.txt</a> 04-Nov-2024 17:29 3.2 KB
-    const re = /<a href="([^\"]+?)"[^>]*?>[^<]+?<\/a>\s+?((?:[\w\-]+? \w{2}\:\d{2})|\-)\s+?([\d\.\-\s\w]+)/ig;
+
+    // <a href="/dist/latest-v20.x/SHASUMS256.txt.asc">SHASUMS256.txt.asc</a> 03 Sept 2025, 18:20 4.7 KB
+    // <a href="/dist/latest-v20.x/SHASUMS256.txt.sig">SHASUMS256.txt.sig</a> 03 Sept 2025, 18:20 566 B
+    // <a href="/dist/latest-v20.x/SHASUMS256.txt">SHASUMS256.txt</a> 03 Sept 2025, 18:19 3.8 KB
+    // <a href="/dist/latest-v20.x/node-v20.19.5-aix-ppc64.tar.gz">node-v20.19.5-aix-ppc64.tar.gz</a> 03 Sept 2025, 18:19 60 MB
+    // <a href="/dist/latest-v20.x/node-v20.19.5-arm64.msi">node-v20.19.5-arm64.msi</a> 03 Sept 2025, 18:19 24 MB
+    // <a href="/dist/latest-v20.x/node-v20.19.5-darwin-arm64.tar.gz">node-v20.19.5-darwin-arm64.tar.gz</a> 03 Sept 2025, 18:19 41 MB
+    // <a href="/dist/latest-v20.x/node-v20.19.5-darwin-arm64.tar.xz">node-v20.19.5-darwin-arm64.tar.xz</a> 03 Sept 2025, 18:19 21 MB
+    // <a href="/dist/latest-v20.x/node-v20.19.5-darwin-x64.tar.gz">node-v20.19.5-darwin-x64.tar.gz</a> 03 Sept 2025, 18:19 43 MB
+    // <a href="/dist/latest-v20.x/node-v20.19.5-darwin-x64.tar.xz">node-v20.19.5-darwin-x64.tar.xz</a> 03 Sept 2025, 18:19 23 MB
+    // <a href="/dist/latest-v20.x/node-v20.19.5-headers.tar.gz">node-v20.19.5-headers.tar.gz</a> 03 Sept 2025, 18:19 8.7 MB
+    // <a href="/dist/latest-v20.x/node-v20.19.5-headers.tar.xz">node-v20.19.5-headers.tar.xz</a> 03 Sept 2025, 18:19 524 KB
+    // <a href="/dist/latest-v20.x/node-v20.19.5-linux-arm64.tar.gz">node-v20.19.5-linux-arm64.tar.gz</a> 03 Sept 2025, 18:19 47 MB
+    // <a href="/dist/latest-v20.x/node-v20.19.5-linux-arm64.tar.xz">node-v20.19.5-linux-arm64.tar.xz</a> 03 Sept 2025, 18:19 25 MB
+    // <a href="/dist/latest-v20.x/node-v20.19.5-linux-armv7l.tar.gz">node-v20.19.5-linux-armv7l.tar.gz</a> 03 Sept 2025, 18:19 43 MB
+    // <a href="/dist/latest-v20.x/node-v20.19.5-linux-armv7l.tar.xz">node-v20.19.5-linux-armv7l.tar.xz</a> 03 Sept 2025, 18:19 22 MB
+    // <a href="/dist/latest-v20.x/node-v20.19.5-linux-ppc64le.tar.gz">node-v20.19.5-linux-ppc64le.tar.gz</a> 03 Sept 2025, 18:19 49 MB
+    // <a href="/dist/latest-v20.x/node-v20.19.5-linux-ppc64le.tar.xz">node-v20.19.5-linux-ppc64le.tar.xz</a> 03 Sept 2025, 18:19 26 MB
+    // <a href="/dist/latest-v20.x/node-v20.19.5-linux-s390x.tar.gz">node-v20.19.5-linux-s390x.tar.gz</a> 03 Sept 2025, 18:19 47 MB
+    // <a href="/dist/latest-v20.x/node-v20.19.5-linux-s390x.tar.xz">node-v20.19.5-linux-s390x.tar.xz</a> 03 Sept 2025, 18:19 25 MB
+    // <a href="/dist/latest-v20.x/node-v20.19.5-linux-x64.tar.gz">node-v20.19.5-linux-x64.tar.gz</a> 03 Sept 2025, 18:19 47 MB
+    // <a href="/dist/latest-v20.x/node-v20.19.5-linux-x64.tar.xz">node-v20.19.5-linux-x64.tar.xz</a> 03 Sept 2025, 18:19 26 MB
+    // <a href="/dist/latest-v20.x/node-v20.19.5-win-arm64.7z">node-v20.19.5-win-arm64.7z</a> 03 Sept 2025, 18:19 17 MB
+    // <a href="/dist/latest-v20.x/node-v20.19.5-win-arm64.zip">node-v20.19.5-win-arm64.zip</a> 03 Sept 2025, 18:19 26 MB
+    // <a href="/dist/latest-v20.x/node-v20.19.5-win-x64.7z">node-v20.19.5-win-x64.7z</a> 03 Sept 2025, 18:19 19 MB
+    // <a href="/dist/latest-v20.x/node-v20.19.5-win-x64.zip">node-v20.19.5-win-x64.zip</a> 03 Sept 2025, 18:19 30 MB
+    // <a href="/dist/latest-v20.x/node-v20.19.5-win-x86.7z">node-v20.19.5-win-x86.7z</a> 03 Sept 2025, 18:19 18 MB
+    // <a href="/dist/latest-v20.x/node-v20.19.5-win-x86.zip">node-v20.19.5-win-x86.zip</a> 03 Sept 2025, 18:19 28 MB
+    // <a href="/dist/latest-v20.x/node-v20.19.5-x64.msi">node-v20.19.5-x64.msi</a> 03 Sept 2025, 18:19 27 MB
+    // <a href="/dist/latest-v20.x/node-v20.19.5-x86.msi">node-v20.19.5-x86.msi</a> 03 Sept 2025, 18:19 25 MB
+    // <a href="/dist/latest-v20.x/node-v20.19.5.pkg">node-v20.19.5.pkg</a> 03 Sept 2025, 18:19 72 MB
+    // <a href="/dist/latest-v20.x/node-v20.19.5.tar.gz">node-v20.19.5.tar.gz</a> 03 Sept 2025, 18:19 89 MB
+    // <a href="/dist/latest-v20.x/node-v20.19.5.tar.xz">node-v20.19.5.tar.xz</a> 03 Sept 2025, 18:19 43 MB
+
+    // date format: 19-Jan-2020 06:07 or 03 Sept 2025, 18:19
+    const re =
+      /<a href="([^"]+?)"[^>]*?>[^<]+?<\/a>\s+?((?:[\w-]+? \w{2}:\d{2})|(?:\d{2} [A-Za-z]{3,9} \d{4}, \d{2}:\d{2})|-)\s+?([\d.\-\s\w]+)/gi;
     const matchs = html.matchAll(re);
     const items: BinaryItem[] = [];
     for (const m of matchs) {
@@ -41,7 +89,7 @@ export class NodeBinary extends AbstractBinary {
         name = basename(name);
       }
       const fileUrl = isDir ? '' : `${url}${name}`;
-      const date = m[2];
+      const date = m[2] === '-' ? '-' : dayjs(m[2]).format('DD-MMM-YYYY HH:mm');
       const size = m[3].trim();
       if (size === '0') continue;
       if (binaryConfig.ignoreFiles?.includes(`${dir}${name}`)) continue;
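
Note: the dayjs call normalizes the new "03 Sept 2025, 18:19" listing style back to the old "DD-MMM-YYYY HH:mm" one, assuming dayjs's default parser (which defers to the Date constructor) accepts the string:

import dayjs from 'dayjs';

const raw = '03 Sept 2025, 18:19'; // new nodejs.org directory-listing format
const date = raw === '-' ? '-' : dayjs(raw).format('DD-MMM-YYYY HH:mm');
console.log(date); // expected: 03-Sep-2025 18:19
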
@@ -1,8 +1,15 @@
-import { SingletonProto } from '@eggjs/tegg';
-import { BinaryType } from '../../enum/Binary';
-import binaries, { BinaryName } from '../../../../config/binaries';
-import { join } from 'path';
-import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
+import { join } from 'node:path';
+
+import { SingletonProto } from 'egg';
+
+import binaries, { type BinaryName } from '../../../../config/binaries.ts';
+import { BinaryType } from '../../enum/Binary.ts';
+import {
+  AbstractBinary,
+  BinaryAdapter,
+  type BinaryItem,
+  type FetchResult,
+} from './AbstractBinary.ts';
 
 @SingletonProto()
 @BinaryAdapter(BinaryType.NodePreGyp)
@@ -13,7 +20,10 @@ export class NodePreGypBinary extends AbstractBinary {
   }
 
   // https://github.com/mapbox/node-pre-gyp
-  async fetch(dir: string, binaryName: BinaryName): Promise<FetchResult | undefined> {
+  async fetch(
+    dir: string,
+    binaryName: BinaryName
+  ): Promise<FetchResult | undefined> {
     const binaryConfig = binaries[binaryName];
     const npmPackageName = binaryConfig.options?.npmPackageName ?? binaryName;
     const pkgUrl = `https://registry.npmjs.com/${npmPackageName}`;
@@ -33,20 +43,28 @@ export class NodePreGypBinary extends AbstractBinary {
       if (!pkgVersion.binary) continue;
       // https://github.com/mapbox/node-pre-gyp#package_name
       // defaults to {module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz
-      let binaryFile = pkgVersion.binary.package_name
-        || '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz';
+      let binaryFile =
+        pkgVersion.binary.package_name ||
+        '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz';
       if (!binaryFile) continue;
       const moduleName = pkgVersion.binary.module_name || pkgVersion.name;
-      binaryFile = binaryFile.replace('{version}', version)
+      binaryFile = binaryFile
+        .replace('{version}', version)
         .replace('{module_name}', moduleName);
 
       let currentDir = dirItems['/'];
       let versionPrefix = '';
       let remotePath = pkgVersion.binary.remote_path;
       const napiVersions = pkgVersion.binary.napi_versions ?? [];
-      if (binaryConfig.options?.requiredNapiVersions && napiVersions.length === 0) continue;
+      if (
+        binaryConfig.options?.requiredNapiVersions &&
+        napiVersions.length === 0
+      )
+        continue;
       if (remotePath?.includes('{version}')) {
-        const dirName = remotePath.includes('v{version}') ? `v${version}` : version;
+        const dirName = remotePath.includes('v{version}')
+          ? `v${version}`
+          : version;
         versionPrefix = `/${dirName}`;
         dirItems['/'].push({
           name: `${dirName}/`,
@@ -55,7 +73,8 @@ export class NodePreGypBinary extends AbstractBinary {
           isDir: true,
           url: '',
         });
-        currentDir = dirItems[`/${dirName}/`] = [];
+        currentDir = [];
+        dirItems[`/${dirName}/`] = currentDir;
       }
 
       // https://node-precompiled-binaries.grpc.io/?delimiter=/&prefix=grpc/v1.24.11/
@@ -67,17 +86,20 @@ export class NodePreGypBinary extends AbstractBinary {
       //   "remote_path": "{name}/v{version}",
       //   "package_name": "{node_abi}-{platform}-{arch}-{libc}.tar.gz"
       // },
-      if (binaryFile.includes('{node_abi}')
-        && binaryFile.includes('{platform}')
-        && binaryFile.includes('{arch}')
-        && binaryFile.includes('{libc}')) {
+      if (
+        binaryFile.includes('{node_abi}') &&
+        binaryFile.includes('{platform}') &&
+        binaryFile.includes('{arch}') &&
+        binaryFile.includes('{libc}')
+      ) {
         for (const nodeAbi of nodeABIVersions) {
           for (const platform of nodePlatforms) {
             const archs = nodeArchs[platform];
             const libcs = nodeLibcs[platform];
             for (const arch of archs) {
               for (const libc of libcs) {
-                const name = binaryFile.replace('{node_abi}', `node-v${nodeAbi}`)
+                const name = binaryFile
+                  .replace('{node_abi}', `node-v${nodeAbi}`)
                   .replace('{platform}', platform)
                   .replace('{arch}', arch)
                   .replace('{libc}', libc);
@@ -87,20 +109,23 @@ export class NodePreGypBinary extends AbstractBinary {
                   size: '-',
                   isDir: false,
                   url: `${binaryConfig.distUrl}/${binaryName}${versionPrefix}/${name}`,
-                  ignoreDownloadStatuses: [ 404 ],
+                  ignoreDownloadStatuses: [404],
                 });
               }
             }
           }
         }
-      } else if (binaryFile.includes('{node_abi}')
-        && binaryFile.includes('{platform}')
-        && binaryFile.includes('{arch}')) {
+      } else if (
+        binaryFile.includes('{node_abi}') &&
+        binaryFile.includes('{platform}') &&
+        binaryFile.includes('{arch}')
+      ) {
         for (const nodeAbi of nodeABIVersions) {
           for (const platform of nodePlatforms) {
             const archs = nodeArchs[platform];
             for (const arch of archs) {
-              const name = binaryFile.replace('{node_abi}', `node-v${nodeAbi}`)
+              const name = binaryFile
+                .replace('{node_abi}', `node-v${nodeAbi}`)
                 .replace('{platform}', platform)
                 .replace('{arch}', arch);
               currentDir.push({
@@ -109,12 +134,15 @@ export class NodePreGypBinary extends AbstractBinary {
                 size: '-',
                 isDir: false,
                 url: `${binaryConfig.distUrl}/${binaryName}${versionPrefix}/${name}`,
-                ignoreDownloadStatuses: [ 404 ],
+                ignoreDownloadStatuses: [404],
               });
             }
           }
         }
-      } else if (binaryFile.includes('{platform}-{arch}-{node_napi_label}-{libc}') && napiVersions.length > 0) {
+      } else if (
+        binaryFile.includes('{platform}-{arch}-{node_napi_label}-{libc}') &&
+        napiVersions.length > 0
+      ) {
        // https://skia-canvas.s3.us-east-1.amazonaws.com/v0.9.30/darwin-arm64-napi-v6-unknown.tar.gz
        // https://github.com/samizdatco/skia-canvas/blob/2a75801d7cce3b4e4e6ad015a173daefaa8465e6/package.json#L48
        // "binary": {
@@ -133,7 +161,8 @@ export class NodePreGypBinary extends AbstractBinary {
         for (const arch of archs) {
           for (const libc of libcs) {
             for (const napiVersion of napiVersions) {
-              const name = binaryFile.replace('{platform}', platform)
+              const name = binaryFile
+                .replace('{platform}', platform)
                 .replace('{arch}', arch)
                 .replace('{node_napi_label}', `napi-v${napiVersion}`)
                 .replace('{libc}', libc);
@@ -143,7 +172,7 @@ export class NodePreGypBinary extends AbstractBinary {
                 size: '-',
                 isDir: false,
                 url: `${binaryConfig.distUrl}${versionPrefix}/${name}`,
-                ignoreDownloadStatuses: [ 404, 403 ],
+                ignoreDownloadStatuses: [404, 403],
               });
             }
           }
@@ -165,10 +194,12 @@ export class NodePreGypBinary extends AbstractBinary {
         const archs = nodeArchs[platform];
         for (const arch of archs) {
           for (const napiVersion of napiVersions) {
-            const binaryFileName = binaryFile.replace('{platform}', platform)
+            const binaryFileName = binaryFile
+              .replace('{platform}', platform)
              .replace('{arch}', arch)
              .replace('{node_napi_label}', napiVersion);
-            remotePath = remotePath.replace('{module_name}', moduleName)
+            remotePath = remotePath
+              .replace('{module_name}', moduleName)
              .replace('{name}', binaryName)
              .replace('{version}', version)
              .replace('{configuration}', 'Release');
@@ -180,12 +211,15 @@ export class NodePreGypBinary extends AbstractBinary {
             size: '-',
             isDir: false,
             url: remoteUrl,
-            ignoreDownloadStatuses: [ 404 ],
+            ignoreDownloadStatuses: [404],
           });
         }
       }
     }
-      } else if (binaryFile.includes('{platform}') && binaryFile.includes('{arch}')) {
+      } else if (
+        binaryFile.includes('{platform}') &&
+        binaryFile.includes('{arch}')
+      ) {
        // https://github.com/grpc/grpc-node/blob/master/packages/grpc-tools/package.json#L29
        // "binary": {
        //   "module_name": "grpc_tools",
@@ -205,9 +239,11 @@ export class NodePreGypBinary extends AbstractBinary {
       for (const platform of nodePlatforms) {
         const archs = nodeArchs[platform];
         for (const arch of archs) {
-          const binaryFileName = binaryFile.replace('{platform}', platform)
+          const binaryFileName = binaryFile
+            .replace('{platform}', platform)
            .replace('{arch}', arch);
-          remotePath = remotePath.replace('{module_name}', moduleName)
+          remotePath = remotePath
+            .replace('{module_name}', moduleName)
            .replace('{name}', binaryName)
            .replace('{version}', version)
            .replace('{configuration}', 'Release');
@@ -219,7 +255,7 @@ export class NodePreGypBinary extends AbstractBinary {
           size: '-',
           isDir: false,
           url: remoteUrl,
-          ignoreDownloadStatuses: [ 404 ],
+          ignoreDownloadStatuses: [404],
         });
       }
     }
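
Note: the multi-line .replace() chains above only reformat the node-pre-gyp template expansion. How a package_name template unfolds, with invented values:

const binaryFile = '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz';
const name = binaryFile
  .replace('{module_name}', 'grpc_node') // hypothetical module
  .replace('{version}', '1.24.11')
  .replace('{node_abi}', 'node-v108')
  .replace('{platform}', 'linux')
  .replace('{arch}', 'x64');
console.log(name); // grpc_node-v1.24.11-node-v108-linux-x64.tar.gz
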
@@ -1,8 +1,13 @@
-import { SingletonProto } from '@eggjs/tegg';
-import { BinaryType } from '../../enum/Binary';
-import binaries from '../../../../config/binaries';
-import { FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
-import { BucketBinary } from './BucketBinary';
+import { SingletonProto } from 'egg';
+
+import binaries from '../../../../config/binaries.ts';
+import { BinaryType } from '../../enum/Binary.ts';
+import {
+  BinaryAdapter,
+  type BinaryItem,
+  type FetchResult,
+} from './AbstractBinary.ts';
+import { BucketBinary } from './BucketBinary.ts';
 
 @SingletonProto()
 @BinaryAdapter(BinaryType.Nwjs)
@@ -13,8 +18,10 @@ export class NwjsBinary extends BucketBinary {
     const binaryConfig = binaries.nwjs;
     const isRootDir = dir === '/';
     // /foo/ => foo/
-    const subDir = dir.substring(1);
-    const url = isRootDir ? binaryConfig.distUrl : `${this.s3Url}${encodeURIComponent(subDir)}`;
+    const subDir = dir.slice(1);
+    const url = isRootDir
+      ? binaryConfig.distUrl
+      : `${this.s3Url}${encodeURIComponent(subDir)}`;
     const xml = await this.requestXml(url);
     if (!xml) return;
 
@@ -25,7 +32,8 @@ export class NwjsBinary extends BucketBinary {
     // <tr><td valign="top"><img src="/icons/folder.gif" alt="[DIR]"></td><td><a href="v0.15.0-rc1/">v0.15.0-rc1/</a></td><td align="right">06-May-2016 12:24 </td><td align="right"> - </td><td> </td></tr>
     // <tr><td valign="top"><img src="/icons/folder.gif" alt="[DIR]"></td><td><a href="v0.15.0-rc2/">v0.15.0-rc2/</a></td><td align="right">13-May-2016 20:13 </td><td align="right"> - </td><td> </td></tr>
     const items: BinaryItem[] = [];
-    const re = /<td><a [^>]+?>([^<]+?\/)<\/a><\/td><td [^>]+?>([^>]+?)<\/td>/ig;
+    const re =
+      /<td><a [^>]+?>([^<]+?\/)<\/a><\/td><td [^>]+?>([^>]+?)<\/td>/gi;
     const matchs = xml.matchAll(re);
     for (const m of matchs) {
       const name = m[1].trim();
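
Note: a self-contained check of the reflowed directory-row regex against the sample <tr> quoted in the code comments:

const re = /<td><a [^>]+?>([^<]+?\/)<\/a><\/td><td [^>]+?>([^>]+?)<\/td>/gi;
const html =
  '<tr><td valign="top"><img src="/icons/folder.gif" alt="[DIR]"></td><td><a href="v0.15.0-rc1/">v0.15.0-rc1/</a></td><td align="right">06-May-2016 12:24 </td><td align="right"> - </td><td> </td></tr>';
for (const m of html.matchAll(re)) {
  console.log(m[1].trim(), m[2].trim()); // v0.15.0-rc1/ 06-May-2016 12:24
}
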
@@ -1,9 +1,15 @@
+import util from 'node:util';
+import path from 'node:path';
+
-import { AbstractBinary, BinaryAdapter, BinaryItem, FetchResult } from './AbstractBinary';
-import util from 'util';
-import path from 'path';
-import { SingletonProto } from '@eggjs/tegg';
-import { BinaryType } from '../../enum/Binary';
+import { SingletonProto } from 'egg';
+
+import { BinaryType } from '../../enum/Binary.ts';
+import {
+  AbstractBinary,
+  BinaryAdapter,
+  type BinaryItem,
+  type FetchResult,
+} from './AbstractBinary.ts';
 
 const PACKAGE_URL = 'https://registry.npmjs.com/playwright-core';
 const DOWNLOAD_HOST = 'https://playwright.azureedge.net/';
@@ -11,7 +17,7 @@ const DOWNLOAD_HOST = 'https://playwright.azureedge.net/';
 // https://github.com/microsoft/playwright/blob/main/packages/playwright-core/src/server/registry/index.ts
 /* eslint-disable quote-props */
 const DOWNLOAD_PATHS = {
-  'chromium': {
+  chromium: {
     '<unknown>': undefined,
     'ubuntu18.04-x64': undefined,
     'ubuntu20.04-x64': 'builds/chromium/%s/chromium-linux.zip',
@@ -28,17 +34,17 @@ const DOWNLOAD_PATHS = {
     'mac10.13': 'builds/chromium/%s/chromium-mac.zip',
     'mac10.14': 'builds/chromium/%s/chromium-mac.zip',
     'mac10.15': 'builds/chromium/%s/chromium-mac.zip',
-    'mac11': 'builds/chromium/%s/chromium-mac.zip',
+    mac11: 'builds/chromium/%s/chromium-mac.zip',
     'mac11-arm64': 'builds/chromium/%s/chromium-mac-arm64.zip',
-    'mac12': 'builds/chromium/%s/chromium-mac.zip',
+    mac12: 'builds/chromium/%s/chromium-mac.zip',
     'mac12-arm64': 'builds/chromium/%s/chromium-mac-arm64.zip',
-    'mac13': 'builds/chromium/%s/chromium-mac.zip',
+    mac13: 'builds/chromium/%s/chromium-mac.zip',
     'mac13-arm64': 'builds/chromium/%s/chromium-mac-arm64.zip',
-    'mac14': 'builds/chromium/%s/chromium-mac.zip',
+    mac14: 'builds/chromium/%s/chromium-mac.zip',
     'mac14-arm64': 'builds/chromium/%s/chromium-mac-arm64.zip',
-    'mac15': 'builds/chromium/%s/chromium-mac.zip',
+    mac15: 'builds/chromium/%s/chromium-mac.zip',
     'mac15-arm64': 'builds/chromium/%s/chromium-mac-arm64.zip',
-    'win64': 'builds/chromium/%s/chromium-win64.zip',
+    win64: 'builds/chromium/%s/chromium-win64.zip',
   },
   'chromium-headless-shell': {
     '<unknown>': undefined,
@@ -47,87 +53,128 @@ const DOWNLOAD_PATHS = {
     'ubuntu22.04-x64': 'builds/chromium/%s/chromium-headless-shell-linux.zip',
     'ubuntu24.04-x64': 'builds/chromium/%s/chromium-headless-shell-linux.zip',
     'ubuntu18.04-arm64': undefined,
-    'ubuntu20.04-arm64': 'builds/chromium/%s/chromium-headless-shell-linux-arm64.zip',
-    'ubuntu22.04-arm64': 'builds/chromium/%s/chromium-headless-shell-linux-arm64.zip',
-    'ubuntu24.04-arm64': 'builds/chromium/%s/chromium-headless-shell-linux-arm64.zip',
+    'ubuntu20.04-arm64':
+      'builds/chromium/%s/chromium-headless-shell-linux-arm64.zip',
+    'ubuntu22.04-arm64':
+      'builds/chromium/%s/chromium-headless-shell-linux-arm64.zip',
+    'ubuntu24.04-arm64':
+      'builds/chromium/%s/chromium-headless-shell-linux-arm64.zip',
     'debian11-x64': 'builds/chromium/%s/chromium-headless-shell-linux.zip',
-    'debian11-arm64': 'builds/chromium/%s/chromium-headless-shell-linux-arm64.zip',
+    'debian11-arm64':
+      'builds/chromium/%s/chromium-headless-shell-linux-arm64.zip',
     'debian12-x64': 'builds/chromium/%s/chromium-headless-shell-linux.zip',
-    'debian12-arm64': 'builds/chromium/%s/chromium-headless-shell-linux-arm64.zip',
+    'debian12-arm64':
+      'builds/chromium/%s/chromium-headless-shell-linux-arm64.zip',
     'mac10.13': undefined,
     'mac10.14': undefined,
     'mac10.15': undefined,
-    'mac11': 'builds/chromium/%s/chromium-headless-shell-mac.zip',
+    mac11: 'builds/chromium/%s/chromium-headless-shell-mac.zip',
     'mac11-arm64': 'builds/chromium/%s/chromium-headless-shell-mac-arm64.zip',
-    'mac12': 'builds/chromium/%s/chromium-headless-shell-mac.zip',
+    mac12: 'builds/chromium/%s/chromium-headless-shell-mac.zip',
     'mac12-arm64': 'builds/chromium/%s/chromium-headless-shell-mac-arm64.zip',
-    'mac13': 'builds/chromium/%s/chromium-headless-shell-mac.zip',
+    mac13: 'builds/chromium/%s/chromium-headless-shell-mac.zip',
     'mac13-arm64': 'builds/chromium/%s/chromium-headless-shell-mac-arm64.zip',
-    'mac14': 'builds/chromium/%s/chromium-headless-shell-mac.zip',
+    mac14: 'builds/chromium/%s/chromium-headless-shell-mac.zip',
     'mac14-arm64': 'builds/chromium/%s/chromium-headless-shell-mac-arm64.zip',
-    'mac15': 'builds/chromium/%s/chromium-headless-shell-mac.zip',
+    mac15: 'builds/chromium/%s/chromium-headless-shell-mac.zip',
     'mac15-arm64': 'builds/chromium/%s/chromium-headless-shell-mac-arm64.zip',
-    'win64': 'builds/chromium/%s/chromium-headless-shell-win64.zip',
+    win64: 'builds/chromium/%s/chromium-headless-shell-win64.zip',
   },
   'chromium-tip-of-tree': {
     '<unknown>': undefined,
     'ubuntu18.04-x64': undefined,
-    'ubuntu20.04-x64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
-    'ubuntu22.04-x64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
-    'ubuntu24.04-x64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
+    'ubuntu20.04-x64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
+    'ubuntu22.04-x64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
+    'ubuntu24.04-x64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
     'ubuntu18.04-arm64': undefined,
-    'ubuntu20.04-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
-    'ubuntu22.04-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
-    'ubuntu24.04-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
-    'debian11-x64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
-    'debian11-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
-    'debian12-x64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
-    'debian12-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
+    'ubuntu20.04-arm64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
+    'ubuntu22.04-arm64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
+    'ubuntu24.04-arm64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
+    'debian11-x64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
+    'debian11-arm64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
+    'debian12-x64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
+    'debian12-arm64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
     'mac10.13': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
     'mac10.14': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
     'mac10.15': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
-    'mac11': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
-    'mac11-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
-    'mac12': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
-    'mac12-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
-    'mac13': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
-    'mac13-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
-    'mac14': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
-    'mac14-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
-    'mac15': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
-    'mac15-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
-    'win64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-win64.zip',
+    mac11: 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
+    'mac11-arm64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
+    mac12: 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
+    'mac12-arm64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
+    mac13: 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
+    'mac13-arm64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
+    mac14: 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
+    'mac14-arm64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
+    mac15: 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
+    'mac15-arm64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
+    win64: 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-win64.zip',
   },
   'chromium-tip-of-tree-headless-shell': {
     '<unknown>': undefined,
     'ubuntu18.04-x64': undefined,
-    'ubuntu20.04-x64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux.zip',
-    'ubuntu22.04-x64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux.zip',
-    'ubuntu24.04-x64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux.zip',
+    'ubuntu20.04-x64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux.zip',
+    'ubuntu22.04-x64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux.zip',
+    'ubuntu24.04-x64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux.zip',
     'ubuntu18.04-arm64': undefined,
-    'ubuntu20.04-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux-arm64.zip',
-    'ubuntu22.04-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux-arm64.zip',
-    'ubuntu24.04-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux-arm64.zip',
-    'debian11-x64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux.zip',
-    'debian11-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux-arm64.zip',
-    'debian12-x64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux.zip',
-    'debian12-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux-arm64.zip',
+    'ubuntu20.04-arm64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux-arm64.zip',
+    'ubuntu22.04-arm64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux-arm64.zip',
+    'ubuntu24.04-arm64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux-arm64.zip',
+    'debian11-x64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux.zip',
+    'debian11-arm64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux-arm64.zip',
+    'debian12-x64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux.zip',
+    'debian12-arm64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux-arm64.zip',
     'mac10.13': undefined,
     'mac10.14': undefined,
     'mac10.15': undefined,
-    'mac11': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac.zip',
-    'mac11-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac-arm64.zip',
-    'mac12': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac.zip',
-    'mac12-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac-arm64.zip',
-    'mac13': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac.zip',
-    'mac13-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac-arm64.zip',
-    'mac14': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac.zip',
-    'mac14-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac-arm64.zip',
-    'mac15': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac.zip',
-    'mac15-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac-arm64.zip',
-    'win64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-win64.zip',
+    mac11:
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac.zip',
+    'mac11-arm64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac-arm64.zip',
+    mac12:
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac.zip',
+    'mac12-arm64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac-arm64.zip',
+    mac13:
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac.zip',
+    'mac13-arm64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac-arm64.zip',
+    mac14:
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac.zip',
+    'mac14-arm64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac-arm64.zip',
+    mac15:
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac.zip',
+    'mac15-arm64':
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac-arm64.zip',
+    win64:
+      'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-win64.zip',
   },
-  'firefox': {
+  firefox: {
     '<unknown>': undefined,
     'ubuntu18.04-x64': undefined,
     'ubuntu20.04-x64': 'builds/firefox/%s/firefox-ubuntu-20.04.zip',
@@ -144,17 +191,17 @@ const DOWNLOAD_PATHS = {
     'mac10.13': 'builds/firefox/%s/firefox-mac.zip',
     'mac10.14': 'builds/firefox/%s/firefox-mac.zip',
     'mac10.15': 'builds/firefox/%s/firefox-mac.zip',
-    'mac11': 'builds/firefox/%s/firefox-mac.zip',
+    mac11: 'builds/firefox/%s/firefox-mac.zip',
     'mac11-arm64': 'builds/firefox/%s/firefox-mac-arm64.zip',
-    'mac12': 'builds/firefox/%s/firefox-mac.zip',
+    mac12: 'builds/firefox/%s/firefox-mac.zip',
     'mac12-arm64': 'builds/firefox/%s/firefox-mac-arm64.zip',
-    'mac13': 'builds/firefox/%s/firefox-mac.zip',
+    mac13: 'builds/firefox/%s/firefox-mac.zip',
     'mac13-arm64': 'builds/firefox/%s/firefox-mac-arm64.zip',
-    'mac14': 'builds/firefox/%s/firefox-mac.zip',
+    mac14: 'builds/firefox/%s/firefox-mac.zip',
     'mac14-arm64': 'builds/firefox/%s/firefox-mac-arm64.zip',
-    'mac15': 'builds/firefox/%s/firefox-mac.zip',
+    mac15: 'builds/firefox/%s/firefox-mac.zip',
     'mac15-arm64': 'builds/firefox/%s/firefox-mac-arm64.zip',
-    'win64': 'builds/firefox/%s/firefox-win64.zip',
+    win64: 'builds/firefox/%s/firefox-win64.zip',
   },
   'firefox-beta': {
     '<unknown>': undefined,
@@ -164,8 +211,10 @@ const DOWNLOAD_PATHS = {
     'ubuntu24.04-x64': 'builds/firefox-beta/%s/firefox-beta-ubuntu-24.04.zip',
     'ubuntu18.04-arm64': undefined,
     'ubuntu20.04-arm64': undefined,
-    'ubuntu22.04-arm64': 'builds/firefox-beta/%s/firefox-beta-ubuntu-22.04-arm64.zip',
-    'ubuntu24.04-arm64': 'builds/firefox-beta/%s/firefox-beta-ubuntu-24.04-arm64.zip',
+    'ubuntu22.04-arm64':
+      'builds/firefox-beta/%s/firefox-beta-ubuntu-22.04-arm64.zip',
+    'ubuntu24.04-arm64':
+      'builds/firefox-beta/%s/firefox-beta-ubuntu-24.04-arm64.zip',
     'debian11-x64': 'builds/firefox-beta/%s/firefox-beta-debian-11.zip',
     'debian11-arm64': 'builds/firefox-beta/%s/firefox-beta-debian-11-arm64.zip',
     'debian12-x64': 'builds/firefox-beta/%s/firefox-beta-debian-12.zip',
@@ -173,19 +222,19 @@ const DOWNLOAD_PATHS = {
     'mac10.13': 'builds/firefox-beta/%s/firefox-beta-mac.zip',
     'mac10.14': 'builds/firefox-beta/%s/firefox-beta-mac.zip',
     'mac10.15': 'builds/firefox-beta/%s/firefox-beta-mac.zip',
-    'mac11': 'builds/firefox-beta/%s/firefox-beta-mac.zip',
+    mac11: 'builds/firefox-beta/%s/firefox-beta-mac.zip',
     'mac11-arm64': 'builds/firefox-beta/%s/firefox-beta-mac-arm64.zip',
-    'mac12': 'builds/firefox-beta/%s/firefox-beta-mac.zip',
+    mac12: 'builds/firefox-beta/%s/firefox-beta-mac.zip',
     'mac12-arm64': 'builds/firefox-beta/%s/firefox-beta-mac-arm64.zip',
-    'mac13': 'builds/firefox-beta/%s/firefox-beta-mac.zip',
+    mac13: 'builds/firefox-beta/%s/firefox-beta-mac.zip',
     'mac13-arm64': 'builds/firefox-beta/%s/firefox-beta-mac-arm64.zip',
|
||||
'mac14': 'builds/firefox-beta/%s/firefox-beta-mac.zip',
|
||||
mac14: 'builds/firefox-beta/%s/firefox-beta-mac.zip',
|
||||
'mac14-arm64': 'builds/firefox-beta/%s/firefox-beta-mac-arm64.zip',
|
||||
'mac15': 'builds/firefox-beta/%s/firefox-beta-mac.zip',
|
||||
mac15: 'builds/firefox-beta/%s/firefox-beta-mac.zip',
|
||||
'mac15-arm64': 'builds/firefox-beta/%s/firefox-beta-mac-arm64.zip',
|
||||
'win64': 'builds/firefox-beta/%s/firefox-beta-win64.zip',
|
||||
win64: 'builds/firefox-beta/%s/firefox-beta-win64.zip',
|
||||
},
|
||||
'webkit': {
|
||||
webkit: {
|
||||
'<unknown>': undefined,
|
||||
'ubuntu18.04-x64': undefined,
|
||||
'ubuntu20.04-x64': 'builds/webkit/%s/webkit-ubuntu-20.04.zip',
|
||||
@@ -200,21 +249,23 @@ const DOWNLOAD_PATHS = {
|
||||
'debian12-x64': 'builds/webkit/%s/webkit-debian-12.zip',
|
||||
'debian12-arm64': 'builds/webkit/%s/webkit-debian-12-arm64.zip',
|
||||
'mac10.13': undefined,
|
||||
'mac10.14': 'builds/deprecated-webkit-mac-10.14/%s/deprecated-webkit-mac-10.14.zip',
|
||||
'mac10.15': 'builds/deprecated-webkit-mac-10.15/%s/deprecated-webkit-mac-10.15.zip',
|
||||
'mac11': 'builds/webkit/%s/webkit-mac-11.zip',
|
||||
'mac10.14':
|
||||
'builds/deprecated-webkit-mac-10.14/%s/deprecated-webkit-mac-10.14.zip',
|
||||
'mac10.15':
|
||||
'builds/deprecated-webkit-mac-10.15/%s/deprecated-webkit-mac-10.15.zip',
|
||||
mac11: 'builds/webkit/%s/webkit-mac-11.zip',
|
||||
'mac11-arm64': 'builds/webkit/%s/webkit-mac-11-arm64.zip',
|
||||
'mac12': 'builds/webkit/%s/webkit-mac-12.zip',
|
||||
mac12: 'builds/webkit/%s/webkit-mac-12.zip',
|
||||
'mac12-arm64': 'builds/webkit/%s/webkit-mac-12-arm64.zip',
|
||||
'mac13': 'builds/webkit/%s/webkit-mac-13.zip',
|
||||
mac13: 'builds/webkit/%s/webkit-mac-13.zip',
|
||||
'mac13-arm64': 'builds/webkit/%s/webkit-mac-13-arm64.zip',
|
||||
'mac14': 'builds/webkit/%s/webkit-mac-14.zip',
|
||||
mac14: 'builds/webkit/%s/webkit-mac-14.zip',
|
||||
'mac14-arm64': 'builds/webkit/%s/webkit-mac-14-arm64.zip',
|
||||
'mac15': 'builds/webkit/%s/webkit-mac-15.zip',
|
||||
mac15: 'builds/webkit/%s/webkit-mac-15.zip',
|
||||
'mac15-arm64': 'builds/webkit/%s/webkit-mac-15-arm64.zip',
|
||||
'win64': 'builds/webkit/%s/webkit-win64.zip',
|
||||
win64: 'builds/webkit/%s/webkit-win64.zip',
|
||||
},
|
||||
'ffmpeg': {
|
||||
ffmpeg: {
|
||||
'<unknown>': undefined,
|
||||
'ubuntu18.04-x64': undefined,
|
||||
'ubuntu20.04-x64': 'builds/ffmpeg/%s/ffmpeg-linux.zip',
|
||||
@@ -231,19 +282,19 @@ const DOWNLOAD_PATHS = {
|
||||
'mac10.13': 'builds/ffmpeg/%s/ffmpeg-mac.zip',
|
||||
'mac10.14': 'builds/ffmpeg/%s/ffmpeg-mac.zip',
|
||||
'mac10.15': 'builds/ffmpeg/%s/ffmpeg-mac.zip',
|
||||
'mac11': 'builds/ffmpeg/%s/ffmpeg-mac.zip',
|
||||
mac11: 'builds/ffmpeg/%s/ffmpeg-mac.zip',
|
||||
'mac11-arm64': 'builds/ffmpeg/%s/ffmpeg-mac-arm64.zip',
|
||||
'mac12': 'builds/ffmpeg/%s/ffmpeg-mac.zip',
|
||||
mac12: 'builds/ffmpeg/%s/ffmpeg-mac.zip',
|
||||
'mac12-arm64': 'builds/ffmpeg/%s/ffmpeg-mac-arm64.zip',
|
||||
'mac13': 'builds/ffmpeg/%s/ffmpeg-mac.zip',
|
||||
mac13: 'builds/ffmpeg/%s/ffmpeg-mac.zip',
|
||||
'mac13-arm64': 'builds/ffmpeg/%s/ffmpeg-mac-arm64.zip',
|
||||
'mac14': 'builds/ffmpeg/%s/ffmpeg-mac.zip',
|
||||
mac14: 'builds/ffmpeg/%s/ffmpeg-mac.zip',
|
||||
'mac14-arm64': 'builds/ffmpeg/%s/ffmpeg-mac-arm64.zip',
|
||||
'mac15': 'builds/ffmpeg/%s/ffmpeg-mac.zip',
|
||||
mac15: 'builds/ffmpeg/%s/ffmpeg-mac.zip',
|
||||
'mac15-arm64': 'builds/ffmpeg/%s/ffmpeg-mac-arm64.zip',
|
||||
'win64': 'builds/ffmpeg/%s/ffmpeg-win64.zip',
|
||||
win64: 'builds/ffmpeg/%s/ffmpeg-win64.zip',
|
||||
},
|
||||
'winldd': {
|
||||
winldd: {
|
||||
'<unknown>': undefined,
|
||||
'ubuntu18.04-x64': undefined,
|
||||
'ubuntu20.04-x64': undefined,
|
||||
@@ -260,19 +311,19 @@ const DOWNLOAD_PATHS = {
|
||||
'mac10.13': undefined,
|
||||
'mac10.14': undefined,
|
||||
'mac10.15': undefined,
|
||||
'mac11': undefined,
|
||||
mac11: undefined,
|
||||
'mac11-arm64': undefined,
|
||||
'mac12': undefined,
|
||||
mac12: undefined,
|
||||
'mac12-arm64': undefined,
|
||||
'mac13': undefined,
|
||||
mac13: undefined,
|
||||
'mac13-arm64': undefined,
|
||||
'mac14': undefined,
|
||||
mac14: undefined,
|
||||
'mac14-arm64': undefined,
|
||||
'mac15': undefined,
|
||||
mac15: undefined,
|
||||
'mac15-arm64': undefined,
|
||||
'win64': 'builds/winldd/%s/winldd-win64.zip',
|
||||
win64: 'builds/winldd/%s/winldd-win64.zip',
|
||||
},
|
||||
'android': {
|
||||
android: {
|
||||
'<unknown>': 'builds/android/%s/android.zip',
|
||||
'ubuntu18.04-x64': undefined,
|
||||
'ubuntu20.04-x64': 'builds/android/%s/android.zip',
|
||||
@@ -289,17 +340,17 @@ const DOWNLOAD_PATHS = {
|
||||
'mac10.13': 'builds/android/%s/android.zip',
|
||||
'mac10.14': 'builds/android/%s/android.zip',
|
||||
'mac10.15': 'builds/android/%s/android.zip',
|
||||
'mac11': 'builds/android/%s/android.zip',
|
||||
mac11: 'builds/android/%s/android.zip',
|
||||
'mac11-arm64': 'builds/android/%s/android.zip',
|
||||
'mac12': 'builds/android/%s/android.zip',
|
||||
mac12: 'builds/android/%s/android.zip',
|
||||
'mac12-arm64': 'builds/android/%s/android.zip',
|
||||
'mac13': 'builds/android/%s/android.zip',
|
||||
mac13: 'builds/android/%s/android.zip',
|
||||
'mac13-arm64': 'builds/android/%s/android.zip',
|
||||
'mac14': 'builds/android/%s/android.zip',
|
||||
mac14: 'builds/android/%s/android.zip',
|
||||
'mac14-arm64': 'builds/android/%s/android.zip',
|
||||
'mac15': 'builds/android/%s/android.zip',
|
||||
mac15: 'builds/android/%s/android.zip',
|
||||
'mac15-arm64': 'builds/android/%s/android.zip',
|
||||
'win64': 'builds/android/%s/android.zip',
|
||||
win64: 'builds/android/%s/android.zip',
|
||||
},
|
||||
} as const;
|
||||
|
||||
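The entries above are printf-style templates: the adapter later substitutes the revision with util.format and prefixes the download host (see the fetch logic below). A minimal sketch of that resolution, assuming DOWNLOAD_HOST points at the Playwright CDN origin referenced in the comments:

import util from 'node:util';

// assumption: DOWNLOAD_HOST is the Playwright CDN origin
const DOWNLOAD_HOST = 'https://playwright.azureedge.net/';
const remotePath = 'builds/webkit/%s/webkit-mac-13.zip';
const revision = '1944'; // hypothetical revision number
const url = DOWNLOAD_HOST + util.format(remotePath, revision);
// => 'https://playwright.azureedge.net/builds/webkit/1944/webkit-mac-13.zip'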
@@ -317,17 +368,37 @@ export class PlaywrightBinary extends AbstractBinary {
const nowDateISO = new Date().toISOString();
const buildDirs: BinaryItem[] = [];
for (const browserName of Object.keys(DOWNLOAD_PATHS)) {
if (browserName === 'chromium-headless-shell' || browserName === 'chromium-tip-of-tree-headless-shell') {
if (
browserName === 'chromium-headless-shell' ||
browserName === 'chromium-tip-of-tree-headless-shell'
) {
continue;
}
buildDirs.push({ name: `${browserName}/`, isDir: true, url: '', size: '-', date: nowDateISO });
buildDirs.push({
name: `${browserName}/`,
isDir: true,
url: '',
size: '-',
date: nowDateISO,
});
}
this.dirItems = {
'/': [{ name: 'builds/', isDir: true, url: '', size: '-', date: nowDateISO }],
'/': [
{
name: 'builds/',
isDir: true,
url: '',
size: '-',
date: nowDateISO,
},
],
'/builds/': buildDirs,
};
for (const browserName of Object.keys(DOWNLOAD_PATHS)) {
if (browserName === 'chromium-headless-shell' || browserName === 'chromium-tip-of-tree-headless-shell') {
if (
browserName === 'chromium-headless-shell' ||
browserName === 'chromium-tip-of-tree-headless-shell'
) {
continue;
}
this.dirItems[`/builds/${browserName}/`] = [];
@@ -338,11 +409,16 @@ export class PlaywrightBinary extends AbstractBinary {
.filter(version => version.match(/^(?:\d+\.\d+\.\d+)(?:-beta-\d+)?$/))
// select the 20 most recently updated items
.slice(-20);
const browsers: { name: keyof typeof DOWNLOAD_PATHS; revision: string; browserVersion: string; revisionOverrides?: Record<string, string> }[] = [];
const browsers: {
name: keyof typeof DOWNLOAD_PATHS;
revision: string;
browserVersion: string;
revisionOverrides?: Record<string, string>;
}[] = [];
await Promise.all(
packageVersions.map(version =>
this.requestJSON(
`https://unpkg.com/playwright-core@${version}/browsers.json`,
`https://unpkg.com/playwright-core@${version}/browsers.json`
)
.then(data => {
// browsers: [
@@ -355,16 +431,22 @@ export class PlaywrightBinary extends AbstractBinary {
// },
// ]
browsers.push(...data.browsers);
return data;
})
.catch(err => {
/* c8 ignore next 2 */
this.logger.warn('[PlaywrightBinary.fetch:error] Playwright version %s browser data request failed: %s',
version, err);
}),
),
this.logger.warn(
'[PlaywrightBinary.fetch:error] Playwright version %s browser data request failed: %s',
version,
err
);
})
)
);
// if chromium-headless-shell does not exist in browsers, copy chromium to chromium-headless-shell
if (!browsers.find(browser => browser.name === 'chromium-headless-shell')) {
if (
!browsers.some(browser => browser.name === 'chromium-headless-shell')
) {
const chromium = browsers.find(browser => browser.name === 'chromium');
// {
// "name": "chromium",
@@ -380,8 +462,14 @@ export class PlaywrightBinary extends AbstractBinary {
}
}
// if chromium-tip-of-tree-headless-shell does not exist in browsers, copy chromium-tip-of-tree to chromium-tip-of-tree-headless-shell
if (!browsers.find(browser => browser.name === 'chromium-tip-of-tree-headless-shell')) {
const chromiumTipOfTree = browsers.find(browser => browser.name === 'chromium-tip-of-tree');
if (
!browsers.some(
browser => browser.name === 'chromium-tip-of-tree-headless-shell'
)
) {
const chromiumTipOfTree = browsers.find(
browser => browser.name === 'chromium-tip-of-tree'
);
if (chromiumTipOfTree) {
browsers.push({
...chromiumTipOfTree,
@@ -403,9 +491,10 @@ export class PlaywrightBinary extends AbstractBinary {
// https://playwright.azureedge.net/builds/chromium-tip-of-tree/1293/chromium-tip-of-tree-headless-shell-mac-arm64.zip
browserDirname = 'chromium-tip-of-tree';
}
for (const [ platform, remotePath ] of Object.entries(downloadPaths)) {
for (const [platform, remotePath] of Object.entries(downloadPaths)) {
if (typeof remotePath !== 'string') continue;
const revision = browser.revisionOverrides?.[platform] ?? browser.revision;
const revision =
browser.revisionOverrides?.[platform] ?? browser.revision;
const itemDate = browser.browserVersion || revision;
const url = DOWNLOAD_HOST + util.format(remotePath, revision);
const name = path.basename(remotePath);
@@ -420,8 +509,14 @@ export class PlaywrightBinary extends AbstractBinary {
});
this.dirItems[dir] = [];
}
if (!this.dirItems[dir].find(item => item.name === name)) {
this.dirItems[dir].push({ name, isDir: false, url, size: '-', date: itemDate });
if (!this.dirItems[dir].some(item => item.name === name)) {
this.dirItems[dir].push({
name,
isDir: false,
url,
size: '-',
date: itemDate,
});
}
}
}

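In short, older playwright-core manifests carry no headless-shell entries, so the adapter clones the matching chromium entry under the headless-shell name. A condensed sketch of that fallback, with the entry shape assumed from the browsers.json sample in the comments:

interface BrowserEntry {
  name: string;
  revision: string;
  browserVersion: string;
  revisionOverrides?: Record<string, string>;
}

function ensureHeadlessShell(browsers: BrowserEntry[]): void {
  // reuse chromium's revision under the headless-shell name so its
  // download items still appear for old manifests
  if (!browsers.some(b => b.name === 'chromium-headless-shell')) {
    const chromium = browsers.find(b => b.name === 'chromium');
    if (chromium) {
      browsers.push({ ...chromium, name: 'chromium-headless-shell' });
    }
  }
}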
@@ -1,8 +1,15 @@
import path from 'node:path';
import { SingletonProto } from '@eggjs/tegg';
import { BinaryType } from '../../enum/Binary';
import binaries, { BinaryName } from '../../../../config/binaries';
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';

import { SingletonProto } from 'egg';

import binaries, { type BinaryName } from '../../../../config/binaries.ts';
import { BinaryType } from '../../enum/Binary.ts';
import {
AbstractBinary,
BinaryAdapter,
type BinaryItem,
type FetchResult,
} from './AbstractBinary.ts';

@SingletonProto()
@BinaryAdapter(BinaryType.Prisma)
@@ -30,15 +37,17 @@ export class PrismaBinary extends AbstractBinary {
const commitIdMap: Record<string, boolean> = {};
// https://list-binaries.prisma-orm.workers.dev/?delimiter=/&prefix=all_commits/61023c35d2c8762f66f09bc4183d2f630b541d08/
for (const version in data.versions) {
const major = parseInt(version.split('.', 1)[0]);
const major = Number.parseInt(version.split('.', 1)[0]);
// need >= 3.0.0
if (major < 3) continue;
const date = data.time[version];
const pkg = data.versions[version];
// https://registry.npmjs.com/@prisma/engines/4.14.1
// https://registry.npmjs.com/@prisma/engines/5.7.0 should read from dependencies
const enginesVersion = pkg.devDependencies?.['@prisma/engines-version']
|| pkg.dependencies?.['@prisma/engines-version'] || '';
const enginesVersion =
pkg.devDependencies?.['@prisma/engines-version'] ||
pkg.dependencies?.['@prisma/engines-version'] ||
'';
// "@prisma/engines-version": "4.14.0-67.d9a4c5988f480fa576d43970d5a23641aa77bc9c"
// "@prisma/engines-version": "5.7.0-41.79fb5193cf0a8fdbef536e4b4a159cad677ab1b9"
const matched = /\.(\w{30,})$/.exec(enginesVersion);
@@ -56,19 +65,23 @@ export class PrismaBinary extends AbstractBinary {
}
}

async fetch(dir: string, binaryName: BinaryName): Promise<FetchResult | undefined> {
async fetch(
dir: string,
binaryName: BinaryName
): Promise<FetchResult | undefined> {
const existsItems = this.dirItems[dir];
if (existsItems) {
return { items: existsItems, nextParams: null };
}
// /foo/ => foo/
const binaryConfig = binaries[binaryName];
const subDir = dir.substring(1);
const subDir = dir.slice(1);
const url = `${binaryConfig.distUrl}?delimiter=/&prefix=${encodeURIComponent(subDir)}`;
const result = await this.requestJSON(url);
return { items: this.#parseItems(result), nextParams: null };
}

// oxlint-disable-next-line typescript-eslint/no-explicit-any
#parseItems(result: any): BinaryItem[] {
const items: BinaryItem[] = [];
// objects": [

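The commit id that names each Prisma engines build is the long hex suffix of @prisma/engines-version, which the regex above peels off. A worked example using the sample value from the comments:

// sample value copied from the comments above
const enginesVersion = '5.7.0-41.79fb5193cf0a8fdbef536e4b4a159cad677ab1b9';
const matched = /\.(\w{30,})$/.exec(enginesVersion);
const commitId = matched?.[1];
// => '79fb5193cf0a8fdbef536e4b4a159cad677ab1b9'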
@@ -1,6 +1,17 @@
import { SingletonProto } from '@eggjs/tegg';
import { BinaryType } from '../../enum/Binary';
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
import { SingletonProto } from 'egg';
import { XMLParser } from 'fast-xml-parser';

import { BinaryType } from '../../enum/Binary.ts';
import {
AbstractBinary,
BinaryAdapter,
type BinaryItem,
type FetchResult,
} from './AbstractBinary.ts';

export const platforms = ['Linux_x64', 'Mac', 'Mac_Arm', 'Win', 'Win_x64'];

const MAX_DEPTH = 1;

@SingletonProto()
@BinaryAdapter(BinaryType.Puppeteer)
@@ -13,72 +24,28 @@ export class PuppeteerBinary extends AbstractBinary {
this.dirItems = undefined;
}

async fetch(dir: string): Promise<FetchResult | undefined> {
async fetch(
dir: string,
_binaryName: string,
lastData?: Record<string, unknown>
): Promise<FetchResult | undefined> {
if (!this.dirItems) {
const pkgUrl = 'https://registry.npmjs.com/puppeteer';
const data = await this.requestJSON(pkgUrl);
const s3Url = 'https://chromium-browser-snapshots.storage.googleapis.com';
const chromiumRevisions = new Map<string, string>();
this.dirItems = {};
this.dirItems['/'] = [];
const chromiumRevisions = new Map<string, string>();
for (const version in data.versions) {
// find chromium versions
const pkg = data.versions[version];
const revision = pkg.puppeteer?.chromium_revision ? String(pkg.puppeteer.chromium_revision) : '';
if (revision && !chromiumRevisions.has(revision)) {
chromiumRevisions.set(revision, data.time[version]);
}
}

// https://unpkg.com/puppeteer@5.1.0/lib/cjs/revisions.js
// https://unpkg.com/puppeteer@latest/lib/cjs/puppeteer/revisions.js
// exports.PUPPETEER_REVISIONS = {
// chromium: '768783',
// firefox: 'latest',
// };
const unpkgURL = 'https://unpkg.com/puppeteer-core@latest/lib/cjs/puppeteer/revisions.js';
const text = await this.requestXml(unpkgURL);
const m = /chromium:\s+\'(\d+)\'\,/.exec(text);
if (m && !chromiumRevisions.has(m[1])) {
chromiumRevisions.set(m[1], new Date().toISOString());
}

// download LAST_CHANGE
// https://github.com/chaopeng/chromium-downloader/blob/master/get-chromium#L28
const LAST_CHANGE_URL = 'https://www.googleapis.com/download/storage/v1/b/chromium-browser-snapshots/o/Linux_x64%2FLAST_CHANGE?alt=media';
const lastRevision = await this.requestXml(LAST_CHANGE_URL);
if (lastRevision) {
chromiumRevisions.set(lastRevision, new Date().toISOString());
}

// old versions
// v5.0.0
chromiumRevisions.set('756035', data.time['5.0.0']);
// v5.2.0
chromiumRevisions.set('768783', data.time['5.2.0']);
// v5.2.1
chromiumRevisions.set('782078', data.time['5.2.1']);
// v5.3.0
chromiumRevisions.set('800071', data.time['5.3.0']);
// v5.4.0
chromiumRevisions.set('809590', data.time['5.4.0']);
// v5.5.0
chromiumRevisions.set('818858', data.time['5.5.0']);
// v6.0.0
chromiumRevisions.set('843427', data.time['6.0.0']);
// "7.0.0"
chromiumRevisions.set('848005', data.time['7.0.0']);
// https://github.com/puppeteer/puppeteer/blob/v8.0.0/src/revisions.ts#L23
// "8.0.0":"2021-02-26T08:36:50.107Z"
chromiumRevisions.set('856583', data.time['8.0.0']);
// "9.0.0":"2021-04-21T11:27:32.513Z"
chromiumRevisions.set('869685', data.time['9.0.0']);
// "10.0.0":"2021-05-31T12:42:27.486Z"
chromiumRevisions.set('884014', data.time['10.0.0']);
// "11.0.0":"2021-11-03T09:29:12.751Z"
chromiumRevisions.set('901912', data.time['11.0.0']);

const platforms = [ 'Linux_x64', 'Mac', 'Mac_Arm', 'Win', 'Win_x64' ];
for (const platform of platforms) {
const revision = lastData?.[platform] as string;
if (!revision) {
// drop legacy tasks from the database that carry no lastData, to avoid walking too many tasks
this.logger.info(
'drop puppeteer task if has no last data for platform %s, lastPlatform',
platform,
lastData
);
return;
}
let marker = revision ? `${platform}/${revision}/REVISIONS` : undefined;
this.dirItems['/'].push({
name: `${platform}/`,
date: new Date().toISOString(),
@@ -87,8 +54,35 @@ export class PuppeteerBinary extends AbstractBinary {
url: '',
});
this.dirItems[`/${platform}/`] = [];
let i = 0;
do {
let requestUrl = `${s3Url}?prefix=${platform}&max-keys=100`;
if (marker) {
requestUrl += `&marker=${marker}`;
}
const xml = await this.requestXml(requestUrl);
const parser = new XMLParser();
const obj = parser.parse(xml);
if (
obj.ListBucketResult.IsTruncated === true &&
obj.ListBucketResult.NextMarker
) {
marker = obj.ListBucketResult.NextMarker;
} else {
marker = undefined;
}
for (const content of obj.ListBucketResult.Contents) {
// /Linux_x64/1041455/REVISIONS
if (content.Key.endsWith('/REVISIONS')) {
const revision = content.Key.split('/')[1].trim();
chromiumRevisions.set(revision, content.LastModified);
}
}
// traverse at most 100 times to avoid blowing up memory; the next sync task will continue
} while (i++ < MAX_DEPTH && marker !== undefined);
}
for (const [ revision, date ] of chromiumRevisions.entries()) {

for (const [revision, date] of chromiumRevisions.entries()) {
// https://github.com/puppeteer/puppeteer/blob/eebf452d38b79bb2ea1a1ba84c3d2ea6f2f9f899/src/node/BrowserFetcher.ts#L40
// chrome: {
// linux: '%s/chromium-browser-snapshots/Linux_x64/%d/%s.zip',
@@ -113,7 +107,7 @@ export class PuppeteerBinary extends AbstractBinary {
size: '-',
isDir: false,
url: `https://storage.googleapis.com/chromium-browser-snapshots/${platform}/${revision}/${name}`,
ignoreDownloadStatuses: [ 404 ],
ignoreDownloadStatuses: [404],
},
];
}
@@ -124,15 +118,14 @@ export class PuppeteerBinary extends AbstractBinary {
}

// https://github.com/puppeteer/puppeteer/blob/eebf452d38b79bb2ea1a1ba84c3d2ea6f2f9f899/src/node/BrowserFetcher.ts#L72
private archiveName(
platform: string,
revision: string,
): string {
private archiveName(platform: string, revision: string): string {
if (platform === 'Linux_x64') return 'chrome-linux';
if (platform === 'Mac' || platform === 'Mac_Arm') return 'chrome-mac';
if (platform === 'Win' || platform === 'Win_x64') {
// Windows archive name changed at r591479.
return parseInt(revision, 10) > 591479 ? 'chrome-win' : 'chrome-win32';
return Number.parseInt(revision, 10) > 591_479
? 'chrome-win'
: 'chrome-win32';
}
return '';
}

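The r591479 cutoff means two Windows revisions on either side of it resolve to different archive names. Since archiveName is private, a restatement as a free function plus a worked example, for illustration only:

function archiveName(platform: string, revision: string): string {
  if (platform === 'Linux_x64') return 'chrome-linux';
  if (platform === 'Mac' || platform === 'Mac_Arm') return 'chrome-mac';
  if (platform === 'Win' || platform === 'Win_x64') {
    // Windows archive was renamed at r591479
    return Number.parseInt(revision, 10) > 591_479 ? 'chrome-win' : 'chrome-win32';
  }
  return '';
}

archiveName('Win_x64', '591480'); // 'chrome-win'   (after the rename)
archiveName('Win_x64', '591479'); // 'chrome-win32' (at or before the cutoff)
archiveName('Linux_x64', '1041455'); // 'chrome-linux'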
@@ -1,6 +1,12 @@
import { SingletonProto } from '@eggjs/tegg';
import { BinaryType } from '../../enum/Binary';
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary';
import { SingletonProto } from 'egg';

import { BinaryType } from '../../enum/Binary.ts';
import {
AbstractBinary,
BinaryAdapter,
type BinaryItem,
type FetchResult,
} from './AbstractBinary.ts';

@SingletonProto()
@BinaryAdapter(BinaryType.Sqlcipher)
@@ -16,7 +22,8 @@ export class SqlcipherBinary extends AbstractBinary {
} = {
'/': [],
};
const s3Url = 'https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher';
const s3Url =
'https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher';
const pkgUrl = 'https://registry.npmjs.com/@journeyapps/sqlcipher';
const data = await this.requestJSON(pkgUrl);
// https://github.com/journeyapps/node-sqlcipher/blob/master/.circleci/config.yml#L407
@@ -44,11 +51,12 @@ export class SqlcipherBinary extends AbstractBinary {
'win32-ia32',
];
for (const version in data.versions) {
const major = parseInt(version.split('.', 1)[0]);
const major = Number.parseInt(version.split('.', 1)[0]);
if (major < 5) continue;
// >= 5.0.0
const pkgVersion = data.versions[version];
const napiVersions = pkgVersion.binary && pkgVersion.binary.napi_versions || [];
const napiVersions =
(pkgVersion.binary && pkgVersion.binary.napi_versions) || [];
const date = data.time[version];
dirItems['/'].push({
name: `v${version}/`,
@@ -74,7 +82,7 @@ export class SqlcipherBinary extends AbstractBinary {
size: '-',
isDir: false,
url: `${s3Url}/v${version}/${name}`,
ignoreDownloadStatuses: [ 404, 403 ],
ignoreDownloadStatuses: [404, 403],
});
}
}

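Note the napi_versions change only adds parentheses: && already binds tighter than ||, so behavior is unchanged and the grouping just makes the precedence explicit. A small self-contained sketch with a hypothetical version manifest:

// hypothetical @journeyapps/sqlcipher version manifest fragment
const pkgVersion: { binary?: { napi_versions?: number[] } } = {
  binary: { napi_versions: [3, 6] },
};
// && binds tighter than ||; the parentheses only document the precedence
const napiVersions = (pkgVersion.binary && pkgVersion.binary.napi_versions) || [];
// => [3, 6]; a manifest without a binary field yields []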
@@ -1,32 +1,38 @@
import {
ImplDecorator,
Inject,
QualifierImplDecoratorUtil,
} from '@eggjs/tegg';
import { RegistryType } from '../../../common/enum/Registry';
import { Registry } from '../../../core/entity/Registry';
import {
EggHttpClient,
EggLogger,
type ImplDecorator,
Logger,
HttpClient,
} from 'egg';

import type { RegistryType } from '../../../common/enum/Registry.ts';
import type { Registry } from '../../../core/entity/Registry.ts';

export const CHANGE_STREAM_ATTRIBUTE = 'CHANGE_STREAM_ATTRIBUTE';
export type ChangesStreamChange = {
export interface ChangesStreamChange {
seq: string;
fullname: string;
};
}

export abstract class AbstractChangeStream {
@Inject()
protected logger: EggLogger;
protected logger: Logger;

@Inject()
protected httpclient: EggHttpClient;
protected httpClient: HttpClient;

abstract getInitialSince(registry: Registry): Promise<string>;
abstract fetchChanges(registry: Registry, since: string): AsyncGenerator<ChangesStreamChange>;
abstract fetchChanges(
registry: Registry,
since: string
): AsyncGenerator<ChangesStreamChange>;

getChangesStreamUrl(registry: Registry, since: string, limit?: number): string {
getChangesStreamUrl(
registry: Registry,
since: string,
limit?: number
): string {
const url = new URL(registry.changeStream);
url.searchParams.set('since', since);
if (limit) {
@@ -36,5 +42,10 @@ export abstract class AbstractChangeStream {
}
}

export const RegistryChangesStream: ImplDecorator<AbstractChangeStream, typeof RegistryType> =
QualifierImplDecoratorUtil.generatorDecorator(AbstractChangeStream, CHANGE_STREAM_ATTRIBUTE);
export const RegistryChangesStream: ImplDecorator<
AbstractChangeStream,
typeof RegistryType
> = QualifierImplDecoratorUtil.generatorDecorator(
AbstractChangeStream,
CHANGE_STREAM_ATTRIBUTE
);

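From the body shown above, getChangesStreamUrl appends the since cursor (and presumably the limit) as query parameters on the registry's changeStream URL. A sketch of the expected output; the limit parameter name is an assumption, since the hunk is truncated before that line:

const url = new URL('https://replicate.npmjs.com/_changes');
url.searchParams.set('since', '9527');
url.searchParams.set('limit', '1000'); // parameter name assumed; hunk is cut off
url.toString();
// => 'https://replicate.npmjs.com/_changes?since=9527&limit=1000'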
@@ -1,35 +1,43 @@
import { SingletonProto } from '@eggjs/tegg';
import { RegistryType } from '../../../common/enum/Registry';
import { Registry } from '../../../core/entity/Registry';
import { E500 } from 'egg-errors';
import { AbstractChangeStream, RegistryChangesStream } from './AbstractChangesStream';
import { SingletonProto } from 'egg';
import { E500 } from 'egg/errors';

import { RegistryType } from '../../../common/enum/Registry.ts';
import type { Registry } from '../../../core/entity/Registry.ts';
import {
AbstractChangeStream,
RegistryChangesStream,
} from './AbstractChangesStream.ts';

@SingletonProto()
@RegistryChangesStream(RegistryType.Cnpmcore)
export class CnpmcoreChangesStream extends AbstractChangeStream {

async getInitialSince(registry: Registry): Promise<string> {
const db = (new URL(registry.changeStream)).origin;
const { status, data } = await this.httpclient.request(db, {
const db = new URL(registry.changeStream).origin;
const { status, data } = await this.httpClient.request(db, {
followRedirect: true,
timeout: 10000,
timeout: 10_000,
dataType: 'json',
});
if (!data.update_seq) {
throw new E500(`get getInitialSince failed: ${data.update_seq}`);
}
const since = String(data.update_seq - 10);
this.logger.warn('[NpmChangesStream.getInitialSince:firstSeq] GET %s status: %s, data: %j, since: %s',
registry.name, status, data, since);
this.logger.warn(
'[NpmChangesStream.getInitialSince:firstSeq] GET %s status: %s, data: %j, since: %s',
registry.name,
status,
data,
since
);
return since;
}

async* fetchChanges(registry: Registry, since: string) {
async *fetchChanges(registry: Registry, since: string) {
const db = this.getChangesStreamUrl(registry, since);
// json mode
const { data } = await this.httpclient.request(db, {
const { data } = await this.httpClient.request(db, {
followRedirect: true,
timeout: 30000,
timeout: 30_000,
dataType: 'json',
gzip: true,
});

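getInitialSince rewinds ten sequence numbers from the registry's current update_seq, so a fresh follower overlaps slightly instead of risking missed changes. A tiny worked example:

// hypothetical response body from the registry root endpoint
const data = { update_seq: 2495018 };
const since = String(data.update_seq - 10);
// => '2495008'; the 10-seq overlap trades a few duplicate changes for no gaps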
@@ -1,50 +1,61 @@
import { SingletonProto } from '@eggjs/tegg';
import { RegistryType } from '../../../common/enum/Registry';
import { Registry } from '../../../core/entity/Registry';
import { E500 } from 'egg-errors';
import { AbstractChangeStream, RegistryChangesStream } from './AbstractChangesStream';
import { SingletonProto } from 'egg';
import { E500 } from 'egg/errors';

const MAX_LIMIT = 10000;
import { RegistryType } from '../../../common/enum/Registry.ts';
import type { Registry } from '../../../core/entity/Registry.ts';
import {
AbstractChangeStream,
RegistryChangesStream,
} from './AbstractChangesStream.ts';

type FetchResults = {
const MAX_LIMIT = 10_000;

interface FetchResults {
results: {
seq: number;
type: string;
id: string;
changes: Record<string, string>[];
gmt_modified: Date,
gmt_modified: Date;
}[];
};
}

@SingletonProto()
@RegistryChangesStream(RegistryType.Cnpmjsorg)
export class CnpmjsorgChangesStream extends AbstractChangeStream {

// cnpmjsorg does not implement the update_seq field
// so we return the current timestamp string by default
async getInitialSince(registry: Registry): Promise<string> {
const since = String((new Date()).getTime());
this.logger.warn(`[CnpmjsorgChangesStream.getInitialSince] since: ${since}, skip query ${registry.changeStream}`);
const since = String(Date.now());
this.logger.warn(
`[CnpmjsorgChangesStream.getInitialSince] since: ${since}, skip query ${registry.changeStream}`
);
return since;
}

private async tryFetch(registry: Registry, since: string, limit = 1000): Promise<{ data: FetchResults }> {
private async tryFetch(
registry: Registry,
since: string,
limit = 1000
): Promise<{ data: FetchResults }> {
if (limit > MAX_LIMIT) {
throw new E500(`limit too large, current since: ${since}, limit: ${limit}`);
throw new E500(
`limit too large, current since: ${since}, limit: ${limit}`
);
}
const db = this.getChangesStreamUrl(registry, since, limit);
// json mode
const res = await this.httpclient.request<FetchResults>(db, {
const res = await this.httpClient.request<FetchResults>(db, {
followRedirect: true,
timeout: 30000,
timeout: 30_000,
dataType: 'json',
gzip: true,
});
const { results = [] } = res.data;
if (results?.length >= limit) {
const [ first ] = results;
const [first] = results;
const last = results[results.length - 1];
if (first.gmt_modified === last.gmt_modified) {
if (first.gmt_modified === last?.gmt_modified) {
return await this.tryFetch(registry, since, limit + 1000);
}
}
@@ -52,7 +63,7 @@ export class CnpmjsorgChangesStream extends AbstractChangeStream {
return res;
}

async* fetchChanges(registry: Registry, since: string) {
async *fetchChanges(registry: Registry, since: string) {
// ref: https://github.com/cnpm/cnpmjs.org/pull/1734
// cnpmjsorg cannot compute an accurate seq
// since is a timestamp, so we must ensure the first and last gmtModified in one response are not equal
@@ -60,7 +71,7 @@ export class CnpmjsorgChangesStream extends AbstractChangeStream {

if (data.results?.length > 0) {
for (const change of data.results) {
const seq = new Date(change.gmt_modified).getTime() + '';
const seq = `${new Date(change.gmt_modified).getTime()}`;
const fullname = change.id;
if (seq && fullname && seq !== since) {
const change = {

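Because cnpmjsorg keys changes by gmt_modified timestamps rather than a real seq, tryFetch grows the page size by 1000 until the first and last rows carry different timestamps, so no timestamp's changes are cut off at the page boundary, capped by MAX_LIMIT. A schematic restatement of that rule (queryChanges is a hypothetical stand-in for the registry request):

interface Row { gmt_modified: string }
// hypothetical data source standing in for the registry request
declare function queryChanges(since: string, limit: number): Promise<Row[]>;

async function fetchPage(since: string, limit = 1000): Promise<Row[]> {
  if (limit > 10_000) throw new Error(`limit too large: ${limit}`);
  const rows = await queryChanges(since, limit);
  // a full page whose first and last timestamps are equal may split a
  // timestamp's changes across the boundary, so retry with a larger window
  if (rows.length >= limit && rows[0].gmt_modified === rows.at(-1)?.gmt_modified) {
    return fetchPage(since, limit + 1000);
  }
  return rows;
}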
@@ -1,55 +1,84 @@
import { SingletonProto } from '@eggjs/tegg';
import { E500 } from 'egg-errors';
import { RegistryType } from '../../../common/enum/Registry';
import { Registry } from '../../../core/entity/Registry';
import { AbstractChangeStream, ChangesStreamChange, RegistryChangesStream } from './AbstractChangesStream';
import { SingletonProto } from 'egg';
import { E500 } from 'egg/errors';

import { RegistryType } from '../../../common/enum/Registry.ts';
import type { Registry } from '../../../core/entity/Registry.ts';
import {
AbstractChangeStream,
RegistryChangesStream,
type ChangesStreamChange,
} from './AbstractChangesStream.ts';

@SingletonProto()
@RegistryChangesStream(RegistryType.Npm)
export class NpmChangesStream extends AbstractChangeStream {

async getInitialSince(registry: Registry): Promise<string> {
const db = (new URL(registry.changeStream)).origin;
const { status, data } = await this.httpclient.request(db, {
const db = new URL(registry.changeStream).origin;
const { status, data } = await this.httpClient.request(db, {
followRedirect: true,
timeout: 10000,
timeout: 10_000,
dataType: 'json',
headers: {
'npm-replication-opt-in': 'true',
},
});
const since = String(data.update_seq - 10);
if (!data.update_seq) {
throw new E500(`get getInitialSince failed: ${data.update_seq}`);
}
this.logger.warn('[NpmChangesStream.getInitialSince] GET %s status: %s, data: %j, since: %s',
registry.name, registry.changeStream, status, data, since);
this.logger.warn(
'[NpmChangesStream.getInitialSince] GET %s status: %s, data: %j, since: %s',
registry.name,
registry.changeStream,
status,
data,
since
);
return since;
}

async* fetchChanges(registry: Registry, since: string) {
async *fetchChanges(
registry: Registry,
since: string
): AsyncGenerator<ChangesStreamChange> {
// https://github.com/orgs/community/discussions/152515
const db = this.getChangesStreamUrl(registry, since);
const { res } = await this.httpclient.request(db, {
streaming: true,
timeout: 60000,
const { data, headers } = await this.httpClient.request(db, {
timeout: 60_000,
headers: {
'npm-replication-opt-in': 'true',
},
dataType: 'json',
gzip: true,
});
const count = data.results?.length;
const last_seq = data.last_seq;
this.logger.info(
'[NpmChangesStream.fetchChanges] %s, count: %s, last_seq: %s, headers: %j',
db,
count,
last_seq,
headers
);

let buf = '';
for await (const chunk of res) {
const text = chunk.toString();
const lines = text.split('\n');

for (const line of lines) {
const content = buf + line;
const match = /"seq":(\d+),"id":"([^"]+)"/g.exec(content);
const seq = match?.[1];
const fullname = match?.[2];
if (seq && fullname) {
buf = '';
const change: ChangesStreamChange = { fullname, seq };
if (data.results?.length > 0) {
for (const change of data.results) {
// {
// seq: 2495018,
// id: 'ng-create-all-project',
// changes: [ { rev: '3-be3a014aab8e379ba28a28adb8e10142' }, [length]: 1 ],
// deleted: true
// },
const seq = String(change.seq);
const fullname = change.id;
if (seq && fullname && seq !== since) {
const change = {
fullname,
seq,
};
yield change;
} else {
buf += line;
}
}
}
}

}

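The new JSON mode replaces the line-buffered regex scanner with a plain loop over data.results; each entry carries the seq cursor and the package id, as in the sample comment. A sketch of consuming one entry:

// shape taken from the sample entry in the comments above
const entry = {
  seq: 2495018,
  id: 'ng-create-all-project',
  changes: [{ rev: '3-be3a014aab8e379ba28a28adb8e10142' }],
  deleted: true,
};
const change = { fullname: entry.id, seq: String(entry.seq) };
// => { fullname: 'ng-create-all-project', seq: '2495018' }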
@@ -1,27 +1,34 @@
import { performance } from 'perf_hooks';
import { Advice, AdviceContext, IAdvice } from '@eggjs/tegg/aop';
import { Inject } from '@eggjs/tegg';
import { EggLogger } from 'egg';
import { performance } from 'node:perf_hooks';

import { Advice, type AdviceContext, type IAdvice } from 'egg/aop';
import { Inject, Logger } from 'egg';

const START = Symbol('AsyncTimer#start');
const SUCCEED = Symbol('AsyncTimer#succeed');

// automatically log a performance timing entry for every async function call
@Advice()
export class AsyncTimer implements IAdvice {
@Inject()
private readonly logger: EggLogger;
private start: number;
private succeed = true;
private readonly logger: Logger;

async beforeCall() {
this.start = performance.now();
async beforeCall(ctx: AdviceContext) {
ctx.set(START, performance.now());
ctx.set(SUCCEED, true);
}

async afterThrow() {
this.succeed = false;
async afterThrow(ctx: AdviceContext) {
ctx.set(SUCCEED, false);
}

async afterFinally(ctx: AdviceContext) {
const ms = Math.floor((performance.now() - this.start) * 1000) / 1000;
this.logger.info('[%s] [%s:%s|%s]',
ms, ctx.that.constructor.name, ctx.method, this.succeed ? 'T' : 'F');
const ms = Math.floor((performance.now() - ctx.get(START)) * 1000) / 1000;
this.logger.info(
'[%s] [%s:%s|%s]',
ms,
ctx.that.constructor.name,
ctx.method,
ctx.get(SUCCEED) ? 'T' : 'F'
);
}
}

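Moving start/succeed from instance fields onto the AdviceContext matters because the advice is a singleton: one shared instance times every concurrent call, so per-call state on `this` would race across overlapping invocations. Keying per-call state on symbols in the context keeps invocations isolated. A minimal sketch of the pattern, with a plain Map standing in for AdviceContext:

import { performance } from 'node:perf_hooks';

const START = Symbol('start');

// per-call state lives on a context object, never on the shared singleton
function beforeCall(ctx: Map<symbol, unknown>) {
  ctx.set(START, performance.now());
}

function afterFinally(ctx: Map<symbol, unknown>) {
  const ms = performance.now() - (ctx.get(START) as number);
  console.info('call took %s ms', ms);
}

Each invocation gets its own context, so two overlapping calls cannot clobber each other's start time.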
@@ -1,5 +1,5 @@
import dayjs from 'dayjs';
import customParseFormat from 'dayjs/plugin/customParseFormat';
import customParseFormat from 'dayjs/plugin/customParseFormat.js';
dayjs.extend(customParseFormat);

export default dayjs;

@@ -3,6 +3,7 @@ export enum BinaryType {
Bucket = 'bucket',
Cypress = 'cypress',
Electron = 'electron',
Firefox = 'firefox',
GitHub = 'github',
Imagemin = 'imagemin',
Node = 'node',

4
app/common/enum/Total.ts
Normal file
@@ -0,0 +1,4 @@
export enum TotalType {
PackageCount = 'packageCount',
PackageVersionCount = 'packageVersionCount',
}
@@ -1,11 +1,11 @@
export enum LoginResultCode {
UserNotFound,
Success,
Fail,
UserNotFound = 0,
Success = 1,
Fail = 2,
}

export enum WanStatusCode {
UserNotFound,
Unbound,
Bound,
UserNotFound = 0,
Unbound = 1,
Bound = 2,
}

@@ -2,5 +2,6 @@
"name": "cnpmcore-common",
"eggModule": {
"name": "cnpmcoreCommon"
}
},
"type": "module"
}

@@ -1,8 +1,9 @@
import { CnpmcoreConfig } from '../port/config';
import { Readable } from 'stream';
import { IncomingHttpHeaders } from 'http';
import { EggContext } from '@eggjs/tegg';
import { estypes } from '@elastic/elasticsearch';
import type { Readable } from 'node:stream';
import type { IncomingHttpHeaders } from 'node:http';

import type { Context } from 'egg';
import type { estypes } from '@elastic/elasticsearch';
import type { CnpmcoreConfig } from '../port/config.ts';

export interface UploadResult {
key: string;
@@ -19,8 +20,8 @@ export interface UploadOptions {

export interface AppendOptions {
key: string;
position?: string,
headers?: IncomingHttpHeaders,
position?: string;
headers?: IncomingHttpHeaders;
}

export interface DownloadOptions {
@@ -40,7 +41,11 @@ export interface NFSClient {

createDownloadStream(key: string): Promise<Readable | undefined>;

download(key: string, filepath: string, options: DownloadOptions): Promise<void>;
download(
key: string,
filepath: string,
options: DownloadOptions
): Promise<void>;

url?(key: string): string;
}
@@ -52,6 +57,7 @@ export interface QueueAdapter {
}

export interface SearchAdapter {
// oxlint-disable-next-line typescript-eslint/no-explicit-any
search<T>(query: any): Promise<estypes.SearchHitsMetadata<T>>;
upsert<T>(id: string, document: T): Promise<string>;
delete(id: string): Promise<string>;
@@ -67,14 +73,11 @@ export interface userResult {
email: string;
}
export interface AuthClient {
getAuthUrl(ctx: EggContext): Promise<AuthUrlResult>;
getAuthUrl(ctx: Context): Promise<AuthUrlResult>;
ensureCurrentUser(): Promise<userResult | null>;
}

declare module 'egg' {
// eslint-disable-next-line
// @ts-ignore
// avoid TS2310 Type 'EggAppConfig' recursively references itself as a base type.
interface EggAppConfig {
cnpmcore: CnpmcoreConfig;
}

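With the EggAppConfig augmentation in place, application code gets typed access to the cnpmcore section of the config. Roughly, under the assumption that the config exposes a sourceRegistry field (as the CNPMCORE_CONFIG_SOURCE_REGISTRY setting suggests):

// illustrative only: app is an Egg Application instance
import type { Application } from 'egg';

function sourceRegistry(app: Application): string {
  // typed thanks to the `declare module 'egg'` augmentation above
  return app.config.cnpmcore.sourceRegistry;
}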
@@ -1,5 +1,5 @@
import { Entity, EntityData } from './Entity';
import { EasyData, EntityUtil } from '../util/EntityUtil';
import { Entity, type EntityData } from './Entity.ts';
import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';

interface BinaryData extends EntityData {
binaryId: string;
@@ -21,7 +21,7 @@ export class Binary extends Entity {
isDir: boolean;
size: number;
date: string;
sourceUrl?: string;
sourceUrl: string;
ignoreDownloadStatuses?: number[];

constructor(data: BinaryData) {

@@ -12,11 +12,11 @@ export class BugVersion {
this.data = data;
}

listAllPackagesHasBugs(): Array<string> {
listAllPackagesHasBugs(): string[] {
return Object.keys(this.data);
}

listBugVersions(pkgName: string): Array<string> {
listBugVersions(pkgName: string): string[] {
const bugVersionPackage = this.data[pkgName];
if (!bugVersionPackage) {
return [];
@@ -31,18 +31,24 @@ export class BugVersion {
}

// TODO manifest typing
// oxlint-disable-next-line typescript-eslint/no-explicit-any
fixManifest(bugVersionManifest: any, fixVersionManifest: any): any {
// If the tarball is the same, the manifest has already been fixed.
if (bugVersionManifest.dist.tarball === fixVersionManifest.dist.tarball) {
return;
}
const advice = this.fixVersion(bugVersionManifest.name, bugVersionManifest.version);
const advice = this.fixVersion(
bugVersionManifest.name,
bugVersionManifest.version
);
if (!advice) {
return;
}
const newManifest = JSON.parse(JSON.stringify(fixVersionManifest));
const newManifest = structuredClone(fixVersionManifest);
const hotfixDeprecated = `[WARNING] Use ${advice.version} instead of ${bugVersionManifest.version}, reason: ${advice.reason}`;
newManifest.deprecated = bugVersionManifest.deprecated ? `${bugVersionManifest.deprecated} (${hotfixDeprecated})` : hotfixDeprecated;
newManifest.deprecated = bugVersionManifest.deprecated
? `${bugVersionManifest.deprecated} (${hotfixDeprecated})`
: hotfixDeprecated;
// don't change version
newManifest.version = bugVersionManifest.version;
return newManifest;

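Swapping JSON.parse(JSON.stringify(...)) for structuredClone keeps the deep-copy semantics while skipping the serialize/parse round trip; for plain JSON manifests the two agree. A small illustration with a hypothetical manifest:

const manifest = {
  name: 'foo',
  version: '1.0.0',
  dist: { tarball: 'https://example.com/foo-1.0.0.tgz' },
};
const viaClone = structuredClone(manifest);
// a deep copy: mutating the clone leaves the original untouched
viaClone.version = '1.0.1';
// manifest.version is still '1.0.0'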
@@ -1,10 +1,11 @@
import { Entity, EntityData } from './Entity';
import { EasyData, EntityUtil } from '../util/EntityUtil';
import { Entity, type EntityData } from './Entity.ts';
import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';

interface ChangeData extends EntityData {
changeId: string;
type: string;
targetName: string;
// oxlint-disable-next-line typescript-eslint/no-explicit-any
data: any;
}

@@ -12,6 +13,7 @@ export class Change extends Entity {
changeId: string;
type: string;
targetName: string;
// oxlint-disable-next-line typescript-eslint/no-explicit-any
data: any;

constructor(data: ChangeData) {

@@ -1,5 +1,5 @@
import { Entity, EntityData } from './Entity';
import { EasyData, EntityUtil } from '../util/EntityUtil';
import { Entity, type EntityData } from './Entity.ts';
import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';

interface DistData extends EntityData {
distId: string;

@@ -1,9 +1,13 @@
import { Entity, EntityData } from './Entity';
import { EasyData, EntityUtil } from '../util/EntityUtil';
import { HookType } from '../../common/enum/Hook';
import crypto from 'crypto';
import crypto from 'node:crypto';

export type CreateHookData = Omit<EasyData<HookData, 'hookId'>, 'enable' | 'latestTaskId'>;
import { Entity, type EntityData } from './Entity.ts';
import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
import type { HookType } from '../../common/enum/Hook.ts';

export type CreateHookData = Omit<
EasyData<HookData, 'hookId'>,
'enable' | 'latestTaskId'
>;

export interface HookData extends EntityData {
hookId: string;
@@ -39,10 +43,11 @@ export class Hook extends Entity {
}

static create(data: CreateHookData): Hook {
const hookData: EasyData<HookData, 'hookId'> = Object.assign({}, data, {
const hookData: EasyData<HookData, 'hookId'> = {
...data,
enable: true,
latestTaskId: undefined,
});
};
const newData = EntityUtil.defaultData(hookData, 'hookId');
return new Hook(newData);
}
@@ -50,7 +55,8 @@ export class Hook extends Entity {
// the payload can be very large; stringifying it multiple times wastes too much CPU
signPayload(payload: object) {
const payloadStr = JSON.stringify(payload);
const digest = crypto.createHmac('sha256', this.secret)
const digest = crypto
.createHmac('sha256', this.secret)
.update(JSON.stringify(payload))
.digest('hex');
return {

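The comment above explains why the payload is stringified once up front, although the diff as shown still calls JSON.stringify a second time inside update. A standalone sketch of the signing step that reuses the pre-computed string, assuming that is the intent:

import crypto from 'node:crypto';

function sign(secret: string, payload: object) {
  const payloadStr = JSON.stringify(payload); // stringify once, reuse below
  const digest = crypto
    .createHmac('sha256', secret)
    .update(payloadStr)
    .digest('hex');
  return { payloadStr, digest };
}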
@@ -1,4 +1,4 @@
|
||||
import { HookEventType } from '../../common/enum/Hook';
|
||||
import { HookEventType } from '../../common/enum/Hook.ts';
|
||||
|
||||
export interface PublishChangePayload {
|
||||
'dist-tag'?: string;
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { Entity, EntityData } from './Entity';
|
||||
import { EasyData, EntityUtil } from '../util/EntityUtil';
|
||||
import { Dist } from './Dist';
|
||||
import { getFullname } from '../../common/PackageUtil';
|
||||
import { Entity, type EntityData } from './Entity.ts';
|
||||
import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
|
||||
import { Dist } from './Dist.ts';
|
||||
import { getFullname } from '../../common/PackageUtil.ts';
|
||||
|
||||
interface PackageData extends EntityData {
|
||||
scope: string;
|
||||
@@ -22,6 +22,13 @@ export enum DIST_NAMES {
|
||||
ABBREVIATED_MANIFESTS = 'abbreviated_manifests.json',
|
||||
}
|
||||
|
||||
export function isPkgManifest(fileType: DIST_NAMES) {
|
||||
return (
|
||||
fileType === DIST_NAMES.FULL_MANIFESTS ||
|
||||
fileType === DIST_NAMES.ABBREVIATED_MANIFESTS
|
||||
);
|
||||
}
|
||||
|
||||
interface FileInfo {
|
||||
size: number;
|
||||
shasum: string;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { Entity, EntityData } from './Entity';
|
||||
import { EasyData, EntityUtil } from '../util/EntityUtil';
|
||||
import { Entity, type EntityData } from './Entity.ts';
|
||||
import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
|
||||
|
||||
interface PackageTagData extends EntityData {
|
||||
packageId: string;
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { Dist } from './Dist';
|
||||
import { Entity, EntityData } from './Entity';
|
||||
import { EasyData, EntityUtil } from '../util/EntityUtil';
|
||||
import { PaddingSemVer } from './PaddingSemVer';
|
||||
import type { Dist } from './Dist.ts';
|
||||
import { Entity, type EntityData } from './Entity.ts';
|
||||
import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
|
||||
import { PaddingSemVer } from './PaddingSemVer.ts';
|
||||
|
||||
interface PackageVersionData extends EntityData {
|
||||
packageId: string;
|
||||
@@ -48,7 +48,9 @@ export class PackageVersion extends Entity {
|
||||
}
|
||||
}
|
||||
|
||||
static create(data: EasyData<PackageVersionData, 'packageVersionId'>): PackageVersion {
|
||||
static create(
|
||||
data: EasyData<PackageVersionData, 'packageVersionId'>
|
||||
): PackageVersion {
|
||||
const newData = EntityUtil.defaultData(data, 'packageVersionId');
|
||||
return new PackageVersion(newData);
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { Entity, EntityData } from './Entity';
|
||||
import { EasyData, EntityUtil } from '../util/EntityUtil';
|
||||
import { Entity, type EntityData } from './Entity.ts';
|
||||
import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
|
||||
|
||||
interface PackageVersionBlockData extends EntityData {
|
||||
packageVersionBlockId: string;
|
||||
@@ -22,7 +22,9 @@ export class PackageVersionBlock extends Entity {
|
||||
this.reason = data.reason;
|
||||
}
|
||||
|
||||
static create(data: EasyData<PackageVersionBlockData, 'packageVersionBlockId'>): PackageVersionBlock {
|
||||
static create(
|
||||
data: EasyData<PackageVersionBlockData, 'packageVersionBlockId'>
|
||||
): PackageVersionBlock {
|
||||
const newData = EntityUtil.defaultData(data, 'packageVersionBlockId');
|
||||
return new PackageVersionBlock(newData);
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { Dist } from './Dist';
|
||||
import { Entity, EntityData } from './Entity';
|
||||
import { EasyData, EntityUtil } from '../util/EntityUtil';
|
||||
import { Entity, type EntityData } from './Entity.ts';
|
||||
import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
|
||||
import type { Dist } from './Dist.ts';
|
||||
|
||||
interface PackageVersionFileData extends EntityData {
|
||||
packageVersionFileId: string;
|
||||
@@ -33,10 +33,14 @@ export class PackageVersionFile extends Entity {
|
||||
}
|
||||
|
||||
get path() {
|
||||
return this.directory === '/' ? `/${this.name}` : `${this.directory}/${this.name}`;
|
||||
return this.directory === '/'
|
||||
? `/${this.name}`
|
||||
: `${this.directory}/${this.name}`;
|
||||
}
|
||||
|
||||
static create(data: EasyData<PackageVersionFileData, 'packageVersionFileId'>): PackageVersionFile {
|
||||
static create(
|
||||
data: EasyData<PackageVersionFileData, 'packageVersionFileId'>
|
||||
): PackageVersionFile {
|
||||
const newData = EntityUtil.defaultData(data, 'packageVersionFileId');
|
||||
return new PackageVersionFile(newData);
|
||||
}
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
import { Entity, EntityData } from './Entity';
|
||||
import { EasyData, EntityUtil } from '../util/EntityUtil';
|
||||
import { Entity, type EntityData } from './Entity.ts';
|
||||
import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
|
||||
|
||||
interface PackageVersionManifestData extends EntityData {
|
||||
packageId: string;
|
||||
packageVersionId: string;
|
||||
packageVersionManifestId: string;
|
||||
// oxlint-disable-next-line typescript-eslint/no-explicit-any
|
||||
manifest: any;
|
||||
}
|
||||
|
||||
@@ -12,6 +13,7 @@ export class PackageVersionManifest extends Entity {
|
||||
packageId: string;
|
||||
packageVersionId: string;
|
||||
packageVersionManifestId: string;
|
||||
// oxlint-disable-next-line typescript-eslint/no-explicit-any
|
||||
manifest: any;
|
||||
|
||||
constructor(data: PackageVersionManifestData) {
|
||||
@@ -22,7 +24,9 @@ export class PackageVersionManifest extends Entity {
|
||||
this.manifest = data.manifest;
|
||||
}
|
||||
|
||||
static create(data: EasyData<PackageVersionManifestData, 'packageVersionManifestId'>): PackageVersionManifest {
|
||||
static create(
|
||||
data: EasyData<PackageVersionManifestData, 'packageVersionManifestId'>
|
||||
): PackageVersionManifest {
|
||||
const newData = EntityUtil.defaultData(data, 'packageVersionManifestId');
|
||||
return new PackageVersionManifest(newData);
|
||||
}
|
||||
|
||||
@@ -14,9 +14,10 @@ export class PaddingSemVer {
       return;
     }
     this.semver = new SemVer(semver);
-    if ((this.semver as any).includePrerelease) {
+    // @ts-expect-error type definition is not correct
+    if (this.semver.includePrerelease) {
       this.isPreRelease = true;
-    } else if (this.semver.prerelease && this.semver.prerelease.length) {
+    } else if (this.semver.prerelease && this.semver.prerelease.length > 0) {
       this.isPreRelease = true;
     } else {
       this.isPreRelease = false;
@@ -25,9 +26,10 @@ export class PaddingSemVer {
 
   get paddingVersion(): string {
     if (!this._paddingVersion) {
-      this._paddingVersion = PaddingSemVer.paddingVersion(this.semver.major)
-        + PaddingSemVer.paddingVersion(this.semver.minor)
-        + PaddingSemVer.paddingVersion(this.semver.patch);
+      this._paddingVersion =
+        PaddingSemVer.paddingVersion(this.semver.major) +
+        PaddingSemVer.paddingVersion(this.semver.minor) +
+        PaddingSemVer.paddingVersion(this.semver.patch);
     }
     return this._paddingVersion;
   }
@@ -37,7 +39,8 @@ export class PaddingSemVer {
   static paddingVersion(v: number) {
     const t = String(v);
     if (t.length <= 16) {
-      const padding = new Array(16 - t.length).fill(0)
+      const padding = Array.from({ length: 16 - t.length })
+        .fill(0)
         .join('');
       return padding + t;
     }
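A minimal sketch of the idea behind PaddingSemVer (not the cnpmcore implementation; the `pad16` helper is hypothetical): zero-padding each semver part to a fixed width makes plain string comparison agree with numeric version order, which is what allows the padded value to be stored, indexed, and compared in SQL.

```ts
// Hedged sketch: fixed-width zero padding turns numeric version order
// into lexicographic string order.
function pad16(v: number): string {
  return String(v).padStart(16, '0');
}

function paddingVersion(version: string): string {
  const [major, minor, patch] = version.split('.').map(Number);
  return pad16(major) + pad16(minor) + pad16(patch);
}

// '2.10.0' correctly sorts after '2.9.1' once padded:
console.log(paddingVersion('2.9.1') < paddingVersion('2.10.0')); // true
// Unpadded strings get it wrong: '2.10.0' < '2.9.1' lexicographically.
```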
@@ -1,15 +1,17 @@
-import { Entity, EntityData } from './Entity';
-import { EasyData } from '../util/EntityUtil';
-import { DIST_NAMES } from './Package';
-import { isPkgManifest } from '../service/ProxyCacheService';
-import { PROXY_CACHE_DIR_NAME } from '../../common/constants';
+import { Entity, type EntityData } from './Entity.ts';
+import { isPkgManifest, type DIST_NAMES } from './Package.ts';
+import type { EasyData } from '../util/EntityUtil.ts';
+import { PROXY_CACHE_DIR_NAME } from '../../common/constants.ts';
 interface ProxyCacheData extends EntityData {
   fullname: string;
   fileType: DIST_NAMES;
   version?: string;
 }
 
-export type CreateProxyCacheData = Omit<EasyData<ProxyCacheData, 'id'>, 'id'| 'filePath'>;
+export type CreateProxyCacheData = Omit<
+  EasyData<ProxyCacheData, 'id'>,
+  'id' | 'filePath'
+>;
 
 export class ProxyCache extends Entity {
   readonly fullname: string;
@@ -38,5 +40,4 @@ export class ProxyCache extends Entity {
     data.updatedAt = new Date();
     return data;
   }
-
 }
@@ -1,6 +1,6 @@
-import { Entity, EntityData } from './Entity';
-import { EasyData, EntityUtil } from '../util/EntityUtil';
-import type { RegistryType } from '../../common/enum/Registry';
+import { Entity, type EntityData } from './Entity.ts';
+import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
+import type { RegistryType } from '../../common/enum/Registry.ts';
 
 interface RegistryData extends EntityData {
   name: string;
@@ -12,7 +12,10 @@ interface RegistryData extends EntityData {
   authToken?: string;
 }
 
-export type CreateRegistryData = Omit<EasyData<RegistryData, 'registryId'>, 'id'>;
+export type CreateRegistryData = Omit<
+  EasyData<RegistryData, 'registryId'>,
+  'id'
+>;
 
 export class Registry extends Entity {
   name: string;
@@ -35,7 +38,10 @@ export class Registry extends Entity {
   }
 
   public static create(data: CreateRegistryData): Registry {
-    const newData = EntityUtil.defaultData<RegistryData, 'registryId'>(data, 'registryId');
+    const newData = EntityUtil.defaultData<RegistryData, 'registryId'>(
+      data,
+      'registryId'
+    );
     return new Registry(newData);
   }
 }
@@ -1,5 +1,5 @@
-import { Entity, EntityData } from './Entity';
-import { EasyData, EntityUtil } from '../util/EntityUtil';
+import { Entity, type EntityData } from './Entity.ts';
+import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
 
 interface ScopeData extends EntityData {
   name: string;
@@ -1,5 +1,5 @@
-import { Range, Comparator } from 'semver';
-import { PaddingSemVer } from './PaddingSemVer';
+import { Comparator, Range } from 'semver';
+import { PaddingSemVer } from './PaddingSemVer.ts';
 
 const OPERATOR_MAP = {
   '<': '$lt',
@@ -21,7 +21,8 @@ export class SqlRange {
   }
 
   private comparatorToSql(comparator: Comparator) {
-    if (comparator.semver === (Comparator as any).ANY) {
+    // @ts-expect-error type definition is not correct
+    if (comparator.semver === Comparator.ANY) {
       return {
         $and: [
           {
@@ -38,11 +39,13 @@ export class SqlRange {
       };
     }
     const paddingSemver = new PaddingSemVer(comparator.semver);
-    const operator = OPERATOR_MAP[comparator.operator as keyof typeof OPERATOR_MAP];
+    const operator =
+      OPERATOR_MAP[comparator.operator as keyof typeof OPERATOR_MAP];
     if (!operator) {
       throw new Error(`unknown operator ${comparator.operator}`);
     }
-    this._containPreRelease = this._containPreRelease || paddingSemver.isPreRelease;
+    this._containPreRelease =
+      this._containPreRelease || paddingSemver.isPreRelease;
     return {
       $and: [
         {
@@ -59,8 +62,8 @@ export class SqlRange {
     };
   }
 
-  private comparatorSetToSql(comparatorSet: Array<Comparator>) {
-    const condition: Array<object> = [];
+  private comparatorSetToSql(comparatorSet: Comparator[]) {
+    const condition: object[] = [];
     for (const comparator of comparatorSet) {
       condition.push(this.comparatorToSql(comparator));
     }
@@ -68,7 +71,7 @@ export class SqlRange {
   }
 
   private generateWhere() {
-    const conditions: Array<object> = [];
+    const conditions: object[] = [];
     for (const rangeSet of this.range.set) {
       conditions.push(this.comparatorSetToSql(rangeSet as Comparator[]));
     }
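A hedged sketch of the mapping SqlRange performs: each semver comparator becomes a condition on the padded version column, the comparators of one set are AND-ed, and the sets of a range are OR-ed. The `rangeToWhere` function and the `paddingVersion` field name here are illustrative stand-ins, not the real cnpmcore API.

```ts
// Hedged sketch, assuming a MongoDB-style condition object as in the diff above.
import { Range } from 'semver';

const OPERATOR_MAP: Record<string, string> = {
  '<': '$lt',
  '<=': '$lte',
  '>': '$gt',
  '>=': '$gte',
  '': '$eq',
};

function rangeToWhere(range: Range): object {
  // range.set is Comparator[][]: outer array is OR-ed, inner arrays AND-ed.
  const orConditions = range.set.map(comparatorSet => ({
    $and: comparatorSet.map(c => ({
      paddingVersion: {
        [OPERATOR_MAP[c.operator] ?? '$eq']: c.semver.version,
      },
    })),
  }));
  return { $or: orConditions };
}

// '>=1.2.0 <2.0.0 || >=3.0.0'
// → { $or: [ { $and: [cond, cond] }, { $and: [cond] } ] }
console.log(JSON.stringify(rangeToWhere(new Range('>=1.2.0 <2.0.0 || >=3.0.0'))));
```

The real implementation compares against the zero-padded version string (see PaddingSemVer above) rather than the raw semver text, so the SQL operators behave numerically.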
@@ -1,20 +1,22 @@
-import os from 'os';
-import path from 'path';
-import { Entity, EntityData } from './Entity';
-import { EasyData, EntityUtil } from '../util/EntityUtil';
-import { TaskType, TaskState } from '../../common/enum/Task';
-import { PROXY_CACHE_DIR_NAME } from '../../common/constants';
-import dayjs from '../../common/dayjs';
-import { HookEvent } from './HookEvent';
-import { DIST_NAMES } from './Package';
-import { isPkgManifest } from '../service/ProxyCacheService';
-import { InternalServerError } from 'egg-errors';
+import os from 'node:os';
+import path from 'node:path';
+
+import { InternalServerError } from 'egg/errors';
+
+import { Entity, type EntityData } from './Entity.ts';
+import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
+import { TaskState, TaskType } from '../../common/enum/Task.ts';
+import { PROXY_CACHE_DIR_NAME } from '../../common/constants.ts';
+import dayjs from '../../common/dayjs.ts';
+import type { HookEvent } from './HookEvent.ts';
+import { isPkgManifest, type DIST_NAMES } from './Package.ts';
 
 export const HOST_NAME = os.hostname();
 export const PID = process.pid;
 
 export interface TaskBaseData {
   taskWorker: string;
+  shouldNotMerge?: boolean;
 }
 
 export interface TaskData<T = TaskBaseData> extends EntityData {
@@ -32,7 +34,7 @@ export interface TaskData<T = TaskBaseData> extends EntityData {
   bizId?: string;
 }
 
-export type SyncPackageTaskOptions = {
+export interface SyncPackageTaskOptions {
   authorId?: string;
   authorIp?: string;
   tips?: string;
@@ -41,14 +43,14 @@ export type SyncPackageTaskOptions = {
   // force sync history version
   forceSyncHistory?: boolean;
   registryId?: string;
-  specificVersions?: Array<string>;
-};
+  specificVersions?: string[];
+}
 
-export type UpdateProxyCacheTaskOptions = {
-  fullname: string,
-  version?: string,
-  fileType: DIST_NAMES,
-};
+export interface UpdateProxyCacheTaskOptions {
+  fullname: string;
+  version?: string;
+  fileType: DIST_NAMES;
+}
 
 export interface CreateHookTaskData extends TaskBaseData {
   hookEvent: HookEvent;
@@ -65,22 +67,24 @@ export interface CreateSyncPackageTaskData extends TaskBaseData {
   skipDependencies?: boolean;
   syncDownloadData?: boolean;
   forceSyncHistory?: boolean;
-  specificVersions?: Array<string>;
+  specificVersions?: string[];
 }
 
 export interface CreateUpdateProxyCacheTaskData extends TaskBaseData {
-  fullname: string,
-  version?: string,
-  fileType: DIST_NAMES,
-  filePath: string
+  fullname: string;
+  version?: string;
+  fileType: DIST_NAMES;
+  filePath: string;
 }
 
+export type SyncBinaryTaskData = Record<string, unknown> & TaskBaseData;
+
 export interface ChangesStreamTaskData extends TaskBaseData {
   since: string;
-  last_package?: string,
-  last_package_created?: Date,
-  task_count?: number,
-  registryId?: string,
+  last_package?: string;
+  last_package_created?: Date;
+  task_count?: number;
+  registryId?: string;
 }
 
 export interface TaskUpdateCondition {
@@ -93,6 +97,7 @@ export type TriggerHookTask = Task<TriggerHookTaskData>;
 export type CreateSyncPackageTask = Task<CreateSyncPackageTaskData>;
 export type ChangesStreamTask = Task<ChangesStreamTaskData>;
 export type CreateUpdateProxyCacheTask = Task<CreateUpdateProxyCacheTaskData>;
+export type SyncBinaryTask = Task<SyncBinaryTaskData>;
 
 export class Task<T extends TaskBaseData = TaskBaseData> extends Entity {
   taskId: string;
@@ -134,12 +139,17 @@ export class Task<T extends TaskBaseData = TaskBaseData> extends Entity {
     this.data.taskWorker = `${HOST_NAME}:${PID}`;
   }
 
-  private static create<T extends TaskBaseData>(data: EasyData<TaskData<T>, 'taskId'>): Task<T> {
+  private static create<T extends TaskBaseData>(
+    data: EasyData<TaskData<T>, 'taskId'>
+  ): Task<T> {
     const newData = EntityUtil.defaultData(data, 'taskId');
     return new Task(newData);
   }
 
-  public static createSyncPackage(fullname: string, options?: SyncPackageTaskOptions): CreateSyncPackageTask {
+  public static createSyncPackage(
+    fullname: string,
+    options?: SyncPackageTaskOptions
+  ): CreateSyncPackageTask {
     const data = {
       type: TaskType.SyncPackage,
       state: TaskState.Waiting,
@@ -162,7 +172,11 @@ export class Task<T extends TaskBaseData = TaskBaseData> extends Entity {
     return task;
   }
 
-  public static createChangesStream(targetName: string, registryId = '', since = ''): ChangesStreamTask {
+  public static createChangesStream(
+    targetName: string,
+    registryId = '',
+    since = ''
+  ): ChangesStreamTask {
     const data = {
       type: TaskType.ChangesStream,
       state: TaskState.Waiting,
@@ -210,7 +224,10 @@ export class Task<T extends TaskBaseData = TaskBaseData> extends Entity {
     return task;
   }
 
-  public static createTriggerHookTask(hookEvent: HookEvent, hookId: string): TriggerHookTask {
+  public static createTriggerHookTask(
+    hookEvent: HookEvent,
+    hookId: string
+  ): TriggerHookTask {
     const data = {
       type: TaskType.TriggerHook,
       state: TaskState.Waiting,
@@ -230,7 +247,10 @@ export class Task<T extends TaskBaseData = TaskBaseData> extends Entity {
     return task;
   }
 
-  public static createSyncBinary(targetName: string, lastData: any): Task {
+  public static createSyncBinary(
+    targetName: string,
+    lastData?: Record<string, unknown>
+  ): Task {
     const data = {
       type: TaskType.SyncBinary,
       state: TaskState.Waiting,
@@ -249,13 +269,24 @@ export class Task<T extends TaskBaseData = TaskBaseData> extends Entity {
     return task;
   }
 
-  public static needMergeWhenWaiting(type: TaskType) {
-    return [ TaskType.SyncBinary, TaskType.SyncPackage ].includes(type);
+  needMergeWhenWaiting(): boolean {
+    // when backfilling historical tasks, shouldNotMerge is set to true to avoid merging
+    // backfill tasks run on their own
+    if (this.data.shouldNotMerge === true) {
+      return false;
+    }
+    // only merge binary mirror and npm package sync tasks
+    return [TaskType.SyncBinary, TaskType.SyncPackage].includes(this.type);
  }
 
-  public static createUpdateProxyCache(targetName: string, options: UpdateProxyCacheTaskOptions):CreateUpdateProxyCacheTask {
+  public static createUpdateProxyCache(
+    targetName: string,
+    options: UpdateProxyCacheTaskOptions
+  ): CreateUpdateProxyCacheTask {
     if (!isPkgManifest(options.fileType)) {
-      throw new InternalServerError('should not update package version manifest.');
+      throw new InternalServerError(
+        'should not update package version manifest.'
+      );
     }
     const filePath = `/${PROXY_CACHE_DIR_NAME}/${options.fullname}/${options.fileType}`;
     const data = {
@@ -289,8 +320,8 @@ export class Task<T extends TaskBaseData = TaskBaseData> extends Entity {
   }
 }
 
-export type SyncInfo = {
+export interface SyncInfo {
   lastSince: string;
   taskCount: number;
   lastPackage?: string;
-};
+}
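A hedged illustration of the `needMergeWhenWaiting` change above: merging is now decided per task instance, so a backfill task can opt out via `data.shouldNotMerge`. The `shouldReuseWaitingTask` helper below is a simplified stand-in for what a task service might do with this flag, not the cnpmcore TaskService itself.

```ts
// Hedged sketch: illustrative types only.
interface QueueTask {
  type: string;
  bizId?: string;
  data: { shouldNotMerge?: boolean };
  needMergeWhenWaiting(): boolean;
}

function shouldReuseWaitingTask(incoming: QueueTask, waiting?: QueueTask): boolean {
  // Only merge when an identical task is already waiting and the incoming
  // task allows merging (i.e. it is not a compensation/backfill task).
  if (!waiting) return false;
  return incoming.needMergeWhenWaiting() && waiting.bizId === incoming.bizId;
}
```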
@@ -1,6 +1,7 @@
 import dayjs from 'dayjs';
-import { Entity, EntityData } from './Entity';
-import { EasyData, EntityUtil } from '../util/EntityUtil';
+
+import { Entity, type EntityData } from './Entity.ts';
+import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
 
 export enum TokenType {
   granular = 'granular',
@@ -17,7 +18,7 @@ interface BaseTokenData extends EntityData {
   lastUsedAt?: Date;
 }
 
-interface ClassicTokenData extends BaseTokenData{
+interface ClassicTokenData extends BaseTokenData {
   isAutomation?: boolean;
 }
 interface GranularTokenData extends BaseTokenData {
@@ -31,7 +32,9 @@ interface GranularTokenData extends BaseTokenData {
 
 type TokenData = ClassicTokenData | GranularTokenData;
 
-export function isGranularToken(data: TokenData | Token): data is GranularTokenData {
+export function isGranularToken(
+  data: TokenData | Token
+): data is GranularTokenData {
   return data.type === TokenType.granular;
 }
 
@@ -51,7 +54,7 @@ export class Token extends Entity {
   readonly expires?: number;
   lastUsedAt: Date | null;
   allowedPackages?: string[];
-  token?: string;
+  token: string;
 
   constructor(data: TokenData) {
     super(data);
@@ -79,9 +82,10 @@ export class Token extends Entity {
   static create(data: EasyData<TokenData, 'tokenId'>): Token {
     const newData = EntityUtil.defaultData(data, 'tokenId');
     if (isGranularToken(newData) && !newData.expiredAt) {
-      newData.expiredAt = dayjs(newData.createdAt).add(newData.expires, 'days').toDate();
+      newData.expiredAt = dayjs(newData.createdAt)
+        .add(newData.expires, 'days')
+        .toDate();
     }
     return new Token(newData);
   }
-
 }
@@ -1,6 +1,6 @@
-import { Entity, EntityData } from './Entity';
-import { EasyData, EntityUtil } from '../util/EntityUtil';
-import { cleanUserPrefix } from '../../common/PackageUtil';
+import { Entity, type EntityData } from './Entity.ts';
+import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
+import { cleanUserPrefix } from '../../common/PackageUtil.ts';
 
 interface UserData extends EntityData {
   userId: string;
@@ -1,5 +1,5 @@
-import { Entity, EntityData } from './Entity';
-import { EasyData, EntityUtil } from '../util/EntityUtil';
+import { Entity, type EntityData } from './Entity.ts';
+import { EntityUtil, type EasyData } from '../util/EntityUtil.ts';
 
 interface WebauthnCredentialData extends EntityData {
   wancId: string;
@@ -25,7 +25,9 @@ export class WebauthnCredential extends Entity {
     this.browserType = data.browserType;
   }
 
-  static create(data: EasyData<WebauthnCredentialData, 'wancId'>): WebauthnCredential {
+  static create(
+    data: EasyData<WebauthnCredentialData, 'wancId'>
+  ): WebauthnCredential {
     const newData = EntityUtil.defaultData(data, 'wancId');
     return new WebauthnCredential(newData);
   }
@@ -1,8 +1,8 @@
-import { Event, Inject } from '@eggjs/tegg';
-import { EggLogger } from 'egg';
-import { PACKAGE_VERSION_ADDED } from './index';
-import { BUG_VERSIONS } from '../../common/constants';
-import { BugVersionService } from '../service/BugVersionService';
+import { Event, Inject, Logger } from 'egg';
+
+import { PACKAGE_VERSION_ADDED } from './index.ts';
+import { BUG_VERSIONS } from '../../common/constants.ts';
+import type { BugVersionService } from '../service/BugVersionService.ts';
 
 @Event(PACKAGE_VERSION_ADDED)
 export class BugVersionFixHandler {
@@ -10,7 +10,7 @@ export class BugVersionFixHandler {
   private readonly bugVersionService: BugVersionService;
 
   @Inject()
-  private readonly logger: EggLogger;
+  private readonly logger: Logger;
 
   async handle(fullname: string) {
     if (fullname !== BUG_VERSIONS) return;
@@ -1,18 +1,19 @@
-import { Event, Inject } from '@eggjs/tegg';
+import { Event, Inject } from 'egg';
+
 import {
-  PACKAGE_UNPUBLISHED,
   PACKAGE_BLOCKED,
-  PACKAGE_UNBLOCKED,
-  PACKAGE_VERSION_ADDED,
-  PACKAGE_VERSION_REMOVED,
-  PACKAGE_TAG_ADDED,
-  PACKAGE_TAG_CHANGED,
-  PACKAGE_TAG_REMOVED,
   PACKAGE_MAINTAINER_CHANGED,
   PACKAGE_MAINTAINER_REMOVED,
   PACKAGE_META_CHANGED,
-} from './index';
-import { CacheService } from '../../core/service/CacheService';
+  PACKAGE_TAG_ADDED,
+  PACKAGE_TAG_CHANGED,
+  PACKAGE_TAG_REMOVED,
+  PACKAGE_UNBLOCKED,
+  PACKAGE_UNPUBLISHED,
+  PACKAGE_VERSION_ADDED,
+  PACKAGE_VERSION_REMOVED,
+} from './index.ts';
+import type { CacheService } from '../../core/service/CacheService.ts';
 
 class CacheCleanerEvent {
   @Inject()
@@ -1,22 +1,24 @@
-import { EggAppConfig } from 'egg';
-import { Event, Inject } from '@eggjs/tegg';
+import { Event, Inject, Config } from 'egg';
+
 import {
-  PACKAGE_UNPUBLISHED,
-  PACKAGE_VERSION_ADDED,
-  PACKAGE_VERSION_REMOVED,
+  type PackageMetaChange,
+  PACKAGE_MAINTAINER_CHANGED,
+  PACKAGE_MAINTAINER_REMOVED,
+  PACKAGE_META_CHANGED,
   PACKAGE_TAG_ADDED,
   PACKAGE_TAG_CHANGED,
   PACKAGE_TAG_REMOVED,
-  PACKAGE_MAINTAINER_CHANGED,
-  PACKAGE_MAINTAINER_REMOVED,
-  PACKAGE_META_CHANGED, PackageMetaChange,
-} from './index';
-import { ChangeRepository } from '../../repository/ChangeRepository';
-import { Change } from '../entity/Change';
-import { HookEvent } from '../entity/HookEvent';
-import { Task } from '../entity/Task';
-import { User } from '../entity/User';
-import { TaskService } from '../service/TaskService';
+  PACKAGE_UNPUBLISHED,
+  PACKAGE_VERSION_ADDED,
+  PACKAGE_VERSION_REMOVED,
+} from './index.ts';
+import type { ChangeRepository } from '../../repository/ChangeRepository.ts';
+import { Change } from '../entity/Change.ts';
+import { HookEvent } from '../entity/HookEvent.ts';
+import { Task } from '../entity/Task.ts';
+import type { User } from '../entity/User.ts';
+import type { TaskService } from '../service/TaskService.ts';
 
 class ChangesStreamEvent {
   @Inject()
@@ -26,13 +28,17 @@ class ChangesStreamEvent {
   protected readonly taskService: TaskService;
 
   @Inject()
-  protected readonly config: EggAppConfig;
+  protected readonly config: Config;
 
   protected get hookEnable() {
     return this.config.cnpmcore.hookEnable;
   }
 
-  protected async addChange(type: string, fullname: string, data: object): Promise<Change> {
+  protected async addChange(
+    type: string,
+    fullname: string,
+    data: object
+  ): Promise<Change> {
     const change = Change.create({
       type,
       targetName: fullname,
@@ -48,7 +54,9 @@ export class PackageUnpublishedChangesStreamEvent extends ChangesStreamEvent {
   async handle(fullname: string) {
     const change = await this.addChange(PACKAGE_UNPUBLISHED, fullname, {});
     if (this.hookEnable) {
-      const task = Task.createCreateHookTask(HookEvent.createUnpublishEvent(fullname, change.changeId));
+      const task = Task.createCreateHookTask(
+        HookEvent.createUnpublishEvent(fullname, change.changeId)
+      );
       await this.taskService.createTask(task, true);
     }
   }
@@ -57,9 +65,13 @@ export class PackageUnpublishedChangesStreamEvent extends ChangesStreamEvent {
 @Event(PACKAGE_VERSION_ADDED)
 export class PackageVersionAddedChangesStreamEvent extends ChangesStreamEvent {
   async handle(fullname: string, version: string, tag?: string) {
-    const change = await this.addChange(PACKAGE_VERSION_ADDED, fullname, { version });
+    const change = await this.addChange(PACKAGE_VERSION_ADDED, fullname, {
+      version,
+    });
     if (this.hookEnable) {
-      const task = Task.createCreateHookTask(HookEvent.createPublishEvent(fullname, change.changeId, version, tag));
+      const task = Task.createCreateHookTask(
+        HookEvent.createPublishEvent(fullname, change.changeId, version, tag)
+      );
       await this.taskService.createTask(task, true);
     }
   }
@@ -68,9 +80,13 @@ export class PackageVersionAddedChangesStreamEvent extends ChangesStreamEvent {
 @Event(PACKAGE_VERSION_REMOVED)
 export class PackageVersionRemovedChangesStreamEvent extends ChangesStreamEvent {
   async handle(fullname: string, version: string, tag?: string) {
-    const change = await this.addChange(PACKAGE_VERSION_REMOVED, fullname, { version });
+    const change = await this.addChange(PACKAGE_VERSION_REMOVED, fullname, {
+      version,
+    });
     if (this.hookEnable) {
-      const task = Task.createCreateHookTask(HookEvent.createUnpublishEvent(fullname, change.changeId, version, tag));
+      const task = Task.createCreateHookTask(
+        HookEvent.createUnpublishEvent(fullname, change.changeId, version, tag)
+      );
       await this.taskService.createTask(task, true);
     }
   }
@@ -81,7 +97,9 @@ export class PackageTagAddedChangesStreamEvent extends ChangesStreamEvent {
   async handle(fullname: string, tag: string) {
     const change = await this.addChange(PACKAGE_TAG_ADDED, fullname, { tag });
     if (this.hookEnable) {
-      const task = Task.createCreateHookTask(HookEvent.createDistTagEvent(fullname, change.changeId, tag));
+      const task = Task.createCreateHookTask(
+        HookEvent.createDistTagEvent(fullname, change.changeId, tag)
+      );
       await this.taskService.createTask(task, true);
     }
   }
@@ -92,7 +110,9 @@ export class PackageTagChangedChangesStreamEvent extends ChangesStreamEvent {
   async handle(fullname: string, tag: string) {
     const change = await this.addChange(PACKAGE_TAG_CHANGED, fullname, { tag });
     if (this.hookEnable) {
-      const task = Task.createCreateHookTask(HookEvent.createDistTagEvent(fullname, change.changeId, tag));
+      const task = Task.createCreateHookTask(
+        HookEvent.createDistTagEvent(fullname, change.changeId, tag)
+      );
       await this.taskService.createTask(task, true);
     }
   }
@@ -103,7 +123,9 @@ export class PackageTagRemovedChangesStreamEvent extends ChangesStreamEvent {
   async handle(fullname: string, tag: string) {
     const change = await this.addChange(PACKAGE_TAG_REMOVED, fullname, { tag });
     if (this.hookEnable) {
-      const task = Task.createCreateHookTask(HookEvent.createDistTagRmEvent(fullname, change.changeId, tag));
+      const task = Task.createCreateHookTask(
+        HookEvent.createDistTagRmEvent(fullname, change.changeId, tag)
+      );
       await this.taskService.createTask(task, true);
     }
   }
@@ -112,11 +134,17 @@ export class PackageTagRemovedChangesStreamEvent extends ChangesStreamEvent {
 @Event(PACKAGE_MAINTAINER_CHANGED)
 export class PackageMaintainerChangedChangesStreamEvent extends ChangesStreamEvent {
   async handle(fullname: string, maintainers: User[]) {
-    const change = await this.addChange(PACKAGE_MAINTAINER_CHANGED, fullname, {});
+    const change = await this.addChange(
+      PACKAGE_MAINTAINER_CHANGED,
+      fullname,
+      {}
+    );
     // TODO should push the diff instead of the full maintainer list
     if (this.hookEnable) {
       for (const maintainer of maintainers) {
-        const task = Task.createCreateHookTask(HookEvent.createOwnerEvent(fullname, change.changeId, maintainer.name));
+        const task = Task.createCreateHookTask(
+          HookEvent.createOwnerEvent(fullname, change.changeId, maintainer.name)
+        );
         await this.taskService.createTask(task, true);
       }
     }
@@ -126,9 +154,13 @@ export class PackageMaintainerChangedChangesStreamEvent extends ChangesStreamEvent {
 @Event(PACKAGE_MAINTAINER_REMOVED)
 export class PackageMaintainerRemovedChangesStreamEvent extends ChangesStreamEvent {
   async handle(fullname: string, maintainer: string) {
-    const change = await this.addChange(PACKAGE_MAINTAINER_REMOVED, fullname, { maintainer });
+    const change = await this.addChange(PACKAGE_MAINTAINER_REMOVED, fullname, {
+      maintainer,
+    });
     if (this.hookEnable) {
-      const task = Task.createCreateHookTask(HookEvent.createOwnerRmEvent(fullname, change.changeId, maintainer));
+      const task = Task.createCreateHookTask(
+        HookEvent.createOwnerRmEvent(fullname, change.changeId, maintainer)
+      );
       await this.taskService.createTask(task, true);
     }
   }
@@ -137,11 +169,19 @@ export class PackageMaintainerRemovedChangesStreamEvent extends ChangesStreamEvent {
 @Event(PACKAGE_META_CHANGED)
 export class PackageMetaChangedChangesStreamEvent extends ChangesStreamEvent {
   async handle(fullname: string, meta: PackageMetaChange) {
-    const change = await this.addChange(PACKAGE_META_CHANGED, fullname, { ...meta });
+    const change = await this.addChange(PACKAGE_META_CHANGED, fullname, {
+      ...meta,
+    });
     const { deprecateds } = meta;
     if (this.hookEnable) {
       for (const deprecated of deprecateds || []) {
-        const task = Task.createCreateHookTask(HookEvent.createDeprecatedEvent(fullname, change.changeId, deprecated.version));
+        const task = Task.createCreateHookTask(
+          HookEvent.createDeprecatedEvent(
+            fullname,
+            change.changeId,
+            deprecated.version
+          )
+        );
         await this.taskService.createTask(task, true);
       }
     }
@@ -1,30 +1,38 @@
-import { Event, Inject } from '@eggjs/tegg';
-import {
-  EggAppConfig,
-} from 'egg';
-import { PACKAGE_VERSION_ADDED } from './index';
-import { getScopeAndName } from '../../common/PackageUtil';
-import { PackageVersionManifest as PackageVersionManifestEntity } from '../entity/PackageVersionManifest';
-import { PackageRepository } from '../../repository/PackageRepository';
-import { DistRepository } from '../../repository/DistRepository';
+import { Config, Event, Inject } from 'egg';
+
+import { PACKAGE_VERSION_ADDED } from './index.ts';
+import { getScopeAndName } from '../../common/PackageUtil.ts';
+import { PackageVersionManifest as PackageVersionManifestEntity } from '../entity/PackageVersionManifest.ts';
+import type { PackageRepository } from '../../repository/PackageRepository.ts';
+import type { DistRepository } from '../../repository/DistRepository.ts';
 
 class StoreManifestEvent {
   @Inject()
-  protected readonly config: EggAppConfig;
+  protected readonly config: Config;
   @Inject()
   private readonly packageRepository: PackageRepository;
   @Inject()
   private readonly distRepository: DistRepository;
 
-  protected async savePackageVersionManifest(fullname: string, version: string) {
-    if (!this.config.cnpmcore.enableStoreFullPackageVersionManifestsToDatabase) return;
+  protected async savePackageVersionManifest(
+    fullname: string,
+    version: string
+  ) {
+    if (!this.config.cnpmcore.enableStoreFullPackageVersionManifestsToDatabase)
+      return;
 
-    const [ scope, name ] = getScopeAndName(fullname);
+    const [scope, name] = getScopeAndName(fullname);
     const packageId = await this.packageRepository.findPackageId(scope, name);
     if (!packageId) return;
-    const packageVersion = await this.packageRepository.findPackageVersion(packageId, version);
+    const packageVersion = await this.packageRepository.findPackageVersion(
+      packageId,
+      version
+    );
     if (!packageVersion) return;
-    const manifest = await this.distRepository.findPackageVersionManifest(packageId, version);
+    const manifest = await this.distRepository.findPackageVersionManifest(
+      packageId,
+      version
+    );
     if (!manifest) return;
     const entity = PackageVersionManifestEntity.create({
       packageId,
@@ -1,29 +1,26 @@
 // TODO sync event
 /* eslint-disable @typescript-eslint/no-unused-vars */
-import { EggAppConfig } from 'egg';
-import { Event, Inject } from '@eggjs/tegg';
+import { Config, Event, Inject } from 'egg';
 import {
-  PACKAGE_UNPUBLISHED,
-  PACKAGE_VERSION_ADDED,
-  PACKAGE_VERSION_REMOVED,
-  PACKAGE_TAG_ADDED,
-  PACKAGE_TAG_CHANGED,
-  PACKAGE_TAG_REMOVED,
-  PACKAGE_BLOCKED,
+  PACKAGE_BLOCKED,
   PACKAGE_MAINTAINER_CHANGED,
   PACKAGE_MAINTAINER_REMOVED,
   PACKAGE_META_CHANGED,
+  PACKAGE_TAG_ADDED,
+  PACKAGE_TAG_CHANGED,
+  PACKAGE_TAG_REMOVED,
   PACKAGE_UNBLOCKED,
-} from './index';
-
-import { PackageSearchService } from '../service/PackageSearchService';
+  PACKAGE_UNPUBLISHED,
+  PACKAGE_VERSION_ADDED,
+  PACKAGE_VERSION_REMOVED,
+} from './index.ts';
+import type { PackageSearchService } from '../service/PackageSearchService.ts';
 
 class SyncESPackage {
   @Inject()
   protected readonly packageSearchService: PackageSearchService;
 
   @Inject()
-  protected readonly config: EggAppConfig;
+  protected readonly config: Config;
 
   protected async syncPackage(fullname: string) {
     if (!this.config.cnpmcore.enableElasticsearch) return;
@@ -1,18 +1,20 @@
-import { Event, Inject } from '@eggjs/tegg';
+import { Event, Inject, Config, Logger } from 'egg';
+import { ForbiddenError } from 'egg/errors';
+
 import {
-  EggAppConfig, EggLogger,
-} from 'egg';
-import { ForbiddenError } from 'egg-errors';
-import { PACKAGE_VERSION_ADDED, PACKAGE_TAG_ADDED, PACKAGE_TAG_CHANGED } from './index';
-import { getScopeAndName } from '../../common/PackageUtil';
-import { PackageManagerService } from '../service/PackageManagerService';
-import { PackageVersionFileService } from '../service/PackageVersionFileService';
+  PACKAGE_TAG_ADDED,
+  PACKAGE_TAG_CHANGED,
+  PACKAGE_VERSION_ADDED,
+} from './index.ts';
+import { getScopeAndName } from '../../common/PackageUtil.ts';
+import type { PackageManagerService } from '../service/PackageManagerService.ts';
+import type { PackageVersionFileService } from '../service/PackageVersionFileService.ts';
 
 class SyncPackageVersionFileEvent {
   @Inject()
-  protected readonly config: EggAppConfig;
+  protected readonly config: Config;
   @Inject()
-  protected readonly logger: EggLogger;
+  protected readonly logger: Logger;
   @Inject()
   private readonly packageManagerService: PackageManagerService;
   @Inject()
@@ -23,17 +25,28 @@ class SyncPackageVersionFileEvent {
     if (!this.config.cnpmcore.enableUnpkg) return;
     if (!this.config.cnpmcore.enableSyncUnpkgFiles) return;
     // ignore sync on unittest
-    if (this.config.env === 'unittest' && fullname !== '@cnpm/unittest-unpkg-demo') return;
-    const [ scope, name ] = getScopeAndName(fullname);
-    const { packageVersion } = await this.packageManagerService.showPackageVersionByVersionOrTag(
-      scope, name, version);
+    if (
+      this.config.env === 'unittest' &&
+      fullname !== '@cnpm/unittest-unpkg-demo'
+    )
+      return;
+    const [scope, name] = getScopeAndName(fullname);
+    const { packageVersion } =
+      await this.packageManagerService.showPackageVersionByVersionOrTag(
+        scope,
+        name,
+        version
+      );
     if (!packageVersion) return;
     try {
-      await this.packageVersionFileService.syncPackageVersionFiles(packageVersion);
+      await this.packageVersionFileService.syncPackageVersionFiles(
+        packageVersion
+      );
     } catch (err) {
       if (err instanceof ForbiddenError) {
-        this.logger.info('[SyncPackageVersionFileEvent.syncPackageVersionFile] ignore sync files, cause: %s',
-          err.message,
+        this.logger.info(
+          '[SyncPackageVersionFileEvent.syncPackageVersionFile] ignore sync files, cause: %s',
+          err.message
         );
         return;
       }
@@ -42,9 +55,13 @@ class SyncPackageVersionFileEvent {
   }
 
   protected async syncPackageReadmeToLatestVersion(fullname: string) {
-    const [ scope, name ] = getScopeAndName(fullname);
-    const { pkg, packageVersion } = await this.packageManagerService.showPackageVersionByVersionOrTag(
-      scope, name, 'latest');
+    const [scope, name] = getScopeAndName(fullname);
+    const { pkg, packageVersion } =
+      await this.packageManagerService.showPackageVersionByVersionOrTag(
+        scope,
+        name,
+        'latest'
+      );
     if (!pkg || !packageVersion) return;
     await this.packageVersionFileService.syncPackageReadme(pkg, packageVersion);
   }
23
app/core/event/TotalHandler.ts
Normal file
@@ -0,0 +1,23 @@
+import { Event, Inject } from 'egg';
+
+import { PACKAGE_ADDED, PACKAGE_VERSION_ADDED } from './index.ts';
+import type { TotalRepository } from '../../repository/TotalRepository.ts';
+
+class TotalHandlerEvent {
+  @Inject()
+  protected readonly totalRepository: TotalRepository;
+}
+
+@Event(PACKAGE_ADDED)
+export class PackageAddedTotalHandlerEvent extends TotalHandlerEvent {
+  async handle() {
+    await this.totalRepository.incrementPackageCount();
+  }
+}
+
+@Event(PACKAGE_VERSION_ADDED)
+export class PackageVersionAddedTotalHandlerEvent extends TotalHandlerEvent {
+  async handle() {
+    await this.totalRepository.incrementPackageVersionCount();
+  }
+}
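A hedged sketch of the event wiring the new TotalHandler relies on: a handler class is bound to an event name and the framework invokes `handle()` whenever that event fires, so the totals are simple monotonically increasing counters. The emitter below is a simplified stand-in for egg's event bus, not the real mechanism.

```ts
// Hedged sketch: an in-process event bus driving a counter,
// analogous to PACKAGE_ADDED → incrementPackageCount().
import { EventEmitter } from 'node:events';

const PACKAGE_ADDED = 'PACKAGE_ADDED';
const bus = new EventEmitter();

let packageCount = 0;
bus.on(PACKAGE_ADDED, () => {
  packageCount += 1; // TotalRepository.incrementPackageCount() in the real code
});

bus.emit(PACKAGE_ADDED);
console.log(packageCount); // 1
```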
@@ -1,6 +1,6 @@
 import '@eggjs/tegg';
-import { User } from '../entity/User';
+import type { User } from '../entity/User.ts';
 
 export const PACKAGE_ADDED = 'PACKAGE_ADDED';
 export const PACKAGE_UNPUBLISHED = 'PACKAGE_UNPUBLISHED';
 export const PACKAGE_BLOCKED = 'PACKAGE_BLOCKED';
 export const PACKAGE_UNBLOCKED = 'PACKAGE_UNBLOCKED';
@@ -15,26 +15,43 @@ export const PACKAGE_META_CHANGED = 'PACKAGE_META_CHANGED';
 
 export interface PackageDeprecated {
   version: string;
-  deprecated: string;
+  deprecated?: string;
 }
 
 export interface PackageMetaChange {
-  deprecateds?: Array<PackageDeprecated>;
+  deprecateds?: PackageDeprecated[];
 }
 
 
-declare module '@eggjs/tegg' {
+declare module 'egg' {
   interface Events {
     [PACKAGE_ADDED]: (fullname: string) => Promise<void>;
     [PACKAGE_UNPUBLISHED]: (fullname: string) => Promise<void>;
     [PACKAGE_BLOCKED]: (fullname: string) => Promise<void>;
     [PACKAGE_UNBLOCKED]: (fullname: string) => Promise<void>;
-    [PACKAGE_VERSION_ADDED]: (fullname: string, version: string, tag?: string) => Promise<void>;
-    [PACKAGE_VERSION_REMOVED]: (fullname: string, version: string, tag?: string) => Promise<void>;
+    [PACKAGE_VERSION_ADDED]: (
+      fullname: string,
+      version: string,
+      tag?: string
+    ) => Promise<void>;
+    [PACKAGE_VERSION_REMOVED]: (
+      fullname: string,
+      version: string,
+      tag?: string
+    ) => Promise<void>;
     [PACKAGE_TAG_ADDED]: (fullname: string, tag: string) => Promise<void>;
     [PACKAGE_TAG_CHANGED]: (fullname: string, tag: string) => Promise<void>;
     [PACKAGE_TAG_REMOVED]: (fullname: string, tag: string) => Promise<void>;
-    [PACKAGE_MAINTAINER_CHANGED]: (fullname: string, maintainers: User[]) => Promise<void>;
-    [PACKAGE_MAINTAINER_REMOVED]: (fullname: string, maintainer: string) => Promise<void>;
-    [PACKAGE_META_CHANGED]: (fullname: string, meta: PackageMetaChange) => Promise<void>;
+    [PACKAGE_MAINTAINER_CHANGED]: (
+      fullname: string,
+      maintainers: User[]
+    ) => Promise<void>;
+    [PACKAGE_MAINTAINER_REMOVED]: (
+      fullname: string,
+      maintainer: string
+    ) => Promise<void>;
+    [PACKAGE_META_CHANGED]: (
+      fullname: string,
+      meta: PackageMetaChange
+    ) => Promise<void>;
   }
 }
@@ -2,5 +2,6 @@
   "name": "cnpmcore-core",
   "eggModule": {
     "name": "cnpmcoreCore"
-  }
+  },
+  "type": "module"
 }
@@ -1,27 +1,33 @@
|
||||
import { rm } from 'fs/promises';
|
||||
import fs from 'node:fs/promises';
|
||||
|
||||
import {
|
||||
AccessLevel,
|
||||
SingletonProto,
|
||||
Inject,
|
||||
EggObjectFactory,
|
||||
} from '@eggjs/tegg';
|
||||
import {
|
||||
EggHttpClient,
|
||||
SingletonProto,
|
||||
type EggObjectFactory,
|
||||
HttpClient,
|
||||
} from 'egg';
|
||||
import fs from 'fs/promises';
|
||||
import { sortBy } from 'lodash';
|
||||
import binaries, { BinaryName, CategoryName } from '../../../config/binaries';
|
||||
import { BinaryRepository } from '../../repository/BinaryRepository';
|
||||
import { Task } from '../entity/Task';
|
||||
import { Binary } from '../entity/Binary';
|
||||
import { TaskService } from './TaskService';
|
||||
import { NFSAdapter } from '../../common/adapter/NFSAdapter';
|
||||
import { downloadToTempfile } from '../../common/FileUtil';
|
||||
import { isTimeoutError } from '../../common/ErrorUtil';
|
||||
import { AbstractBinary, BinaryItem } from '../../common/adapter/binary/AbstractBinary';
|
||||
import { AbstractService } from '../../common/AbstractService';
|
||||
import { BinaryType } from '../../common/enum/Binary';
|
||||
import { TaskType, TaskState } from '../../common/enum/Task';
|
||||
import { sortBy } from 'lodash-es';
|
||||
|
||||
import binaries, {
|
||||
type BinaryName,
|
||||
type CategoryName,
|
||||
} from '../../../config/binaries.ts';
|
||||
import type { BinaryRepository } from '../../repository/BinaryRepository.ts';
|
||||
import { Task, type SyncBinaryTask } from '../entity/Task.ts';
|
||||
import { Binary } from '../entity/Binary.ts';
|
||||
import type { TaskService } from './TaskService.ts';
|
||||
import type { NFSAdapter } from '../../common/adapter/NFSAdapter.ts';
|
||||
import { downloadToTempfile } from '../../common/FileUtil.ts';
|
||||
import { isTimeoutError } from '../../common/ErrorUtil.ts';
|
||||
import {
|
||||
AbstractBinary,
|
||||
type BinaryItem,
|
||||
} from '../../common/adapter/binary/AbstractBinary.ts';
|
||||
import { AbstractService } from '../../common/AbstractService.ts';
|
||||
import { BinaryType } from '../../common/enum/Binary.ts';
|
||||
import { TaskState, TaskType } from '../../common/enum/Task.ts';
|
||||
import { platforms } from '../../common/adapter/binary/PuppeteerBinary.ts';
|
||||
|
||||
function isoNow() {
|
||||
return new Date().toISOString();
|
||||
@@ -36,7 +42,7 @@ export class BinarySyncerService extends AbstractService {
|
||||
@Inject()
|
||||
private readonly taskService: TaskService;
|
||||
@Inject()
|
||||
private readonly httpclient: EggHttpClient;
|
||||
private readonly httpClient: HttpClient;
|
||||
@Inject()
|
||||
private readonly nfsAdapter: NFSAdapter;
|
||||
@Inject()
|
||||
@@ -45,40 +51,49 @@ export class BinarySyncerService extends AbstractService {
|
||||
// canvas/v2.6.1/canvas-v2.6.1-node-v57-linux-glibc-x64.tar.gz
|
||||
// -> node-canvas-prebuilt/v2.6.1/node-canvas-prebuilt-v2.6.1-node-v57-linux-glibc-x64.tar.gz
|
||||
// canvas 历史版本的 targetName 可能是 category 需要兼容
|
||||
public async findBinary(targetName: BinaryName | CategoryName, parent: string, name: string) {
|
||||
public async findBinary(
|
||||
targetName: BinaryName | CategoryName,
|
||||
parent: string,
|
||||
name: string
|
||||
) {
|
||||
return await this.binaryRepository.findBinary(targetName, parent, name);
|
||||
}
|
||||
|
||||
public async listDirBinaries(binary: Binary) {
|
||||
return await this.binaryRepository.listBinaries(binary.category, `${binary.parent}${binary.name}`);
|
||||
public async listDirBinaries(
|
||||
binary: Binary,
|
||||
options?: {
|
||||
limit: number;
|
||||
since: string;
|
||||
}
|
||||
) {
|
||||
return await this.binaryRepository.listBinaries(
|
||||
binary.category,
|
||||
`${binary.parent}${binary.name}`,
|
||||
options
|
||||
);
|
||||
}
|
||||
|
||||
public async listRootBinaries(binaryName: BinaryName) {
|
||||
// 通常 binaryName 和 category 是一样的,但是有些特殊的 binaryName 会有多个 category,比如 canvas
|
||||
// 所以查询 canvas 的时候,需要将 binaryName 和 category 的数据都查出来
|
||||
const {
|
||||
category,
|
||||
} = binaries[binaryName];
|
||||
const reqs = [
|
||||
this.binaryRepository.listBinaries(binaryName, '/'),
|
||||
];
|
||||
const { category } = binaries[binaryName];
|
||||
const reqs = [this.binaryRepository.listBinaries(binaryName, '/')];
|
||||
if (category && category !== binaryName) {
|
||||
reqs.push(this.binaryRepository.listBinaries(category, '/'));
|
||||
}
|
||||
|
||||
const [
|
||||
rootBinary,
|
||||
categoryBinary,
|
||||
] = await Promise.all(reqs);
|
||||
const [rootBinary, categoryBinary] = await Promise.all(reqs);
|
||||
|
||||
const versions = rootBinary.map(b => b.name);
|
||||
categoryBinary?.forEach(b => {
|
||||
const version = b.name;
|
||||
// 只将没有的版本添加进去
|
||||
if (!versions.includes(version)) {
|
||||
rootBinary.push(b);
|
||||
const versions = new Set(rootBinary.map(b => b.name));
|
||||
if (categoryBinary) {
|
||||
for (const b of categoryBinary) {
|
||||
const version = b.name;
|
||||
// 只将没有的版本添加进去
|
||||
if (!versions.has(version)) {
|
||||
rootBinary.push(b);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return rootBinary;
|
||||
}
|
||||
@@ -87,66 +102,131 @@ export class BinarySyncerService extends AbstractService {
|
||||
return await this.nfsAdapter.getDownloadUrlOrStream(binary.storePath);
|
||||
}
|
||||
|
||||
public async createTask(binaryName: BinaryName, lastData?: any) {
|
||||
public async createTask(
|
||||
binaryName: BinaryName,
|
||||
lastData?: Record<string, unknown>
|
||||
) {
|
||||
// chromium-browser-snapshots 产物极大,完整遍历 s3 bucket 耗时会太长
|
||||
// 必须从上次同步的 revision 之后开始遍历
|
||||
// 如果需要补偿数据,可以
|
||||
if (binaryName === 'chromium-browser-snapshots') {
|
||||
lastData = lastData || {};
|
||||
for (const platform of platforms) {
|
||||
if (lastData[platform]) continue;
|
||||
const binaryDir = await this.binaryRepository.findLatestBinaryDir(
|
||||
'chromium-browser-snapshots',
|
||||
`/${platform}/`
|
||||
);
|
||||
if (binaryDir) {
|
||||
lastData[platform] = binaryDir.name.slice(0, -1);
|
||||
}
|
||||
}
|
||||
const latestBinary = await this.binaryRepository.findLatestBinary(
|
||||
'chromium-browser-snapshots'
|
||||
);
|
||||
if (latestBinary && !lastData.lastSyncTime) {
|
||||
lastData.lastSyncTime = latestBinary.date;
|
||||
}
|
||||
}
|
||||
try {
|
||||
return await this.taskService.createTask(Task.createSyncBinary(binaryName, lastData), false);
|
||||
return await this.taskService.createTask(
|
||||
Task.createSyncBinary(binaryName, lastData),
|
||||
false
|
||||
);
|
||||
} catch (e) {
|
||||
this.logger.error('[BinarySyncerService.createTask] binaryName: %s, error: %s', binaryName, e);
|
||||
this.logger.error(
|
||||
'[BinarySyncerService.createTask] binaryName: %s, error: %s',
|
||||
binaryName,
|
||||
e
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
public async findTask(taskId: string) {
|
||||
return await this.taskService.findTask(taskId);
|
||||
public async findTask(taskId: string): Promise<SyncBinaryTask | null> {
|
||||
return (await this.taskService.findTask(taskId)) as SyncBinaryTask;
|
||||
}
|
||||
|
||||
public async findTaskLog(task: Task) {
|
||||
public async findTaskLog(task: SyncBinaryTask) {
|
||||
return await this.taskService.findTaskLog(task);
|
||||
}
|
||||
|
||||
public async findExecuteTask() {
|
||||
return await this.taskService.findExecuteTask(TaskType.SyncBinary);
|
||||
public async findExecuteTask(): Promise<SyncBinaryTask | null> {
|
||||
return (await this.taskService.findExecuteTask(
|
||||
TaskType.SyncBinary
|
||||
)) as SyncBinaryTask;
|
||||
}
|
||||
|
||||
public async executeTask(task: Task) {
|
||||
public async executeTask(task: SyncBinaryTask) {
|
||||
const binaryName = task.targetName as BinaryName;
|
||||
const binaryAdapter = await this.getBinaryAdapter(binaryName);
|
||||
const logUrl = `${this.config.cnpmcore.registry}/-/binary/${binaryName}/syncs/${task.taskId}/log`;
|
||||
let logs: string[] = [];
|
||||
logs.push(`[${isoNow()}] 🚧🚧🚧🚧🚧 Start sync binary "${binaryName}" 🚧🚧🚧🚧🚧`);
|
||||
logs.push(
|
||||
`[${isoNow()}] 🚧🚧🚧🚧🚧 Start sync binary "${binaryName}" 🚧🚧🚧🚧🚧`
|
||||
);
|
||||
if (!binaryAdapter) {
|
||||
task.error = 'unknow binaryName';
|
||||
logs.push(`[${isoNow()}] ❌ Synced "${binaryName}" fail, ${task.error}, log: ${logUrl}`);
|
||||
logs.push(
|
||||
`[${isoNow()}] ❌ Synced "${binaryName}" fail, ${task.error}, log: ${logUrl}`
|
||||
);
|
||||
logs.push(`[${isoNow()}] ❌❌❌❌❌ "${binaryName}" ❌❌❌❌❌`);
|
||||
this.logger.error('[BinarySyncerService.executeTask:fail] taskId: %s, targetName: %s, %s',
|
||||
task.taskId, task.targetName, task.error);
|
||||
this.logger.error(
|
||||
'[BinarySyncerService.executeTask:fail] taskId: %s, targetName: %s, %s',
|
||||
task.taskId,
|
||||
task.targetName,
|
||||
task.error
|
||||
);
|
||||
await this.taskService.finishTask(task, TaskState.Fail, logs.join('\n'));
|
||||
return;
|
||||
}
|
||||
|
||||
await this.taskService.appendTaskLog(task, logs.join('\n'));
|
||||
logs = [];
|
||||
this.logger.info('[BinarySyncerService.executeTask:start] taskId: %s, targetName: %s, log: %s',
|
||||
task.taskId, task.targetName, logUrl);
|
||||
this.logger.info(
|
||||
'[BinarySyncerService.executeTask:start] taskId: %s, targetName: %s, log: %s',
|
||||
task.taskId,
|
||||
task.targetName,
|
||||
logUrl
|
||||
);
|
||||
try {
|
||||
const [ hasDownloadError ] = await this.syncDir(binaryAdapter, task, '/');
|
||||
const [hasDownloadError] = await this.syncDir(binaryAdapter, task, '/');
|
||||
logs.push(`[${isoNow()}] 🟢 log: ${logUrl}`);
|
||||
logs.push(`[${isoNow()}] 🟢🟢🟢🟢🟢 "${binaryName}" 🟢🟢🟢🟢🟢`);
|
||||
await this.taskService.finishTask(task, TaskState.Success, logs.join('\n'));
|
||||
await this.taskService.finishTask(
|
||||
task,
|
||||
TaskState.Success,
|
||||
logs.join('\n')
|
||||
);
|
||||
// 确保没有下载异常才算 success
|
||||
await binaryAdapter.finishFetch(!hasDownloadError, binaryName);
|
||||
this.logger.info('[BinarySyncerService.executeTask:success] taskId: %s, targetName: %s, log: %s, hasDownloadError: %s',
|
||||
task.taskId, task.targetName, logUrl, hasDownloadError);
|
||||
} catch (err: any) {
|
||||
this.logger.info(
|
||||
'[BinarySyncerService.executeTask:success] taskId: %s, targetName: %s, log: %s, hasDownloadError: %s',
|
||||
task.taskId,
|
||||
task.targetName,
|
||||
logUrl,
|
||||
hasDownloadError
|
||||
);
|
||||
} catch (err) {
|
||||
task.error = `${err.name}: ${err.message}`;
|
||||
logs.push(`[${isoNow()}] ❌ Synced "${binaryName}" fail, ${task.error}, log: ${logUrl}`);
|
||||
logs.push(
|
||||
`[${isoNow()}] ❌ Synced "${binaryName}" fail, ${task.error}, log: ${logUrl}`
|
||||
);
|
||||
logs.push(`[${isoNow()}] ❌❌❌❌❌ "${binaryName}" ❌❌❌❌❌`);
|
||||
if (isTimeoutError(err)) {
|
||||
this.logger.warn('[BinarySyncerService.executeTask:fail] taskId: %s, targetName: %s, %s',
|
||||
task.taskId, task.targetName, task.error);
|
||||
this.logger.warn(
|
||||
'[BinarySyncerService.executeTask:fail] taskId: %s, targetName: %s, %s',
|
||||
task.taskId,
|
||||
task.targetName,
|
||||
task.error
|
||||
);
|
||||
this.logger.warn(err);
|
||||
} else {
|
||||
this.logger.error('[BinarySyncerService.executeTask:fail] taskId: %s, targetName: %s, %s',
|
||||
task.taskId, task.targetName, task.error);
|
||||
this.logger.error(
|
||||
'[BinarySyncerService.executeTask:fail] taskId: %s, targetName: %s, %s',
|
||||
task.taskId,
|
||||
task.targetName,
|
||||
task.error
|
||||
);
|
||||
this.logger.error(err);
|
||||
}
|
||||
await binaryAdapter.finishFetch(false, binaryName);
|
||||
@@ -154,62 +234,102 @@ export class BinarySyncerService extends AbstractService {
|
||||
}
|
||||
}
|
||||
|
||||
private async syncDir(binaryAdapter: AbstractBinary, task: Task, dir: string, parentIndex = '', latestVersionParent = '/') {
|
||||
private async syncDir(
|
||||
binaryAdapter: AbstractBinary,
|
||||
task: SyncBinaryTask,
|
||||
dir: string,
|
||||
parentIndex = '',
|
||||
latestVersionParent = '/'
|
||||
) {
|
||||
const binaryName = task.targetName as BinaryName;
|
||||
const result = await binaryAdapter.fetch(dir, binaryName);
|
||||
const result = await binaryAdapter.fetch(dir, binaryName, task.data);
|
||||
let hasDownloadError = false;
|
||||
let hasItems = false;
|
||||
if (result && result.items.length > 0) {
|
||||
hasItems = true;
|
||||
let logs: string[] = [];
|
||||
const { newItems, latestVersionDir } = await this.diff(binaryName, dir, result.items, latestVersionParent);
|
||||
logs.push(`[${isoNow()}][${dir}] 🚧 Syncing diff: ${result.items.length} => ${newItems.length}, Binary class: ${binaryAdapter.constructor.name}`);
|
||||
const { newItems, latestVersionDir } = await this.diff(
|
||||
binaryName,
|
||||
dir,
|
||||
result.items,
|
||||
latestVersionParent
|
||||
);
|
||||
logs.push(
|
||||
`[${isoNow()}][${dir}] 🚧 Syncing diff: ${result.items.length} => ${newItems.length}, Binary class: ${binaryAdapter.constructor.name}`
|
||||
);
|
||||
// re-check latest version
|
||||
for (const [ index, { item, reason }] of newItems.entries()) {
|
||||
for (const [index, { item, reason }] of newItems.entries()) {
|
||||
if (item.isDir) {
|
||||
logs.push(`[${isoNow()}][${dir}] 🚧 [${parentIndex}${index}] Start sync dir ${JSON.stringify(item)}, reason: ${reason}`);
|
||||
logs.push(
|
||||
`[${isoNow()}][${dir}] 🚧 [${parentIndex}${index}] Start sync dir ${JSON.stringify(item)}, reason: ${reason}`
|
||||
);
|
||||
await this.taskService.appendTaskLog(task, logs.join('\n'));
|
||||
logs = [];
|
||||
const [ hasError, hasSubItems ] = await this.syncDir(binaryAdapter, task, `${dir}${item.name}`, `${parentIndex}${index}.`, latestVersionDir);
|
||||
const [hasError, hasSubItems] = await this.syncDir(
|
||||
binaryAdapter,
|
||||
task,
|
||||
`${dir}${item.name}`,
|
||||
`${parentIndex}${index}.`,
|
||||
latestVersionDir
|
||||
);
|
||||
if (hasError) {
|
||||
hasDownloadError = true;
|
||||
} else {
|
||||
} else if (hasSubItems) {
|
||||
// if any file download error, let dir sync again next time
|
||||
// if empty dir, don't save it
|
||||
if (hasSubItems) {
|
||||
await this.saveBinaryItem(item);
|
||||
}
|
||||
await this.saveBinaryItem(item);
|
||||
}
|
||||
} else {
|
||||
// download to nfs
|
||||
logs.push(`[${isoNow()}][${dir}] 🚧 [${parentIndex}${index}] Downloading ${JSON.stringify(item)}, reason: ${reason}`);
|
||||
logs.push(
|
||||
`[${isoNow()}][${dir}] 🚧 [${parentIndex}${index}] Downloading ${JSON.stringify(item)}, reason: ${reason}`
|
||||
);
|
||||
// skip exists binary file
|
||||
const existsBinary = await this.binaryRepository.findBinary(item.category, item.parent, item.name);
|
||||
const existsBinary = await this.binaryRepository.findBinary(
|
||||
item.category,
|
||||
item.parent,
|
||||
item.name
|
||||
);
|
||||
if (existsBinary && existsBinary.date === item.date) {
|
||||
logs.push(`[${isoNow()}][${dir}] 🟢 [${parentIndex}${index}] binary file exists, skip download, binaryId: ${existsBinary.binaryId}`);
|
||||
this.logger.info('[BinarySyncerService.syncDir:skipDownload] binaryId: %s exists, storePath: %s',
|
||||
existsBinary.binaryId, existsBinary.storePath);
|
||||
logs.push(
|
||||
`[${isoNow()}][${dir}] 🟢 [${parentIndex}${index}] binary file exists, skip download, binaryId: ${existsBinary.binaryId}`
|
||||
);
|
||||
this.logger.info(
|
||||
'[BinarySyncerService.syncDir:skipDownload] binaryId: %s exists, storePath: %s',
|
||||
existsBinary.binaryId,
|
||||
existsBinary.storePath
|
||||
);
|
||||
continue;
|
||||
}
|
||||
await this.taskService.appendTaskLog(task, logs.join('\n'));
|
||||
logs = [];
|
||||
let localFile = '';
|
||||
try {
|
||||
const { tmpfile, headers, timing } =
|
||||
await downloadToTempfile(
|
||||
this.httpclient, this.config.dataDir, item.sourceUrl!, { ignoreDownloadStatuses: item.ignoreDownloadStatuses });
|
||||
const { tmpfile, headers, timing } = await downloadToTempfile(
|
||||
this.httpClient,
|
||||
this.config.dataDir,
|
||||
item.sourceUrl,
|
||||
{ ignoreDownloadStatuses: item.ignoreDownloadStatuses }
|
||||
);
|
||||
const log = `[${isoNow()}][${dir}] 🟢 [${parentIndex}${index}] HTTP content-length: ${headers['content-length']}, timing: ${JSON.stringify(timing)}, ${item.sourceUrl} => ${tmpfile}`;
|
||||
logs.push(log);
|
||||
this.logger.info('[BinarySyncerService.syncDir:downloadToTempfile] %s', log);
|
||||
this.logger.info(
|
||||
'[BinarySyncerService.syncDir:downloadToTempfile] %s',
|
||||
log
|
||||
);
|
||||
localFile = tmpfile;
|
||||
const binary = await this.saveBinaryItem(item, tmpfile);
|
||||
logs.push(`[${isoNow()}][${dir}] 🟢 [${parentIndex}${index}] Synced file success, binaryId: ${binary.binaryId}`);
|
||||
logs.push(
|
||||
`[${isoNow()}][${dir}] 🟢 [${parentIndex}${index}] Synced file success, binaryId: ${binary.binaryId}`
|
||||
);
|
||||
await this.taskService.appendTaskLog(task, logs.join('\n'));
|
||||
logs = [];
|
||||
} catch (err: any) {
|
||||
} catch (err) {
|
||||
if (err.name === 'DownloadNotFoundError') {
|
||||
this.logger.info('Not found %s, skip it', item.sourceUrl);
|
||||
logs.push(`[${isoNow()}][${dir}] 🧪️ [${parentIndex}${index}] Download ${item.sourceUrl} not found, skip it`);
|
||||
logs.push(
|
||||
`[${isoNow()}][${dir}] 🧪️ [${parentIndex}${index}] Download ${item.sourceUrl} not found, skip it`
|
||||
);
|
||||
} else {
|
||||
if (err.name === 'DownloadStatusInvalidError') {
|
||||
this.logger.warn('Download binary %s %s', item.sourceUrl, err);
|
||||
@@ -217,13 +337,15 @@ export class BinarySyncerService extends AbstractService {
|
||||
this.logger.error('Download binary %s %s', item.sourceUrl, err);
|
||||
}
|
||||
hasDownloadError = true;
|
||||
logs.push(`[${isoNow()}][${dir}] ❌ [${parentIndex}${index}] Download ${item.sourceUrl} error: ${err}`);
|
||||
logs.push(
|
||||
`[${isoNow()}][${dir}] ❌ [${parentIndex}${index}] Download ${item.sourceUrl} error: ${err}`
|
||||
);
|
||||
}
|
||||
await this.taskService.appendTaskLog(task, logs.join('\n'));
|
||||
logs = [];
|
||||
} finally {
|
||||
if (localFile) {
|
||||
await rm(localFile, { force: true });
|
||||
await fs.rm(localFile, { force: true });
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -231,20 +353,29 @@ export class BinarySyncerService extends AbstractService {
      if (hasDownloadError) {
        logs.push(`[${isoNow()}][${dir}] ❌ Synced dir fail`);
      } else {
-       logs.push(`[${isoNow()}][${dir}] 🟢 Synced dir success, hasItems: ${hasItems}`);
+       logs.push(
+         `[${isoNow()}][${dir}] 🟢 Synced dir success, hasItems: ${hasItems}`
+       );
      }
      await this.taskService.appendTaskLog(task, logs.join('\n'));
    }
-   return [ hasDownloadError, hasItems ];
+   return [hasDownloadError, hasItems];
  }

  // see https://github.com/cnpm/cnpmcore/issues/556
  // The upstream may be publishing a new version, or the sync flow may have been interrupted, leaving the fetched file list inconsistent
  // If the current dir matches the latestVersionParent parent dir, revalidate the current dir
  // If existsItems is empty or has been modified, no revalidation is needed
- private async diff(binaryName: BinaryName, dir: string, fetchItems: BinaryItem[], latestVersionParent = '/') {
-   const existsItems = await this.binaryRepository.listBinaries(binaryName, dir);
+ private async diff(
+   binaryName: BinaryName,
+   dir: string,
+   fetchItems: BinaryItem[],
+   latestVersionParent = '/'
+ ) {
+   const existsItems = await this.binaryRepository.listBinaries(
+     binaryName,
+     dir
+   );
    const existsMap = new Map<string, Binary>();
    for (const item of existsItems) {
      existsMap.set(item.name, item);
@@ -277,7 +408,7 @@ export class BinarySyncerService extends AbstractService {
        existsItem.date = item.date;
      } else if (dir.endsWith(latestVersionParent)) {
        if (!latestItem) {
-         latestItem = sortBy(fetchItems, [ 'date' ]).pop();
+         latestItem = sortBy(fetchItems, ['date']).pop();
        }
        const isLatestItem = latestItem?.name === item.name;
        if (isLatestItem && existsItem.isDir) {
@@ -290,7 +421,6 @@ export class BinarySyncerService extends AbstractService {
      }
    }

    return {
      newItems: diffItems,
      latestVersionDir: latestVersionParent,
@@ -302,22 +432,35 @@ export class BinarySyncerService extends AbstractService {
      const stat = await fs.stat(tmpfile);
      binary.size = stat.size;
      await this.nfsAdapter.uploadFile(binary.storePath, tmpfile);
-     this.logger.info('[BinarySyncerService.saveBinaryItem:uploadFile] binaryId: %s, size: %d, %s => %s',
-       binary.binaryId, stat.size, tmpfile, binary.storePath);
+     this.logger.info(
+       '[BinarySyncerService.saveBinaryItem:uploadFile] binaryId: %s, size: %d, %s => %s',
+       binary.binaryId,
+       stat.size,
+       tmpfile,
+       binary.storePath
+     );
    }
    await this.binaryRepository.saveBinary(binary);
    return binary;
  }

- private async getBinaryAdapter(binaryName: BinaryName): Promise<AbstractBinary | undefined> {
+ private async getBinaryAdapter(
+   binaryName: BinaryName
+ ): Promise<AbstractBinary | undefined> {
    const config = this.config.cnpmcore;
    const binaryConfig = binaries[binaryName];

    let binaryAdapter: AbstractBinary;
    if (config.sourceRegistryIsCNpm) {
-     binaryAdapter = await this.eggObjectFactory.getEggObject(AbstractBinary, BinaryType.Api);
+     binaryAdapter = await this.eggObjectFactory.getEggObject(
+       AbstractBinary,
+       BinaryType.Api
+     );
    } else {
-     binaryAdapter = await this.eggObjectFactory.getEggObject(AbstractBinary, binaryConfig.type);
+     binaryAdapter = await this.eggObjectFactory.getEggObject(
+       AbstractBinary,
+       binaryConfig.type
+     );
    }
    await binaryAdapter.initFetch(binaryName);
    return binaryAdapter;
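The comments above the `diff` method describe the guard for cnpm/cnpmcore#556: if the upstream publishes while a sync runs, the stored listing for the directory holding the latest version may be incomplete, so exactly that directory is revisited even when it already exists locally. A condensed sketch of the decision; the names follow the code above, but the extracted predicate itself is my reading of it, not a copy of the method:

interface BinaryItem { name: string; isDir: boolean; date: string; }

// Decide whether an already-synced directory entry should be re-diffed.
// Only the directory under latestVersionParent that belongs to the latest
// item is re-checked; entries whose date changed are resynced anyway.
function shouldRevalidate(
  dir: string,
  item: BinaryItem,
  latestItem: BinaryItem | undefined,
  latestVersionParent = '/'
): boolean {
  const isLatestItem = latestItem?.name === item.name;
  return dir.endsWith(latestVersionParent) && isLatestItem && item.isDir;
}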
@@ -1,13 +1,15 @@
- import { AccessLevel, SingletonProto, Inject } from '@eggjs/tegg';
- import { EggLogger } from 'egg';
+ import { AccessLevel, Inject, SingletonProto, Logger } from 'egg';
  import pMap from 'p-map';
- import { BugVersion } from '../entity/BugVersion';
- import { PackageJSONType, PackageRepository } from '../../repository/PackageRepository';
- import { DistRepository } from '../../repository/DistRepository';
- import { getScopeAndName } from '../../common/PackageUtil';
- import { CacheService } from './CacheService';
- import { BUG_VERSIONS, LATEST_TAG } from '../../common/constants';
- import { BugVersionStore } from '../../common/adapter/BugVersionStore';
+ import { BugVersion } from '../entity/BugVersion.ts';
+ import type {
+   PackageJSONType,
+   PackageRepository,
+ } from '../../repository/PackageRepository.ts';
+ import type { DistRepository } from '../../repository/DistRepository.ts';
+ import { getScopeAndName } from '../../common/PackageUtil.ts';
+ import type { CacheService } from './CacheService.ts';
+ import { BUG_VERSIONS, LATEST_TAG } from '../../common/constants.ts';
+ import type { BugVersionStore } from '../../common/adapter/BugVersionStore.ts';

  @SingletonProto({
    accessLevel: AccessLevel.PUBLIC,
@@ -20,7 +22,7 @@ export class BugVersionService {
    private readonly distRepository: DistRepository;

    @Inject()
-   private readonly logger: EggLogger;
+   private readonly logger: Logger;

    @Inject()
    private readonly cacheService: CacheService;
@@ -33,66 +35,109 @@ export class BugVersionService {
      const pkg = await this.packageRepository.findPackage('', BUG_VERSIONS);
      if (!pkg) return;
      /* c8 ignore next 10 */
-     const tag = await this.packageRepository.findPackageTag(pkg!.packageId, LATEST_TAG);
+     const tag = await this.packageRepository.findPackageTag(
+       pkg.packageId,
+       LATEST_TAG
+     );
      if (!tag) return;
-     let bugVersion = this.bugVersionStore.getBugVersion(tag!.version);
+     let bugVersion = this.bugVersionStore.getBugVersion(tag.version);
      if (!bugVersion) {
-       const packageVersionJson = (await this.distRepository.findPackageVersionManifest(pkg!.packageId, tag!.version)) as PackageJSONType;
+       const packageVersionJson =
+         (await this.distRepository.findPackageVersionManifest(
+           pkg.packageId,
+           tag.version
+         )) as PackageJSONType;
        if (!packageVersionJson) return;
        const data = packageVersionJson.config?.['bug-versions'];
        bugVersion = new BugVersion(data || {});
-       this.bugVersionStore.setBugVersion(bugVersion, tag!.version);
+       this.bugVersionStore.setBugVersion(bugVersion, tag.version);
      }
      return bugVersion;
    }

    async cleanBugVersionPackageCaches(bugVersion: BugVersion) {
      const fullnames = bugVersion.listAllPackagesHasBugs();
-     await pMap(fullnames, async fullname => {
-       await this.cacheService.removeCache(fullname);
-     }, {
-       concurrency: 50,
-       stopOnError: false,
-     });
+     await pMap(
+       fullnames,
+       async fullname => {
+         await this.cacheService.removeCache(fullname);
+       },
+       {
+         concurrency: 50,
+         stopOnError: false,
+       }
+     );
    }

-   async fixPackageBugVersions(bugVersion: BugVersion, fullname: string, manifests: Record<string, any>) {
+   async fixPackageBugVersions(
+     bugVersion: BugVersion,
+     fullname: string,
+     // oxlint-disable-next-line typescript-eslint/no-explicit-any
+     manifests: Record<string, any>
+   ) {
      // If all versions of the package are unpublished (like pinyin-tool), manifests is undefined
      if (!manifests) return;
      for (const manifest of Object.values(manifests)) {
-       this.fixPackageBugVersionWithAllVersions(fullname, bugVersion, manifest, manifests);
+       this.fixPackageBugVersionWithAllVersions(
+         fullname,
+         bugVersion,
+         manifest,
+         manifests
+       );
      }
    }

-   async fixPackageBugVersion(bugVersion: BugVersion, fullname: string, manifest: any) {
+   async fixPackageBugVersion(
+     bugVersion: BugVersion,
+     fullname: string,
+     // oxlint-disable-next-line typescript-eslint/no-explicit-any
+     manifest: any
+   ) {
      const advice = bugVersion.fixVersion(fullname, manifest.version);
      if (!advice) {
        return manifest;
      }
-     const [ scope, name ] = getScopeAndName(fullname);
+     const [scope, name] = getScopeAndName(fullname);
      const pkg = await this.packageRepository.findPackage(scope, name);
      if (!pkg) {
        return manifest;
      }
-     const packageVersion = await this.packageRepository.findPackageVersion(pkg.packageId, advice.version);
+     const packageVersion = await this.packageRepository.findPackageVersion(
+       pkg.packageId,
+       advice.version
+     );
      if (!packageVersion) {
        return manifest;
      }
-     const fixedManifest = await this.distRepository.findPackageVersionManifest(packageVersion.packageId, advice.version);
+     const fixedManifest = await this.distRepository.findPackageVersionManifest(
+       packageVersion.packageId,
+       advice.version
+     );
      if (!fixedManifest) {
        return manifest;
      }
      return bugVersion.fixManifest(manifest, fixedManifest);
    }

-   private fixPackageBugVersionWithAllVersions(fullname: string, bugVersion: BugVersion, manifest: any, manifests: Record<string, any>) {
+   private fixPackageBugVersionWithAllVersions(
+     fullname: string,
+     bugVersion: BugVersion,
+     // oxlint-disable-next-line typescript-eslint/no-explicit-any
+     manifest: any,
+     // oxlint-disable-next-line typescript-eslint/no-explicit-any
+     manifests: Record<string, any>
+   ) {
      const advice = bugVersion.fixVersion(fullname, manifest.version);
      if (!advice) {
        return;
      }
      const fixedManifest = manifests[advice.version];
      if (!fixedManifest) {
-       this.logger.warn('[BugVersionService] not found pkg for %s@%s manifest', fullname, advice.version);
+       this.logger.warn(
+         '[BugVersionService] not found pkg for %s@%s manifest',
+         fullname,
+         advice.version
+       );
        return;
      }
      const newManifest = bugVersion.fixManifest(manifest, fixedManifest);
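For context on what this service consumes: the `bug-versions` package ships, in the `config['bug-versions']` field of its latest package.json, a map from package name to known-bad version to replacement advice, and `fixVersion` is essentially a lookup into that map. A sketch under that assumed shape; the concrete entry below is a made-up example, not real advisory data:

// Assumed shape of config['bug-versions'] in the bug-versions package.
type BugVersionData = Record<
  string,
  Record<string, { version: string; reason: string }>
>;

const data: BugVersionData = {
  lodash: {
    '4.17.20': { version: '4.17.21', reason: 'hypothetical example entry' },
  },
};

// fixVersion(fullname, version) answers: is this version known-bad, and
// which version should be served instead? undefined means nothing to fix.
function fixVersion(fullname: string, version: string) {
  return data[fullname]?.[version];
}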
@@ -1,13 +1,10 @@
- import {
-   AccessLevel,
-   SingletonProto,
-   Inject,
- } from '@eggjs/tegg';
- import { CacheAdapter } from '../../common/adapter/CacheAdapter';
- import { AbstractService } from '../../common/AbstractService';
- import { ChangesStreamTaskData } from '../entity/Task';
+ import { AccessLevel, Inject, SingletonProto } from 'egg';

- type PackageCacheAttribe = 'etag' | 'manifests';
+ import type { CacheAdapter } from '../../common/adapter/CacheAdapter.ts';
+ import { AbstractService } from '../../common/AbstractService.ts';
+ import type { ChangesStreamTaskData } from '../entity/Task.ts';
+
+ type PackageCacheAttribute = 'etag' | 'manifests';

  export type UpstreamRegistryInfo = {
    registry_name: string;
@@ -15,7 +12,7 @@ export type UpstreamRegistryInfo = {
    changes_stream_url: string;
  } & ChangesStreamTaskData;

- export type DownloadInfo = {
+ export interface DownloadInfo {
    today: number;
    yesterday: number;
    samedayLastweek: number;
@@ -25,9 +22,9 @@ export type DownloadInfo = {
    lastweek: number;
    lastmonth: number;
    lastyear: number;
- };
+ }

- export type TotalData = {
+ export interface TotalData {
    packageCount: number;
    packageVersionCount: number;
    lastPackage: string;
@@ -37,7 +34,7 @@ export type TotalData = {
    lastChangeId: number | bigint;
    cacheTime: string;
    upstreamRegistries: UpstreamRegistryInfo[];
- };
+ }
  const TOTAL_DATA_KEY = '__TOTAL_DATA__';

  @SingletonProto({
@@ -57,52 +54,72 @@ export class CacheService extends AbstractService {
      return await this.cacheAdapter.getBytes(key);
    }

-   public async savePackageEtagAndManifests(fullname: string, isFullManifests: boolean, etag: string, manifests: Buffer) {
+   public async savePackageEtagAndManifests(
+     fullname: string,
+     isFullManifests: boolean,
+     etag: string,
+     manifests: Buffer
+   ) {
      await Promise.all([
-       await this.cacheAdapter.set(this.cacheKey(fullname, isFullManifests, 'etag'), etag),
-       await this.cacheAdapter.setBytes(this.cacheKey(fullname, isFullManifests, 'manifests'), manifests),
+       this.cacheAdapter.set(
+         this.cacheKey(fullname, isFullManifests, 'etag'),
+         etag
+       ),
+       this.cacheAdapter.setBytes(
+         this.cacheKey(fullname, isFullManifests, 'manifests'),
+         manifests
+       ),
      ]);
    }
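The change to `savePackageEtagAndManifests` (and to `removeCache` below) is more than formatting: the old body awaited each call inside the array literal, so the operations ran strictly one after another and `Promise.all` only received already-settled values. Passing the raw promises lets them run concurrently. A self-contained illustration of the difference:

const delay = (ms: number) => new Promise<void>(r => setTimeout(r, ms));

async function main() {
  let start = Date.now();
  // Sequential: each `await` inside the array finishes before the next
  // element is even evaluated, so Promise.all gets plain values.
  await Promise.all([await delay(100), await delay(100)]);
  console.log('awaited in array:', Date.now() - start, 'ms'); // ~200ms

  start = Date.now();
  // Concurrent: both timers start immediately; Promise.all waits for both.
  await Promise.all([delay(100), delay(100)]);
  console.log('raw promises:', Date.now() - start, 'ms'); // ~100ms
}

main();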

    public async getTotalData() {
      const value = await this.cacheAdapter.get(TOTAL_DATA_KEY);
-     const totalData: TotalData = value ? JSON.parse(value) : {
-       packageCount: 0,
-       packageVersionCount: 0,
-       lastPackage: '',
-       lastPackageVersion: '',
-       download: {
-         today: 0,
-         thisweek: 0,
-         thismonth: 0,
-         thisyear: 0,
-         lastday: 0,
-         lastweek: 0,
-         lastmonth: 0,
-         lastyear: 0,
-       },
-       changesStream: {},
-       upstreamRegistries: [],
-       lastChangeId: 0,
-       cacheTime: '',
-     };
+     const totalData: TotalData = value
+       ? JSON.parse(value)
+       : {
+           packageCount: 0,
+           packageVersionCount: 0,
+           lastPackage: '',
+           lastPackageVersion: '',
+           download: {
+             today: 0,
+             thisweek: 0,
+             thismonth: 0,
+             thisyear: 0,
+             lastday: 0,
+             lastweek: 0,
+             lastmonth: 0,
+             lastyear: 0,
+           },
+           changesStream: {},
+           upstreamRegistries: [],
+           lastChangeId: 0,
+           cacheTime: '',
+         };
      return totalData;
    }

    public async saveTotalData(totalData: TotalData) {
-     return await this.cacheAdapter.set(TOTAL_DATA_KEY, JSON.stringify(totalData));
+     return await this.cacheAdapter.set(
+       TOTAL_DATA_KEY,
+       JSON.stringify(totalData)
+     );
    }

    public async removeCache(fullname: string) {
      await Promise.all([
-       await this.cacheAdapter.delete(this.cacheKey(fullname, true, 'etag')),
-       await this.cacheAdapter.delete(this.cacheKey(fullname, true, 'manifests')),
-       await this.cacheAdapter.delete(this.cacheKey(fullname, false, 'etag')),
-       await this.cacheAdapter.delete(this.cacheKey(fullname, false, 'manifests')),
+       this.cacheAdapter.delete(this.cacheKey(fullname, true, 'etag')),
+       this.cacheAdapter.delete(this.cacheKey(fullname, true, 'manifests')),
+       this.cacheAdapter.delete(this.cacheKey(fullname, false, 'etag')),
+       this.cacheAdapter.delete(this.cacheKey(fullname, false, 'manifests')),
      ]);
    }

-   private cacheKey(fullname: string, isFullManifests: boolean, attribute: PackageCacheAttribe) {
+   private cacheKey(
+     fullname: string,
+     isFullManifests: boolean,
+     attribute: PackageCacheAttribute
+   ) {
      return `${fullname}|${isFullManifests ? 'full' : 'abbr'}:${attribute}`;
    }
  }
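Reading the template in `cacheKey`, each package gets four cache entries, one per manifest flavor and attribute; the concrete keys below just trace the template with example inputs:

// cacheKey('lodash', true, 'etag')       => 'lodash|full:etag'
// cacheKey('lodash', false, 'manifests') => 'lodash|abbr:manifests'
// removeCache() therefore clears all four flavor/attribute combinations.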
@@ -1,26 +1,31 @@
- import os from 'os';
- import { setTimeout } from 'timers/promises';
+ import os from 'node:os';
+ import { setTimeout } from 'node:timers/promises';

  import {
    AccessLevel,
-   SingletonProto,
-   EggObjectFactory,
    Inject,
- } from '@eggjs/tegg';
- import { E500 } from 'egg-errors';
- import { PackageSyncerService, RegistryNotMatchError } from './PackageSyncerService';
- import { TaskService } from './TaskService';
- import { RegistryManagerService } from './RegistryManagerService';
- import { ScopeManagerService } from './ScopeManagerService';
- import { PackageRepository } from '../../repository/PackageRepository';
- import { TaskRepository } from '../../repository/TaskRepository';
- import { HOST_NAME, ChangesStreamTask, Task } from '../entity/Task';
- import { Registry } from '../entity/Registry';
- import { AbstractChangeStream } from '../../common/adapter/changesStream/AbstractChangesStream';
- import { getScopeAndName } from '../../common/PackageUtil';
- import { isTimeoutError } from '../../common/ErrorUtil';
- import { GLOBAL_WORKER } from '../../common/constants';
- import { TaskState, TaskType } from '../../common/enum/Task';
- import { AbstractService } from '../../common/AbstractService';
+   SingletonProto,
+   type EggObjectFactory,
+ } from 'egg';
+ import { E500 } from 'egg/errors';
+
+ import {
+   RegistryNotMatchError,
+   type PackageSyncerService,
+ } from './PackageSyncerService.ts';
+ import type { TaskService } from './TaskService.ts';
+ import type { RegistryManagerService } from './RegistryManagerService.ts';
+ import type { ScopeManagerService } from './ScopeManagerService.ts';
+ import type { PackageRepository } from '../../repository/PackageRepository.ts';
+ import type { TaskRepository } from '../../repository/TaskRepository.ts';
+ import { HOST_NAME, Task, type ChangesStreamTask } from '../entity/Task.ts';
+ import type { Registry } from '../entity/Registry.ts';
+ import { AbstractChangeStream } from '../../common/adapter/changesStream/AbstractChangesStream.ts';
+ import { getScopeAndName } from '../../common/PackageUtil.ts';
+ import { isTimeoutError } from '../../common/ErrorUtil.ts';
+ import { GLOBAL_WORKER } from '../../common/constants.ts';
+ import { TaskState, TaskType } from '../../common/enum/Task.ts';
+ import { AbstractService } from '../../common/AbstractService.ts';

  @SingletonProto({
    accessLevel: AccessLevel.PUBLIC,
@@ -33,9 +38,9 @@ export class ChangesStreamService extends AbstractService {
    @Inject()
    private readonly taskService: TaskService;
    @Inject()
-   private readonly registryManagerService : RegistryManagerService;
+   private readonly registryManagerService: RegistryManagerService;
    @Inject()
-   private readonly scopeManagerService : ScopeManagerService;
+   private readonly scopeManagerService: ScopeManagerService;
    @Inject()
    private readonly eggObjectFactory: EggObjectFactory;
    @Inject()
@@ -46,14 +51,22 @@ export class ChangesStreamService extends AbstractService {
    // `{registryName}_WORKER`: sync source for a custom scope
    public async findExecuteTask(): Promise<ChangesStreamTask | null> {
      const targetName = GLOBAL_WORKER;
-     const globalRegistryTask = await this.taskRepository.findTaskByTargetName(targetName, TaskType.ChangesStream);
+     const globalRegistryTask = await this.taskRepository.findTaskByTargetName(
+       targetName,
+       TaskType.ChangesStream
+     );
      // If no default sync source is configured, initialize one first
      if (!globalRegistryTask) {
-       await this.taskService.createTask(Task.createChangesStream(targetName), false);
+       await this.taskService.createTask(
+         Task.createChangesStream(targetName),
+         false
+       );
      }
      // Custom scopes are created manually by an admin
      // Picked up from the queue by TaskType.ChangesStream
-     return await this.taskService.findExecuteTask(TaskType.ChangesStream) as ChangesStreamTask;
+     return (await this.taskService.findExecuteTask(
+       TaskType.ChangesStream
+     )) as ChangesStreamTask;
    }

    public async suspendSync(exit = false) {
@@ -65,10 +78,16 @@ export class ChangesStreamService extends AbstractService {
      }
      const authorIp = os.hostname();
      // Suspend all changesStream tasks on the current machine
-     const tasks = await this.taskRepository.findTaskByAuthorIpAndType(authorIp, TaskType.ChangesStream);
+     const tasks = await this.taskRepository.findTaskByAuthorIpAndType(
+       authorIp,
+       TaskType.ChangesStream
+     );
      for (const task of tasks) {
        if (task.state === TaskState.Processing) {
-         this.logger.info('[ChangesStreamService.suspendSync:suspend] taskId: %s', task.taskId);
+         this.logger.info(
+           '[ChangesStreamService.suspendSync:suspend] taskId: %s',
+           task.taskId
+         );
          // 1. Update the task state to waiting
          // 2. Push it back into the queue so other machines can execute it
          await this.taskService.retryTask(task);
@@ -93,8 +112,14 @@ export class ChangesStreamService extends AbstractService {
      // allow disabling changesStream dynamically
      while (since && this.config.cnpmcore.enableChangesStream) {
        const { lastSince, taskCount } = await this.executeSync(since, task);
-       this.logger.info('[ChangesStreamService.executeTask:changes] since: %s => %s, %d new tasks, taskId: %s, updatedAt: %j',
-         since, lastSince, taskCount, task.taskId, task.updatedAt);
+       this.logger.info(
+         '[ChangesStreamService.executeTask:changes] since: %s => %s, %d new tasks, taskId: %s, updatedAt: %j',
+         since,
+         lastSince,
+         taskCount,
+         task.taskId,
+         task.updatedAt
+       );
        since = lastSince;
        if (taskCount === 0 && this.config.env === 'unittest') {
          break;
@@ -102,7 +127,10 @@ export class ChangesStreamService extends AbstractService {
        await setTimeout(this.config.cnpmcore.checkChangesStreamInterval);
      }
    } catch (err) {
-     this.logger.warn('[ChangesStreamService.executeTask:error] %s, exit now', err.message);
+     this.logger.warn(
+       '[ChangesStreamService.executeTask:error] %s, exit now',
+       err.message
+     );
      if (isTimeoutError(err)) {
        this.logger.warn(err);
      } else {
@@ -119,9 +147,13 @@ export class ChangesStreamService extends AbstractService {
      const { registryId } = task.data || {};
      // If a registryId already exists, fetch the registry directly from the DB
      if (registryId) {
-       const registry = await this.registryManagerService.findByRegistryId(registryId);
+       const registry =
+         await this.registryManagerService.findByRegistryId(registryId);
        if (!registry) {
-         this.logger.error('[ChangesStreamService.getRegistry:error] registryId %s not found', registryId);
+         this.logger.error(
+           '[ChangesStreamService.getRegistry:error] registryId %s not found',
+           registryId
+         );
          throw new E500(`invalid change stream registry: ${registryId}`);
        }
        return registry;
@@ -129,7 +161,7 @@ export class ChangesStreamService extends AbstractService {

      const registry = await this.registryManagerService.ensureDefaultRegistry();
      task.data = {
-       ...(task.data || {}),
+       ...task.data,
        registryId: registry.registryId,
      };
      await this.taskRepository.saveTask(task);
@@ -141,9 +173,15 @@ export class ChangesStreamService extends AbstractService {
    // 1. If the package already has a registryId, that registryId wins
    // 2. The package's scope belongs to the current registry
    // 3. The registry has no scope configured (treated as a general-purpose registry address) and the package's scope is not under another registry
-   public async needSync(registry: Registry, fullname: string): Promise<boolean> {
-     const [ scopeName, name ] = getScopeAndName(fullname);
-     const packageEntity = await this.packageRepository.findPackage(scopeName, name);
+   public async needSync(
+     registry: Registry,
+     fullname: string
+   ): Promise<boolean> {
+     const [scopeName, name] = getScopeAndName(fullname);
+     const packageEntity = await this.packageRepository.findPackage(
+       scopeName,
+       name
+     );

      // If the package does not exist and sync mode is 'exist', do not sync
      if (this.config.cnpmcore.syncMode === 'exist' && !packageEntity) {
@@ -155,18 +193,24 @@ export class ChangesStreamService extends AbstractService {
      }

      const scope = await this.scopeManagerService.findByName(scopeName);
-     const inCurrentRegistry = scope && scope?.registryId === registry.registryId;
+     const inCurrentRegistry =
+       scope && scope?.registryId === registry.registryId;
      if (inCurrentRegistry) {
        return true;
      }

-     const registryScopeCount = await this.scopeManagerService.countByRegistryId(registry.registryId);
+     const registryScopeCount = await this.scopeManagerService.countByRegistryId(
+       registry.registryId
+     );
      // The package has no scope info and the registry has no scopes: it is a general-purpose registry, so sync
      return !scope && !registryScopeCount;
    }
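Pulling `needSync` together: a change is synced when the package's scope is bound to the emitting registry, or when neither the package nor the registry carries any scope binding (the general-purpose registry case); `exist` mode additionally requires the package to already be known locally. A condensed sketch of that decision, ignoring the per-package registryId override mentioned in comment 1 above; the input fields are stand-ins for the repository lookups in the method:

interface NeedSyncInput {
  syncMode: 'all' | 'exist';
  packageExists: boolean;     // packageRepository.findPackage(...) found it
  scopeRegistryId?: string;   // registry bound to the package's scope, if any
  registryId: string;         // registry emitting the change
  registryScopeCount: number; // scopes configured under that registry
}

function needSync(i: NeedSyncInput): boolean {
  if (i.syncMode === 'exist' && !i.packageExists) return false;
  if (i.scopeRegistryId === i.registryId) return true; // scope owned here
  // generic registry fallback: no scope on either side
  return !i.scopeRegistryId && i.registryScopeCount === 0;
}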
    public async getInitialSince(task: ChangesStreamTask): Promise<string> {
      const registry = await this.prepareRegistry(task);
-     const changesStreamAdapter = await this.eggObjectFactory.getEggObject(AbstractChangeStream, registry.type) as AbstractChangeStream;
+     const changesStreamAdapter = (await this.eggObjectFactory.getEggObject(
+       AbstractChangeStream,
+       registry.type
+     )) as AbstractChangeStream;
      const since = await changesStreamAdapter.getInitialSince(registry);
      return since;
    }
@@ -175,7 +219,10 @@ export class ChangesStreamService extends AbstractService {
    // Update the task's since and taskCount related fields
    public async executeSync(since: string, task: ChangesStreamTask) {
      const registry = await this.prepareRegistry(task);
-     const changesStreamAdapter = await this.eggObjectFactory.getEggObject(AbstractChangeStream, registry.type) as AbstractChangeStream;
+     const changesStreamAdapter = (await this.eggObjectFactory.getEggObject(
+       AbstractChangeStream,
+       registry.type
+     )) as AbstractChangeStream;
      let taskCount = 0;
      let lastSince = since;

@@ -201,17 +248,29 @@ export class ChangesStreamService extends AbstractService {
          skipDependencies: true,
          tips,
        });
-       this.logger.info('[ChangesStreamService.createTask:success] fullname: %s, task: %s, tips: %s',
-         fullname, task.id, tips);
+       this.logger.info(
+         '[ChangesStreamService.createTask:success] fullname: %s, task: %s, tips: %s',
+         fullname,
+         task.id,
+         tips
+       );
      } catch (err) {
        if (err instanceof RegistryNotMatchError) {
-         this.logger.warn('[ChangesStreamService.executeSync:skip] fullname: %s, error: %s, tips: %s',
-           fullname, err, tips);
+         this.logger.warn(
+           '[ChangesStreamService.executeSync:skip] fullname: %s, error: %s, tips: %s',
+           fullname,
+           err,
+           tips
+         );
          continue;
        }
        // only log the error, so the changes stream keeps reading
-       this.logger.error('[ChangesStreamService.executeSync:error] fullname: %s, error: %s, tips: %s',
-         fullname, err, tips);
+       this.logger.error(
+         '[ChangesStreamService.executeSync:error] fullname: %s, error: %s, tips: %s',
+         fullname,
+         err,
+         tips
+       );
        this.logger.error(err);
        continue;
      }
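Both `getInitialSince` and `executeSync` resolve their adapter through `eggObjectFactory.getEggObject(AbstractChangeStream, registry.type)`: one abstract class, one implementation per upstream registry type. A rough sketch of that dispatch pattern outside tegg; the registry type names and method shape here are assumptions for illustration, not cnpmcore's actual adapters:

abstract class ChangeStream {
  abstract fetchChanges(since: string): Promise<{ fullname: string; seq: string }[]>;
}

class CouchDbChangeStream extends ChangeStream {
  async fetchChanges(since: string) {
    // e.g. poll a CouchDB-style /_changes?since=... feed
    return [];
  }
}

class CnpmChangeStream extends ChangeStream {
  async fetchChanges(since: string) {
    // e.g. poll a cnpmcore-style changes endpoint
    return [];
  }
}

// Stand-in for eggObjectFactory.getEggObject(AbstractChangeStream, registry.type).
const adapters: Record<string, ChangeStream> = {
  npm: new CouchDbChangeStream(),
  cnpmcore: new CnpmChangeStream(),
};

function getChangeStream(registryType: string): ChangeStream {
  const adapter = adapters[registryType];
  if (!adapter) throw new Error(`no changes stream adapter for ${registryType}`);
  return adapter;
}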
@@ -1,16 +1,17 @@
- import { AccessLevel, SingletonProto, Inject } from '@eggjs/tegg';
- import { AbstractService } from '../../common/AbstractService';
- import { HookType } from '../../common/enum/Hook';
- import { TaskState } from '../../common/enum/Task';
- import { HookEvent } from '../entity/HookEvent';
- import { CreateHookTask, Task } from '../entity/Task';
- import { HookRepository } from '../../repository/HookRepository';
- import { PackageRepository } from '../../repository/PackageRepository';
+ import { AccessLevel, Inject, SingletonProto } from 'egg';
  import pMap from 'p-map';
- import { Hook } from '../entity/Hook';
- import { TaskService } from './TaskService';
- import { isoNow } from '../../common/LogUtil';
- import { getScopeAndName } from '../../common/PackageUtil';
+
+ import { AbstractService } from '../../common/AbstractService.ts';
+ import { HookType } from '../../common/enum/Hook.ts';
+ import { TaskState } from '../../common/enum/Task.ts';
+ import { Task, type CreateHookTask } from '../entity/Task.ts';
+ import type { HookEvent } from '../entity/HookEvent.ts';
+ import type { HookRepository } from '../../repository/HookRepository.ts';
+ import type { PackageRepository } from '../../repository/PackageRepository.ts';
+ import type { Hook } from '../entity/Hook.ts';
+ import type { TaskService } from './TaskService.ts';
+ import { isoNow } from '../../common/LogUtil.ts';
+ import { getScopeAndName } from '../../common/PackageUtil.ts';

  @SingletonProto({
    accessLevel: AccessLevel.PUBLIC,
@@ -27,10 +28,14 @@ export class CreateHookTriggerService extends AbstractService {

    async executeTask(task: CreateHookTask): Promise<void> {
      const { hookEvent } = task.data;
-     const [ scope, name ] = getScopeAndName(hookEvent.fullname);
+     const [scope, name] = getScopeAndName(hookEvent.fullname);
      const pkg = await this.packageRepository.findPackage(scope, name);
      if (!pkg) {
-       await this.taskService.finishTask(task, TaskState.Success, `[${isoNow()}][Hooks] package ${hookEvent.fullname} not exists`);
+       await this.taskService.finishTask(
+         task,
+         TaskState.Success,
+         `[${isoNow()}][Hooks] package ${hookEvent.fullname} not exists`
+       );
        return;
      }

@@ -38,41 +43,97 @@ export class CreateHookTriggerService extends AbstractService {
      `[${isoNow()}][Hooks] Start Create Trigger for ${pkg.fullname} ${task.data.hookEvent.changeId}`,
      `[${isoNow()}][Hooks] change content ${JSON.stringify(task.data.hookEvent.change)}`,
    ];
-   await this.taskService.finishTask(task, TaskState.Processing, startLog.join('\n'));
+   await this.taskService.finishTask(
+     task,
+     TaskState.Processing,
+     startLog.join('\n')
+   );

    try {
-     await this.taskService.appendTaskLog(task, `[${isoNow()}][Hooks] PushHooks to ${HookType.Package} ${pkg.fullname}\n`);
-     await this.createTriggerByMethod(task, HookType.Package, pkg.fullname, hookEvent);
-     await this.taskService.appendTaskLog(task, `[${isoNow()}][Hooks] PushHooks to ${HookType.Scope} ${pkg.scope}\n`);
-     await this.createTriggerByMethod(task, HookType.Scope, pkg.scope, hookEvent);
+     await this.taskService.appendTaskLog(
+       task,
+       `[${isoNow()}][Hooks] PushHooks to ${HookType.Package} ${pkg.fullname}\n`
+     );
+     await this.createTriggerByMethod(
+       task,
+       HookType.Package,
+       pkg.fullname,
+       hookEvent
+     );
+     await this.taskService.appendTaskLog(
+       task,
+       `[${isoNow()}][Hooks] PushHooks to ${HookType.Scope} ${pkg.scope}\n`
+     );
+     await this.createTriggerByMethod(
+       task,
+       HookType.Scope,
+       pkg.scope,
+       hookEvent
+     );

-     const maintainers = await this.packageRepository.listPackageMaintainers(pkg.packageId);
+     const maintainers = await this.packageRepository.listPackageMaintainers(
+       pkg.packageId
+     );
      for (const maintainer of maintainers) {
-       await this.taskService.appendTaskLog(task, `[${isoNow()}][Hooks] PushHooks to ${HookType.Owner} ${maintainer.name}\n`);
-       await this.createTriggerByMethod(task, HookType.Owner, maintainer.name, hookEvent);
+       await this.taskService.appendTaskLog(
+         task,
+         `[${isoNow()}][Hooks] PushHooks to ${HookType.Owner} ${maintainer.name}\n`
+       );
+       await this.createTriggerByMethod(
+         task,
+         HookType.Owner,
+         maintainer.name,
+         hookEvent
+       );
      }
-     await this.taskService.finishTask(task, TaskState.Success, `[${isoNow()}][Hooks] create trigger succeed \n`);
+     await this.taskService.finishTask(
+       task,
+       TaskState.Success,
+       `[${isoNow()}][Hooks] create trigger succeed \n`
+     );
    } catch (e) {
-     e.message = 'create trigger failed: ' + e.message;
-     await this.taskService.finishTask(task, TaskState.Fail, `[${isoNow()}][Hooks] ${e.stack} \n`);
+     e.message = `create trigger failed: ${e.message}`;
+     await this.taskService.finishTask(
+       task,
+       TaskState.Fail,
+       `[${isoNow()}][Hooks] ${e.stack} \n`
+     );
      return;
    }
  }

- private async createTriggerByMethod(task: Task, type: HookType, name: string, hookEvent: HookEvent) {
+ private async createTriggerByMethod(
+   task: Task,
+   type: HookType,
+   name: string,
+   hookEvent: HookEvent
+ ) {
    let hooks = await this.hookRepository.listHooksByTypeAndName(type, name);
-   while (hooks.length) {
+   while (hooks.length > 0) {
      await this.createTriggerTasks(hooks, hookEvent);
-     hooks = await this.hookRepository.listHooksByTypeAndName(type, name, hooks[hooks.length - 1].id);
-     await this.taskService.appendTaskLog(task,
-       `[${isoNow()}][Hooks] PushHooks to ${type} ${name} ${hooks.length} \n`);
+     hooks = await this.hookRepository.listHooksByTypeAndName(
+       type,
+       name,
+       hooks[hooks.length - 1].id
+     );
+     await this.taskService.appendTaskLog(
+       task,
+       `[${isoNow()}][Hooks] PushHooks to ${type} ${name} ${hooks.length} \n`
+     );
    }
  }

- private async createTriggerTasks(hooks: Array<Hook>, hookEvent: HookEvent) {
-   await pMap(hooks, async hook => {
-     const triggerHookTask = Task.createTriggerHookTask(hookEvent, hook.hookId);
-     await this.taskService.createTask(triggerHookTask, true);
-   }, { concurrency: 5 });
+ private async createTriggerTasks(hooks: Hook[], hookEvent: HookEvent) {
+   await pMap(
+     hooks,
+     async hook => {
+       const triggerHookTask = Task.createTriggerHookTask(
+         hookEvent,
+         hook.hookId
+       );
+       await this.taskService.createTask(triggerHookTask, true);
+     },
+     { concurrency: 5 }
+   );
  }
}
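`createTriggerByMethod` pages through hooks with keyset pagination: each round re-queries with the last seen row id instead of an offset, so concurrently inserted rows cannot shift the window the way OFFSET paging would. The same loop in isolation; `listHooks` is a stand-in for `hookRepository.listHooksByTypeAndName(type, name, sinceId)`:

interface HookRow { id: number; hookId: string; }

async function forEachHookBatch(
  listHooks: (sinceId?: number) => Promise<HookRow[]>,
  handle: (batch: HookRow[]) => Promise<void>
) {
  let hooks = await listHooks();
  while (hooks.length > 0) {
    await handle(hooks);
    // Keyset cursor: fetch rows with id greater than the last one seen.
    hooks = await listHooks(hooks[hooks.length - 1].id);
  }
}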
@@ -1,7 +1,10 @@
- import { ContextEventBus, Inject } from '@eggjs/tegg';
- import { Advice, IAdvice } from '@eggjs/tegg/aop';
-
- @Advice()
+ import { Inject, ObjectInitType, type ContextEventBus } from 'egg';
+ import { Advice, type IAdvice } from 'egg/aop';
+
+ @Advice({
+   initType: ObjectInitType.CONTEXT,
+ })
  export class EventCorkAdvice implements IAdvice {
    @Inject()
    private eventBus: ContextEventBus;
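For readers unfamiliar with this advice: cnpmcore uses it to buffer context events while the advised method runs, presumably via the cork/uncork buffering pair on tegg's ContextEventBus, so events only fire once the surrounding work completes; `initType: ObjectInitType.CONTEXT` pins the advice to the request context so it corks the right bus. A minimal sketch of the idea, with the lifecycle method names and the cork()/uncork() API assumed rather than copied from the file:

class EventCorkAdviceSketch {
  constructor(private eventBus: { cork(): void; uncork(): void }) {}

  async beforeCall() {
    this.eventBus.cork(); // assumed API: start buffering emitted events
  }

  async afterFinally() {
    this.eventBus.uncork(); // flush buffered events, even if the call threw
  }
}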
@@ -1,8 +1,8 @@
- import { SingletonProto, AccessLevel, Inject } from '@eggjs/tegg';
- import { EggLogger } from 'egg';
+ import { AccessLevel, Inject, SingletonProto, Logger } from 'egg';
  import pMap from 'p-map';
- import { PackageVersionRepository } from '../../repository/PackageVersionRepository';
- import { PaddingSemVer } from '../entity/PaddingSemVer';
+
+ import type { PackageVersionRepository } from '../../repository/PackageVersionRepository.ts';
+ import { PaddingSemVer } from '../entity/PaddingSemVer.ts';

  @SingletonProto({
    accessLevel: AccessLevel.PUBLIC,
@@ -12,22 +12,35 @@ export class FixNoPaddingVersionService {
    private readonly packageVersionRepository: PackageVersionRepository;

    @Inject()
-   private readonly logger: EggLogger;
+   private readonly logger: Logger;

    async fixPaddingVersion(id?: number): Promise<void> {
      // eslint-disable-next-line no-constant-condition
      while (true) {
-       const packageVersions = await this.packageVersionRepository.findHaveNotPaddingVersion(id);
+       const packageVersions =
+         await this.packageVersionRepository.findHaveNotPaddingVersion(id);
        if (packageVersions.length === 0) {
          break;
        }
-       id = packageVersions[packageVersions.length - 1].id as unknown as number + 1;
-       this.logger.info('[FixNoPaddingVersionService] fix padding version ids %j', packageVersions.map(t => t.id));
+       const lastVersion = packageVersions[packageVersions.length - 1];
+       id =
+         (lastVersion.id as unknown as number) +
+         1;
+       this.logger.info(
+         '[FixNoPaddingVersionService] fix padding version ids %j',
+         packageVersions.map(t => t.id)
+       );

-       await pMap(packageVersions, async packageVersion => {
-         const paddingSemver = new PaddingSemVer(packageVersion.version);
-         await this.packageVersionRepository.fixPaddingVersion(packageVersion.packageVersionId, paddingSemver);
-       }, { concurrency: 30 });
+       await pMap(
+         packageVersions,
+         async packageVersion => {
+           const paddingSemver = new PaddingSemVer(packageVersion.version);
+           await this.packageVersionRepository.fixPaddingVersion(
+             packageVersion.packageVersionId,
+             paddingSemver
+           );
+         },
+         { concurrency: 30 }
+       );
      }
    }
  }
Some files were not shown because too many files have changed in this diff.