refactor: use oxlint instead of eslint (#772)

say goodbye to eslint
fengmk2
2025-03-13 23:31:13 +08:00
committed by GitHub
parent ed4d5d07ad
commit ffe723e65f
199 changed files with 11401 additions and 5140 deletions

View File

@@ -1,7 +0,0 @@
app/proxy*
**/*.d.ts
node_modules/
dist/
coverage/
mocks/
.react_entries/

View File

@@ -1,6 +0,0 @@
{
"extends": [
"eslint-config-egg/typescript",
"eslint-config-egg/lib/rules/enforce-node-prefix"
]
}

.husky/pre-commit (new file, 1 line)
View File

@@ -0,0 +1 @@
npx lint-staged

View File

@@ -1,6 +1,52 @@
{
"plugins": ["import"],
"$schema": "./node_modules/oxlint/configuration_schema.json",
"env": {
"node": true,
"mocha": true
},
"categories": {
"correctness": "error",
"perf": "error"
},
"plugins": [
"import",
"typescript",
"unicorn",
"jsdoc",
"node",
"promise",
"oxc"
],
"rules": {
"import/no-cycle": "error"
// eslint
"constructor-super": "error",
"getter-return": "error",
"no-undef": "error",
"no-unreachable": "error",
"no-var": "error",
"no-eq-null": "error",
"no-await-in-loop": "allow",
"eqeqeq": ["error", "smart"],
// import
"import/no-cycle": "error",
"import/no-anonymous-default-export": "error",
"import/no-namespace": "error",
"import/named": "error",
"import/export": "error",
// promise
"promise/no-return-wrap": "error",
"promise/param-names": "error",
"promise/prefer-await-to-callbacks": "error",
"promise/prefer-await-to-then": "error",
"promise/prefer-catch": "error",
"promise/no-return-in-finally": "error",
// unicorn
"unicorn/error-message": "error",
// "unicorn/no-null": "error",
"unicorn/throw-new-error": "error",
// oxc
"oxc/no-map-spread": "error",
// typescript
"typescript/consistent-type-imports": "error"
}
}
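Most of the TypeScript churn in the rest of this commit follows from the rules enabled above, in particular `typescript/consistent-type-imports`, `no-var`, and `eqeqeq`. A minimal sketch of the kind of code this config is meant to catch; the file and identifiers are hypothetical, not taken from the repository:

```ts
// example.ts — hypothetical, for illustration only
import type { EggLogger } from 'egg'; // `typescript/consistent-type-imports`: type-only imports use `import type`

export function logCount(logger: EggLogger, count?: number) {
  // `no-var` would flag `var total = ...`; `eqeqeq` ("smart") expects `===` here
  const total = count === undefined ? 0 : count;
  logger.info('count: %d', total);
  return total;
}
```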

.prettierrc (new file, 6 lines)
View File

@@ -0,0 +1,6 @@
{
"singleQuote": true,
"trailingComma": "es5",
"tabWidth": 2,
"arrowParens": "avoid"
}
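A rough illustration of what these Prettier options change (a hedged sketch; the snippet is made up, not from the repo): `singleQuote` rewrites double-quoted strings, `arrowParens: "avoid"` drops the parentheses around a single arrow parameter, and `trailingComma: "es5"` adds trailing commas to multi-line arrays and object literals but not to function arguments.

```ts
// Before `prettier --write` (hypothetical input):
//   const names = ["darwin", "linux", "win32"];
//   const upper = names.map((n) => n.toUpperCase());
// After formatting with the config above:
const names = ['darwin', 'linux', 'win32'];
const upper = names.map(n => n.toUpperCase());
console.log(upper);
```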

View File

@@ -45,9 +45,7 @@
"extends": "@eggjs/tsconfig",
"compilerOptions": {
"baseUrl": "./",
"moduleResolution": "NodeNext",
"target": "ES2020",
"module": "Node16"
"target": "ES2021"
}
}

app.ts (10 lines changed)
View File

@@ -1,6 +1,6 @@
import path from 'node:path';
import { readFile } from 'node:fs/promises';
import { Application, ILifecycleBoot } from 'egg';
import type { Application, ILifecycleBoot } from 'egg';
import { ChangesStreamService } from './app/core/service/ChangesStreamService.js';
declare module 'egg' {
@@ -34,13 +34,17 @@ export default class CnpmcoreAppHook implements ILifecycleBoot {
// ready binary.html and replace registry
const filepath = path.join(this.app.baseDir, 'app/port/binary.html');
const text = await readFile(filepath, 'utf-8');
this.app.binaryHTML = text.replace('{{registry}}', this.app.config.cnpmcore.registry);
this.app.binaryHTML = text.replace(
'{{registry}}',
this.app.config.cnpmcore.registry
);
}
// 应用退出时执行
// 需要暂停当前执行的 changesStream task
async beforeClose() {
const changesStreamService = await this.app.getEggObject(ChangesStreamService);
const changesStreamService =
await this.app.getEggObject(ChangesStreamService);
await changesStreamService.suspendSync(true);
}
}

View File

@@ -1,10 +1,5 @@
import {
Inject,
} from '@eggjs/tegg';
import {
EggAppConfig,
EggLogger,
} from 'egg';
import { Inject } from '@eggjs/tegg';
import type { EggAppConfig, EggLogger } from 'egg';
export abstract class AbstractService {
@Inject()

View File

@@ -1,25 +1,25 @@
const TimeoutErrorNames = [
const TimeoutErrorNames = new Set([
'HttpClientRequestTimeoutError',
'HttpClientConnectTimeoutError',
'ConnectionError',
'ConnectTimeoutError',
'BodyTimeoutError',
'ResponseTimeoutError',
];
]);
export function isTimeoutError(err: Error) {
if (TimeoutErrorNames.includes(err.name)) {
if (TimeoutErrorNames.has(err.name)) {
return true;
}
if (err instanceof AggregateError && err.errors) {
for (const subError of err.errors) {
if (TimeoutErrorNames.includes(subError.name)) {
if (TimeoutErrorNames.has(subError.name)) {
return true;
}
}
}
if ('cause' in err && err.cause instanceof Error) {
if (TimeoutErrorNames.includes(err.cause.name)) {
if (TimeoutErrorNames.has(err.cause.name)) {
return true;
}
}
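A small usage sketch of the Set-based check above (illustrative only; the import path is assumed and the wrapped error is made up, but the error name comes from the TimeoutErrorNames list):

```ts
import { isTimeoutError } from './ErrorUtil.js'; // path assumed; the module name matches the code above

const inner = new Error('connect timed out');
inner.name = 'ConnectTimeoutError'; // one of the names in TimeoutErrorNames
const wrapped = new AggregateError([inner], 'request failed');

// The `errors` loop above finds the inner name via TimeoutErrorNames.has()
console.log(isTimeoutError(wrapped)); // true
console.log(isTimeoutError(new Error('boom'))); // false: no matching name, errors, or cause
```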

View File

@@ -4,14 +4,14 @@ import { setTimeout } from 'node:timers/promises';
import path from 'node:path';
import url from 'node:url';
import { randomBytes } from 'node:crypto';
import { EggContextHttpClient, HttpClientResponse } from 'egg';
import type { EggContextHttpClient, HttpClientResponse } from 'egg';
import mime from 'mime-types';
import dayjs from './dayjs.js';
interface DownloadToTempfileOptionalConfig {
retries?: number,
ignoreDownloadStatuses?: number[],
remoteAuthToken?: string
retries?: number;
ignoreDownloadStatuses?: number[];
remoteAuthToken?: string;
}
export async function createTempDir(dataDir: string, dirname?: string) {
@@ -28,17 +28,29 @@ export async function createTempfile(dataDir: string, filename: string) {
const tmpdir = await createTempDir(dataDir);
// The filename is a URL (from dist.tarball), which needs to be truncated, (`getconf NAME_MAX /` # max filename length: 255 bytes)
// https://github.com/cnpm/cnpmjs.org/pull/1345
const tmpfile = path.join(tmpdir, `${randomBytes(10).toString('hex')}-${path.basename(url.parse(filename).pathname!)}`);
const tmpfile = path.join(
tmpdir,
`${randomBytes(10).toString('hex')}-${path.basename(url.parse(filename).pathname!)}`
);
return tmpfile;
}
export async function downloadToTempfile(httpclient: EggContextHttpClient,
dataDir: string, url: string, optionalConfig?: DownloadToTempfileOptionalConfig) {
export async function downloadToTempfile(
httpclient: EggContextHttpClient,
dataDir: string,
url: string,
optionalConfig?: DownloadToTempfileOptionalConfig
) {
let retries = optionalConfig?.retries || 3;
let lastError: any;
while (retries > 0) {
try {
return await _downloadToTempfile(httpclient, dataDir, url, optionalConfig);
return await _downloadToTempfile(
httpclient,
dataDir,
url,
optionalConfig
);
} catch (err: any) {
if (err.name === 'DownloadNotFoundError') throw err;
lastError = err;
@@ -46,7 +58,8 @@ export async function downloadToTempfile(httpclient: EggContextHttpClient,
retries--;
if (retries > 0) {
// sleep 1s ~ 4s in random
const delay = process.env.NODE_ENV === 'test' ? 1 : 1000 + Math.random() * 4000;
const delay =
process.env.NODE_ENV === 'test' ? 1 : 1000 + Math.random() * 4000;
await setTimeout(delay);
}
}
@@ -57,8 +70,12 @@ export interface Tempfile {
headers: HttpClientResponse['res']['headers'];
timing: HttpClientResponse['res']['timing'];
}
async function _downloadToTempfile(httpclient: EggContextHttpClient,
dataDir: string, url: string, optionalConfig?: DownloadToTempfileOptionalConfig): Promise<Tempfile> {
async function _downloadToTempfile(
httpclient: EggContextHttpClient,
dataDir: string,
url: string,
optionalConfig?: DownloadToTempfileOptionalConfig
): Promise<Tempfile> {
const tmpfile = await createTempfile(dataDir, url);
const writeStream = createWriteStream(tmpfile);
try {
@@ -68,14 +85,18 @@ async function _downloadToTempfile(httpclient: EggContextHttpClient,
if (optionalConfig?.remoteAuthToken) {
requestHeaders.authorization = `Bearer ${optionalConfig.remoteAuthToken}`;
}
const { status, headers, res } = await httpclient.request(url, {
const { status, headers, res } = (await httpclient.request(url, {
timeout: 60000 * 10,
headers: requestHeaders,
writeStream,
timing: true,
followRedirect: true,
}) as HttpClientResponse;
if (status === 404 || (optionalConfig?.ignoreDownloadStatuses && optionalConfig.ignoreDownloadStatuses.includes(status))) {
})) as HttpClientResponse;
if (
status === 404 ||
(optionalConfig?.ignoreDownloadStatuses &&
optionalConfig.ignoreDownloadStatuses.includes(status))
) {
const err = new Error(`Not found, status(${status})`);
err.name = 'DownloadNotFoundError';
throw err;
@@ -114,7 +135,11 @@ export function mimeLookup(filepath: string) {
const filename = path.basename(filepath).toLowerCase();
if (filename.endsWith('.ts')) return PLAIN_TEXT;
if (filename.endsWith('.lock')) return PLAIN_TEXT;
return mime.lookup(filename) ||
WHITE_FILENAME_CONTENT_TYPES[filename as keyof typeof WHITE_FILENAME_CONTENT_TYPES] ||
DEFAULT_CONTENT_TYPE;
return (
mime.lookup(filename) ||
WHITE_FILENAME_CONTENT_TYPES[
filename as keyof typeof WHITE_FILENAME_CONTENT_TYPES
] ||
DEFAULT_CONTENT_TYPE
);
}

View File

@@ -1,11 +1,14 @@
import { createReadStream } from 'node:fs';
import { Readable } from 'node:stream';
import { pipeline } from 'node:stream/promises';
import * as ssri from 'ssri';
import type { HashLike } from 'ssri';
import { fromData, fromStream } from 'ssri';
// @ts-expect-error type error
import tar from '@fengmk2/tar';
import type { AuthorType, PackageJSONType } from '../repository/PackageRepository.js';
import type {
AuthorType,
PackageJSONType,
} from '../repository/PackageRepository.js';
// /@cnpm%2ffoo
// /@cnpm%2Ffoo
@@ -13,13 +16,14 @@ import type { AuthorType, PackageJSONType } from '../repository/PackageRepositor
// /foo
// name max length is 214 chars
// https://www.npmjs.com/package/path-to-regexp#custom-matching-parameters
export const FULLNAME_REG_STRING = '@[^/]{1,220}/[^/]{1,220}|@[^%]+%2[fF][^/]{1,220}|[^@/]{1,220}';
export const FULLNAME_REG_STRING =
'@[^/]{1,220}/[^/]{1,220}|@[^%]+%2[fF][^/]{1,220}|[^@/]{1,220}';
export function getScopeAndName(fullname: string): string[] {
if (fullname.startsWith('@')) {
return fullname.split('/', 2);
}
return [ '', fullname ];
return ['', fullname];
}
export function getFullname(scope: string, name: string): string {
@@ -35,14 +39,14 @@ export function getPrefixedName(prefix: string, username: string): string {
}
export async function calculateIntegrity(contentOrFile: Uint8Array | string) {
let integrityObj;
let integrityObj: HashLike;
if (typeof contentOrFile === 'string') {
integrityObj = await ssri.fromStream(createReadStream(contentOrFile), {
algorithms: [ 'sha512', 'sha1' ],
integrityObj = await fromStream(createReadStream(contentOrFile), {
algorithms: ['sha512', 'sha1'],
});
} else {
integrityObj = ssri.fromData(contentOrFile, {
algorithms: [ 'sha512', 'sha1' ],
integrityObj = fromData(contentOrFile, {
algorithms: ['sha512', 'sha1'],
});
}
const integrity = integrityObj.sha512[0].toString() as string;
@@ -50,7 +54,12 @@ export async function calculateIntegrity(contentOrFile: Uint8Array | string) {
return { integrity, shasum };
}
export function formatTarball(registry: string, scope: string, name: string, version: string) {
export function formatTarball(
registry: string,
scope: string,
name: string,
version: string
) {
const fullname = getFullname(scope, name);
return `${registry}/${fullname}/-/${name}-${version}.tgz`;
}
@@ -69,7 +78,9 @@ export function detectInstallScript(manifest: any) {
}
/** 判断一个版本压缩包中是否包含 npm-shrinkwrap.json */
export async function hasShrinkWrapInTgz(contentOrFile: Uint8Array | string): Promise<boolean> {
export async function hasShrinkWrapInTgz(
contentOrFile: Uint8Array | string
): Promise<boolean> {
let readable: Readable;
if (typeof contentOrFile === 'string') {
readable = createReadStream(contentOrFile);
@@ -102,12 +113,17 @@ export async function hasShrinkWrapInTgz(contentOrFile: Uint8Array | string): Pr
if (e.code === 'ABORT_ERR') {
return hasShrinkWrap;
}
throw Object.assign(new Error('[hasShrinkWrapInTgz] Fail to parse input file'), { cause: e });
throw Object.assign(
new Error('[hasShrinkWrapInTgz] Fail to parse input file'),
{ cause: e }
);
}
}
/** 写入 ES 时,格式化 author */
export function formatAuthor(author: string | AuthorType | undefined): AuthorType | undefined {
export function formatAuthor(
author: string | AuthorType | undefined
): AuthorType | undefined {
if (author === undefined) {
return author;
}
@@ -119,10 +135,12 @@ export function formatAuthor(author: string | AuthorType | undefined): AuthorTyp
return author;
}
export async function extractPackageJSON(tarballBytes: Buffer): Promise<PackageJSONType> {
export async function extractPackageJSON(
tarballBytes: Buffer
): Promise<PackageJSONType> {
return new Promise((resolve, reject) => {
Readable.from(tarballBytes)
.pipe(tar.t({
Readable.from(tarballBytes).pipe(
tar.t({
filter: (name: string) => name === 'package/package.json',
onentry: async (entry: any) => {
const chunks: Buffer[] = [];
@@ -136,6 +154,7 @@ export async function extractPackageJSON(tarballBytes: Buffer): Promise<PackageJ
reject(new Error('Error parsing package.json'));
}
},
}));
})
);
});
}
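A short, hedged usage sketch of the helpers reformatted above. The import path and registry URL are assumptions, and it assumes `getFullname` joins scope and name with a `/`, mirroring how `getScopeAndName` splits them:

```ts
import { getScopeAndName, formatTarball } from './PackageUtil.js'; // path assumed

// getScopeAndName splits a scoped name on the first '/'
console.log(getScopeAndName('@cnpm/foo')); // ['@cnpm', 'foo']
console.log(getScopeAndName('foo')); //      ['', 'foo']

// formatTarball builds `${registry}/${fullname}/-/${name}-${version}.tgz`
console.log(formatTarball('https://registry.example.com', '@cnpm', 'foo', '1.0.0'));
// => 'https://registry.example.com/@cnpm/foo/-/foo-1.0.0.tgz' (assuming getFullname joins scope/name)
```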

View File

@@ -1,4 +1,4 @@
import { EggContext } from '@eggjs/tegg';
import type { EggContext } from '@eggjs/tegg';
export function isSyncWorkerRequest(ctx: EggContext) {
// sync request will contain this query params

View File

@@ -1,7 +1,7 @@
import crypto from 'node:crypto';
import base from 'base-x';
import { crc32 } from '@node-rs/crc32';
import * as ssri from 'ssri';
import { checkData, create } from 'ssri';
import UAParser from 'ua-parser-js';
const base62 = base('0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ');
@@ -29,12 +29,12 @@ export function checkToken(token: string, prefix: string): boolean {
}
export function integrity(plain: string): string {
return ssri.create().update(plain).digest()
return create().update(plain).digest()
.toString();
}
export function checkIntegrity(plain: string, expectedIntegrity: string): boolean {
return !!ssri.checkData(plain, expectedIntegrity);
return !!checkData(plain, expectedIntegrity);
}
export function sha512(plain: string): string {

View File

@@ -1,5 +1,5 @@
import { AccessLevel, SingletonProto } from '@eggjs/tegg';
import { BugVersion } from '../../core/entity/BugVersion.js';
import type { BugVersion } from '../../core/entity/BugVersion.js';
@SingletonProto({
accessLevel: AccessLevel.PUBLIC,

View File

@@ -1,14 +1,10 @@
import { Readable } from 'node:stream';
import { IncomingHttpHeaders } from 'node:http';
import {
SingletonProto,
AccessLevel,
Inject,
} from '@eggjs/tegg';
import type { Readable } from 'node:stream';
import type { IncomingHttpHeaders } from 'node:http';
import { SingletonProto, AccessLevel, Inject } from '@eggjs/tegg';
import { Pointcut } from '@eggjs/tegg/aop';
import { EggLogger } from 'egg';
import type { EggLogger } from 'egg';
import { AsyncTimer } from '../aop/AsyncTimer.js';
import { NFSClient } from '../typing.js';
import type { NFSClient } from '../typing.js';
const INSTANCE_NAME = 'nfsAdapter';
@@ -25,13 +21,23 @@ export class NFSAdapter {
@Pointcut(AsyncTimer)
async uploadBytes(storeKey: string, bytes: Uint8Array) {
this.logger.info('[%s:uploadBytes] key: %s, bytes: %d', INSTANCE_NAME, storeKey, bytes.length);
this.logger.info(
'[%s:uploadBytes] key: %s, bytes: %d',
INSTANCE_NAME,
storeKey,
bytes.length
);
await this.nfsClient.uploadBytes(bytes, { key: storeKey });
}
// will return next store position
@Pointcut(AsyncTimer)
async appendBytes(storeKey: string, bytes: Uint8Array, position?: string, headers?: IncomingHttpHeaders) {
async appendBytes(
storeKey: string,
bytes: Uint8Array,
position?: string,
headers?: IncomingHttpHeaders
) {
// make sure position is undefined by the first time
if (!position) position = undefined;
const options = {
@@ -45,14 +51,24 @@ export class NFSAdapter {
@Pointcut(AsyncTimer)
async uploadFile(storeKey: string, file: string) {
this.logger.info('[%s:uploadFile] key: %s, file: %s', INSTANCE_NAME, storeKey, file);
this.logger.info(
'[%s:uploadFile] key: %s, file: %s',
INSTANCE_NAME,
storeKey,
file
);
await this.nfsClient.upload(file, { key: storeKey });
}
@Pointcut(AsyncTimer)
async downloadFile(storeKey: string, file: string, timeout: number) {
this.logger.info('[%s:downloadFile] key: %s, file: %s, timeout: %s',
INSTANCE_NAME, storeKey, file, timeout);
this.logger.info(
'[%s:downloadFile] key: %s, file: %s, timeout: %s',
INSTANCE_NAME,
storeKey,
file,
timeout
);
await this.nfsClient.download(storeKey, file, { timeout });
}
@@ -79,7 +95,9 @@ export class NFSAdapter {
}
}
async getDownloadUrlOrStream(storeKey: string): Promise<string | Readable | undefined> {
async getDownloadUrlOrStream(
storeKey: string
): Promise<string | Readable | undefined> {
const downloadUrl = await this.getDownloadUrl(storeKey);
if (downloadUrl) {
return downloadUrl;

View File

@@ -1,17 +1,13 @@
import { setTimeout } from 'node:timers/promises';
import {
ContextProto,
AccessLevel,
Inject,
} from '@eggjs/tegg';
import {
import { ContextProto, AccessLevel, Inject } from '@eggjs/tegg';
import type {
EggLogger,
EggContextHttpClient,
EggAppConfig,
HttpClientRequestOptions,
HttpClientResponse,
} from 'egg';
import { PackageManifestType } from '../../repository/PackageRepository.js';
import type { PackageManifestType } from '../../repository/PackageRepository.js';
import { isTimeoutError } from '../ErrorUtil.js';
type HttpMethod = HttpClientRequestOptions['method'];
@@ -42,7 +38,10 @@ export class NPMRegistry {
this.registryHost = registryHost;
}
public async getFullManifests(fullname: string, optionalConfig?: { retries?: number, remoteAuthToken?: string }): Promise<{ method: HttpMethod } & HttpClientResponse<PackageManifestType>> {
public async getFullManifests(
fullname: string,
optionalConfig?: { retries?: number; remoteAuthToken?: string }
): Promise<{ method: HttpMethod } & HttpClientResponse<PackageManifestType>> {
let retries = optionalConfig?.retries || 3;
// set query t=timestamp, make sure CDN cache disable
// cache=0 is sync worker request flag
@@ -52,7 +51,9 @@ export class NPMRegistry {
try {
// large package: https://r.cnpmjs.org/%40procore%2Fcore-icons
// https://r.cnpmjs.org/intraactive-sdk-ui 44s
const authorization = this.genAuthorizationHeader(optionalConfig?.remoteAuthToken);
const authorization = this.genAuthorizationHeader(
optionalConfig?.remoteAuthToken
);
return await this.request('GET', url, undefined, {
timeout: 120000,
headers: { authorization },
@@ -66,7 +67,8 @@ export class NPMRegistry {
retries--;
if (retries > 0) {
// sleep 1s ~ 4s in random
const delay = process.env.NODE_ENV === 'test' ? 1 : 1000 + Math.random() * 4000;
const delay =
process.env.NODE_ENV === 'test' ? 1 : 1000 + Math.random() * 4000;
await setTimeout(delay);
}
}
@@ -74,8 +76,13 @@ export class NPMRegistry {
}
// app.put('/:name/sync', sync.sync);
public async createSyncTask(fullname: string, optionalConfig?: { remoteAuthToken?:string}): Promise<RegistryResponse> {
const authorization = this.genAuthorizationHeader(optionalConfig?.remoteAuthToken);
public async createSyncTask(
fullname: string,
optionalConfig?: { remoteAuthToken?: string }
): Promise<RegistryResponse> {
const authorization = this.genAuthorizationHeader(
optionalConfig?.remoteAuthToken
);
const url = `${this.registry}/${encodeURIComponent(fullname)}/sync?sync_upstream=true&nodeps=true`;
// {
// ok: true,
@@ -85,21 +92,41 @@ export class NPMRegistry {
}
// app.get('/:name/sync/log/:id', sync.getSyncLog);
public async getSyncTask(fullname: string, id: string, offset: number, optionalConfig?:{ remoteAuthToken?:string }): Promise<RegistryResponse> {
const authorization = this.genAuthorizationHeader(optionalConfig?.remoteAuthToken);
public async getSyncTask(
fullname: string,
id: string,
offset: number,
optionalConfig?: { remoteAuthToken?: string }
): Promise<RegistryResponse> {
const authorization = this.genAuthorizationHeader(
optionalConfig?.remoteAuthToken
);
const url = `${this.registry}/${encodeURIComponent(fullname)}/sync/log/${id}?offset=${offset}`;
// { ok: true, syncDone: syncDone, log: log }
return await this.request('GET', url, undefined, { authorization });
}
public async getDownloadRanges(registry: string, fullname: string, start: string, end: string, optionalConfig?:{ remoteAuthToken?:string }): Promise<RegistryResponse> {
const authorization = this.genAuthorizationHeader(optionalConfig?.remoteAuthToken);
public async getDownloadRanges(
registry: string,
fullname: string,
start: string,
end: string,
optionalConfig?: { remoteAuthToken?: string }
): Promise<RegistryResponse> {
const authorization = this.genAuthorizationHeader(
optionalConfig?.remoteAuthToken
);
const url = `${registry}/downloads/range/${start}:${end}/${encodeURIComponent(fullname)}`;
return await this.request('GET', url, undefined, { authorization });
}
private async request(method: HttpMethod, url: string, params?: object, options?: object): Promise<RegistryResponse> {
const res = await this.httpclient.request(url, {
private async request(
method: HttpMethod,
url: string,
params?: object,
options?: object
): Promise<RegistryResponse> {
const res = (await this.httpclient.request(url, {
method,
data: params,
dataType: 'json',
@@ -109,15 +136,20 @@ export class NPMRegistry {
followRedirect: true,
gzip: true,
...options,
}) as HttpClientResponse;
this.logger.info('[NPMRegistry:request] %s %s, status: %s', method, url, res.status);
})) as HttpClientResponse;
this.logger.info(
'[NPMRegistry:request] %s %s, status: %s',
method,
url,
res.status
);
return {
method,
...res,
};
}
public genAuthorizationHeader(remoteAuthToken?:string) {
public genAuthorizationHeader(remoteAuthToken?: string) {
return remoteAuthToken ? `Bearer ${remoteAuthToken}` : '';
}
}

View File

@@ -1,7 +1,11 @@
import { ImplDecorator, Inject, QualifierImplDecoratorUtil } from '@eggjs/tegg';
import { EggHttpClient, EggLogger } from 'egg';
import { BinaryType } from '../../enum/Binary.js';
import { BinaryName, BinaryTaskConfig } from '../../../../config/binaries.js';
import type { ImplDecorator } from '@eggjs/tegg';
import { Inject, QualifierImplDecoratorUtil } from '@eggjs/tegg';
import type { EggHttpClient, EggLogger } from 'egg';
import type { BinaryType } from '../../enum/Binary.js';
import type {
BinaryName,
BinaryTaskConfig,
} from '../../../../config/binaries.js';
export type BinaryItem = {
name: string;
@@ -17,7 +21,7 @@ export type FetchResult = {
nextParams?: any;
};
const platforms = [ 'darwin', 'linux', 'win32' ] as const;
const platforms = ['darwin', 'linux', 'win32'] as const;
export const BINARY_ADAPTER_ATTRIBUTE = Symbol('BINARY_ADAPTER_ATTRIBUTE');
@@ -29,7 +33,10 @@ export abstract class AbstractBinary {
protected httpclient: EggHttpClient;
abstract initFetch(binaryName: BinaryName): Promise<void>;
abstract fetch(dir: string, binaryName: BinaryName): Promise<FetchResult | undefined>;
abstract fetch(
dir: string,
binaryName: BinaryName
): Promise<FetchResult | undefined>;
// eslint-disable-next-line @typescript-eslint/no-unused-vars
async finishFetch(_success: boolean, _binaryName: BinaryName): Promise<void> {
@@ -44,13 +51,22 @@ export abstract class AbstractBinary {
});
const xml = data.toString() as string;
if (status !== 200) {
this.logger.warn('[AbstractBinary.requestXml:non-200-status] url: %s, status: %s, headers: %j, xml: %j', url, status, headers, xml);
this.logger.warn(
'[AbstractBinary.requestXml:non-200-status] url: %s, status: %s, headers: %j, xml: %j',
url,
status,
headers,
xml
);
return '';
}
return xml;
}
protected async requestJSON(url: string, requestHeaders?: Record<string, string>) {
protected async requestJSON(
url: string,
requestHeaders?: Record<string, string>
) {
const { status, data, headers } = await this.httpclient.request(url, {
timeout: 30000,
dataType: 'json',
@@ -59,7 +75,12 @@ export abstract class AbstractBinary {
headers: requestHeaders,
});
if (status !== 200) {
this.logger.warn('[AbstractBinary.requestJSON:non-200-status] url: %s, status: %s, headers: %j', url, status, headers);
this.logger.warn(
'[AbstractBinary.requestJSON:non-200-status] url: %s, status: %s, headers: %j',
url,
status,
headers
);
return data;
}
return data;
@@ -68,7 +89,9 @@ export abstract class AbstractBinary {
// https://nodejs.org/api/n-api.html#n_api_node_api_version_matrix
protected async listNodeABIVersions() {
const nodeABIVersions: number[] = [];
const versions = await this.requestJSON('https://nodejs.org/dist/index.json');
const versions = await this.requestJSON(
'https://nodejs.org/dist/index.json'
);
for (const version of versions) {
if (!version.modules) continue;
const modulesVersion = parseInt(version.modules);
@@ -89,21 +112,24 @@ export abstract class AbstractBinary {
if (binaryConfig?.options?.nodeArchs) return binaryConfig.options.nodeArchs;
// https://nodejs.org/api/os.html#osarch
return {
linux: [ 'arm', 'arm64', 's390x', 'ia32', 'x64' ],
darwin: [ 'arm64', 'ia32', 'x64' ],
win32: [ 'ia32', 'x64' ],
linux: ['arm', 'arm64', 's390x', 'ia32', 'x64'],
darwin: ['arm64', 'ia32', 'x64'],
win32: ['ia32', 'x64'],
};
}
protected listNodeLibcs(): Record<typeof platforms[number], string[]> {
protected listNodeLibcs(): Record<(typeof platforms)[number], string[]> {
// https://github.com/lovell/detect-libc/blob/master/lib/detect-libc.js#L42
return {
darwin: [ 'unknown' ],
linux: [ 'glibc', 'musl' ],
win32: [ 'unknown' ],
darwin: ['unknown'],
linux: ['glibc', 'musl'],
win32: ['unknown'],
};
}
}
export const BinaryAdapter: ImplDecorator<AbstractBinary, typeof BinaryType> =
QualifierImplDecoratorUtil.generatorDecorator(AbstractBinary, BINARY_ADAPTER_ATTRIBUTE);
QualifierImplDecoratorUtil.generatorDecorator(
AbstractBinary,
BINARY_ADAPTER_ATTRIBUTE
);
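The `(typeof platforms)[number]` annotation that gains parentheses above is an indexed-access type over the `as const` tuple. A standalone sketch of the pattern, using the same values as the diff:

```ts
const platforms = ['darwin', 'linux', 'win32'] as const;
// (typeof platforms)[number] resolves to the union 'darwin' | 'linux' | 'win32'
type Platform = (typeof platforms)[number];

const libcs: Record<Platform, string[]> = {
  darwin: ['unknown'],
  linux: ['glibc', 'musl'],
  win32: ['unknown'],
};
console.log(libcs.linux); // ['glibc', 'musl']
```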

View File

@@ -1,7 +1,8 @@
import { Inject, SingletonProto } from '@eggjs/tegg';
import { EggAppConfig } from 'egg';
import type { EggAppConfig } from 'egg';
import { BinaryType } from '../../enum/Binary.js';
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary.js';
import type { FetchResult, BinaryItem } from './AbstractBinary.js';
import { AbstractBinary, BinaryAdapter } from './AbstractBinary.js';
@SingletonProto()
@BinaryAdapter(BinaryType.Api)
@@ -14,12 +15,20 @@ export class ApiBinary extends AbstractBinary {
return;
}
async fetch(dir: string, binaryName: string): Promise<FetchResult | undefined> {
const apiUrl = this.config.cnpmcore.syncBinaryFromAPISource || `${this.config.cnpmcore.sourceRegistry}/-/binary`;
async fetch(
dir: string,
binaryName: string
): Promise<FetchResult | undefined> {
const apiUrl =
this.config.cnpmcore.syncBinaryFromAPISource ||
`${this.config.cnpmcore.sourceRegistry}/-/binary`;
const url = `${apiUrl}/${binaryName}${dir}`;
const data = await this.requestJSON(url);
if (!Array.isArray(data)) {
this.logger.warn('[ApiBinary.fetch:response-data-not-array] data: %j', data);
this.logger.warn(
'[ApiBinary.fetch:response-data-not-array] data: %j',
data
);
return;
}
const items: BinaryItem[] = [];

View File

@@ -1,8 +1,13 @@
import path from 'node:path';
import { SingletonProto } from '@eggjs/tegg';
import { BinaryType } from '../../enum/Binary.js';
import binaries, { BinaryName, BinaryTaskConfig } from '../../../../config/binaries.js';
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary.js';
import type {
BinaryName,
BinaryTaskConfig,
} from '../../../../config/binaries.js';
import binaries from '../../../../config/binaries.js';
import type { FetchResult, BinaryItem } from './AbstractBinary.js';
import { AbstractBinary, BinaryAdapter } from './AbstractBinary.js';
@SingletonProto()
@BinaryAdapter(BinaryType.Bucket)
@@ -12,7 +17,10 @@ export class BucketBinary extends AbstractBinary {
return;
}
async fetch(dir: string, binaryName: BinaryName): Promise<FetchResult | undefined> {
async fetch(
dir: string,
binaryName: BinaryName
): Promise<FetchResult | undefined> {
// /foo/ => foo/
const binaryConfig = binaries[binaryName];
const subDir = dir.substring(1);
@@ -21,13 +29,18 @@ export class BucketBinary extends AbstractBinary {
return { items: this.parseItems(xml, dir, binaryConfig), nextParams: null };
}
protected parseItems(xml: string, dir: string, binaryConfig: BinaryTaskConfig): BinaryItem[] {
protected parseItems(
xml: string,
dir: string,
binaryConfig: BinaryTaskConfig
): BinaryItem[] {
const items: BinaryItem[] = [];
// https://nwjs2.s3.amazonaws.com/?prefix=v0.59.0%2Fx64%2F
// https://chromedriver.storage.googleapis.com/?delimiter=/&prefix=
// <Contents><Key>2.0/chromedriver_linux32.zip</Key><Generation>1380149859530000</Generation><MetaGeneration>2</MetaGeneration><LastModified>2013-09-25T22:57:39.349Z</LastModified><ETag>"c0d96102715c4916b872f91f5bf9b12c"</ETag><Size>7262134</Size><Owner/></Contents><Contents>
// <Contents><Key>v0.59.0/nwjs-v0.59.0-linux-ia32.tar.gz</Key><LastModified>2015-11-02T02:34:18.000Z</LastModified><ETag>&quot;b1b7a52928e9f874bad0cabf7f74ba8e&quot;</ETag><Size>22842</Size><StorageClass>STANDARD</StorageClass></Contents>
const fileRe = /<Contents><Key>([^<]+?)<\/Key>(?:<Generation>\d+?<\/Generation>)?(?:<MetaGeneration>\d+?<\/MetaGeneration>)?<LastModified>([^<]+?)<\/LastModified><ETag>[^<]+?<\/ETag><Size>(\d+?)<\/Size>/g;
const fileRe =
/<Contents><Key>([^<]+?)<\/Key>(?:<Generation>\d+?<\/Generation>)?(?:<MetaGeneration>\d+?<\/MetaGeneration>)?<LastModified>([^<]+?)<\/LastModified><ETag>[^<]+?<\/ETag><Size>(\d+?)<\/Size>/g;
let matchs = xml.matchAll(fileRe);
for (const m of matchs) {
const fullname = m[1].trim();
@@ -52,7 +65,8 @@ export class BucketBinary extends AbstractBinary {
});
}
// <CommonPrefixes><Prefix>v0.59.0/x64/</Prefix></CommonPrefixes>
const dirRe = /<CommonPrefixes><Prefix>([^<]+?)<\/Prefix><\/CommonPrefixes>/g;
const dirRe =
/<CommonPrefixes><Prefix>([^<]+?)<\/Prefix><\/CommonPrefixes>/g;
matchs = xml.matchAll(dirRe);
for (const m of matchs) {
// <Prefix>AWSLogs/</Prefix>

View File

@@ -1,7 +1,8 @@
import { basename } from 'node:path';
import { SingletonProto } from '@eggjs/tegg';
import { BinaryType } from '../../enum/Binary.js';
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary.js';
import type { FetchResult, BinaryItem } from './AbstractBinary.js';
import { AbstractBinary, BinaryAdapter } from './AbstractBinary.js';
@SingletonProto()
@BinaryAdapter(BinaryType.ChromeForTesting)
@@ -18,7 +19,11 @@ export class ChromeForTestingBinary extends AbstractBinary {
}
async finishFetch(success: boolean) {
if (success && this.#timestamp && ChromeForTestingBinary.lastTimestamp !== this.#timestamp) {
if (
success &&
this.#timestamp &&
ChromeForTestingBinary.lastTimestamp !== this.#timestamp
) {
ChromeForTestingBinary.lastTimestamp = this.#timestamp;
}
}
@@ -26,22 +31,35 @@ export class ChromeForTestingBinary extends AbstractBinary {
async #syncDirItems() {
this.dirItems = {};
this.dirItems['/'] = [];
const jsonApiEndpoint = 'https://googlechromelabs.github.io/chrome-for-testing/known-good-versions-with-downloads.json';
const { data, status, headers } = await this.httpclient.request(jsonApiEndpoint, {
dataType: 'json',
timeout: 30000,
followRedirect: true,
gzip: true,
});
const jsonApiEndpoint =
'https://googlechromelabs.github.io/chrome-for-testing/known-good-versions-with-downloads.json';
const { data, status, headers } = await this.httpclient.request(
jsonApiEndpoint,
{
dataType: 'json',
timeout: 30000,
followRedirect: true,
gzip: true,
}
);
if (status !== 200) {
this.logger.warn('[ChromeForTestingBinary.request:non-200-status] url: %s, status: %s, headers: %j, data: %j',
jsonApiEndpoint, status, headers, data);
this.logger.warn(
'[ChromeForTestingBinary.request:non-200-status] url: %s, status: %s, headers: %j, data: %j',
jsonApiEndpoint,
status,
headers,
data
);
return;
}
this.#timestamp = data.timestamp;
const hasNewData = this.#timestamp !== ChromeForTestingBinary.lastTimestamp;
this.logger.info('[ChromeForTestingBinary] remote data timestamp: %j, last timestamp: %j, hasNewData: %s',
this.#timestamp, ChromeForTestingBinary.lastTimestamp, hasNewData);
this.logger.info(
'[ChromeForTestingBinary] remote data timestamp: %j, last timestamp: %j, hasNewData: %s',
this.#timestamp,
ChromeForTestingBinary.lastTimestamp,
hasNewData
);
if (!hasNewData) {
return;
}

View File

@@ -1,6 +1,7 @@
import { SingletonProto } from '@eggjs/tegg';
import { BinaryType } from '../../enum/Binary.js';
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary.js';
import type { FetchResult, BinaryItem } from './AbstractBinary.js';
import { AbstractBinary, BinaryAdapter } from './AbstractBinary.js';
@SingletonProto()
@BinaryAdapter(BinaryType.Cypress)
@@ -53,8 +54,10 @@ export class CypressBinary extends AbstractBinary {
// { platform: 'win32', arch: 'x64' },
// ]
const platforms = [
'darwin-x64', 'darwin-arm64',
'linux-x64', 'linux-arm64',
'darwin-x64',
'darwin-arm64',
'linux-x64',
'linux-arm64',
'win32-x64',
];
for (const platform of platforms) {

View File

@@ -1,8 +1,7 @@
import path from 'node:path';
import { SingletonProto } from '@eggjs/tegg';
import {
AbstractBinary, FetchResult, BinaryItem, BinaryAdapter,
} from './AbstractBinary.js';
import type { FetchResult, BinaryItem } from './AbstractBinary.js';
import { AbstractBinary, BinaryAdapter } from './AbstractBinary.js';
import { BinaryType } from '../../enum/Binary.js';
@SingletonProto()
@@ -20,15 +19,23 @@ export class EdgedriverBinary extends AbstractBinary {
this.dirItems = {};
this.dirItems['/'] = [];
const jsonApiEndpoint = 'https://edgeupdates.microsoft.com/api/products';
const { data, status, headers } = await this.httpclient.request(jsonApiEndpoint, {
dataType: 'json',
timeout: 30000,
followRedirect: true,
gzip: true,
});
const { data, status, headers } = await this.httpclient.request(
jsonApiEndpoint,
{
dataType: 'json',
timeout: 30000,
followRedirect: true,
gzip: true,
}
);
if (status !== 200) {
this.logger.warn('[EdgedriverBinary.request:non-200-status] url: %s, status: %s, headers: %j, data: %j',
jsonApiEndpoint, status, headers, data);
this.logger.warn(
'[EdgedriverBinary.request:non-200-status] url: %s, status: %s, headers: %j, data: %j',
jsonApiEndpoint,
status,
headers,
data
);
return;
}
this.logger.info('[EdgedriverBinary] remote data length: %s', data.length);
@@ -175,7 +182,8 @@ export class EdgedriverBinary extends AbstractBinary {
#parseItems(xml: string): BinaryItem[] {
const items: BinaryItem[] = [];
// <Blob><Name>124.0.2478.97/edgedriver_arm64.zip</Name><Url>https://msedgewebdriverstorage.blob.core.windows.net/edgewebdriver/124.0.2478.97/edgedriver_arm64.zip</Url><Properties><Last-Modified>Fri, 10 May 2024 18:35:44 GMT</Last-Modified><Etag>0x8DC712000713C13</Etag><Content-Length>9191362</Content-Length><Content-Type>application/octet-stream</Content-Type><Content-Encoding /><Content-Language /><Content-MD5>1tjPTf5JU6KKB06Qf1JOGw==</Content-MD5><Cache-Control /><BlobType>BlockBlob</BlobType><LeaseStatus>unlocked</LeaseStatus></Properties></Blob>
const fileRe = /<Blob><Name>([^<]+?)<\/Name><Url>([^<]+?)<\/Url><Properties><Last-Modified>([^<]+?)<\/Last-Modified><Etag>(?:[^<]+?)<\/Etag><Content-Length>(\d+)<\/Content-Length>/g;
const fileRe =
/<Blob><Name>([^<]+?)<\/Name><Url>([^<]+?)<\/Url><Properties><Last-Modified>([^<]+?)<\/Last-Modified><Etag>(?:[^<]+?)<\/Etag><Content-Length>(\d+)<\/Content-Length>/g;
const matchItems = xml.matchAll(fileRe);
for (const m of matchItems) {
const fullname = m[1].trim();

View File

@@ -1,13 +1,18 @@
import { SingletonProto } from '@eggjs/tegg';
import binaries, { BinaryName } from '../../../../config/binaries.js';
import type { BinaryName } from '../../../../config/binaries.js';
import binaries from '../../../../config/binaries.js';
import { BinaryType } from '../../enum/Binary.js';
import { BinaryAdapter, BinaryItem, FetchResult } from './AbstractBinary.js';
import type { BinaryItem, FetchResult } from './AbstractBinary.js';
import { BinaryAdapter } from './AbstractBinary.js';
import { GithubBinary } from './GithubBinary.js';
@SingletonProto()
@BinaryAdapter(BinaryType.Electron)
export class ElectronBinary extends GithubBinary {
async fetch(dir: string, binaryName: BinaryName = 'electron'): Promise<FetchResult | undefined> {
async fetch(
dir: string,
binaryName: BinaryName = 'electron'
): Promise<FetchResult | undefined> {
const releases = await this.initReleases(binaryName, binaries.electron);
if (!releases) return;
@@ -34,7 +39,10 @@ export class ElectronBinary extends GithubBinary {
}
} else {
for (const item of releases) {
if (dir === `/${item.tag_name}/` || dir === `/${item.tag_name.substring(1)}/`) {
if (
dir === `/${item.tag_name}/` ||
dir === `/${item.tag_name.substring(1)}/`
) {
items = this.formatItems(item, binaries.electron);
break;
}

View File

@@ -1,7 +1,12 @@
import { SingletonProto } from '@eggjs/tegg';
import binaries, { BinaryName, BinaryTaskConfig } from '../../../../config/binaries.js';
import type {
BinaryName,
BinaryTaskConfig,
} from '../../../../config/binaries.js';
import binaries from '../../../../config/binaries.js';
import { BinaryType } from '../../enum/Binary.js';
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary.js';
import type { FetchResult, BinaryItem } from './AbstractBinary.js';
import { AbstractBinary, BinaryAdapter } from './AbstractBinary.js';
@SingletonProto()
@BinaryAdapter(BinaryType.GitHub)
@@ -12,7 +17,10 @@ export class GithubBinary extends AbstractBinary {
delete this.releases[binaryName];
}
protected async initReleases(binaryName: BinaryName, binaryConfig: BinaryTaskConfig) {
protected async initReleases(
binaryName: BinaryName,
binaryConfig: BinaryTaskConfig
) {
if (!this.releases[binaryName]) {
// https://docs.github.com/en/rest/reference/releases get three pages
// https://api.github.com/repos/electron/electron/releases
@@ -28,11 +36,22 @@ export class GithubBinary extends AbstractBinary {
const data = await this.requestJSON(url, requestHeaders);
if (!Array.isArray(data)) {
// {"message":"API rate limit exceeded for 47.57.239.54. (But here's the good news: Authenticated requests get a higher rate limit. Check out the documentation for more details.)","documentation_url":"https://docs.github.com/rest/overview/resources-in-the-rest-api#rate-limiting"}
if (typeof data?.message === 'string' && data.message.includes('rate limit')) {
this.logger.info('[GithubBinary.fetch:hit-rate-limit] skip sync this time, data: %j, url: %s', data, url);
if (
typeof data?.message === 'string' &&
data.message.includes('rate limit')
) {
this.logger.info(
'[GithubBinary.fetch:hit-rate-limit] skip sync this time, data: %j, url: %s',
data,
url
);
return;
}
this.logger.warn('[GithubBinary.fetch:response-data-not-array] data: %j, url: %s', data, url);
this.logger.warn(
'[GithubBinary.fetch:response-data-not-array] data: %j, url: %s',
data,
url
);
return;
}
releases = releases.concat(data);
@@ -48,7 +67,10 @@ export class GithubBinary extends AbstractBinary {
const maxFileSize = 1024 * 1024 * 250;
for (const asset of releaseItem.assets) {
if (asset.size > maxFileSize) {
this.logger.info('[GithubBinary.formatItems] asset reach max file size(> 250MB), ignore download it, asset: %j', asset);
this.logger.info(
'[GithubBinary.formatItems] asset reach max file size(> 250MB), ignore download it, asset: %j',
asset
);
continue;
}
items.push({
@@ -83,7 +105,10 @@ export class GithubBinary extends AbstractBinary {
return items;
}
async fetch(dir: string, binaryName: BinaryName): Promise<FetchResult | undefined> {
async fetch(
dir: string,
binaryName: BinaryName
): Promise<FetchResult | undefined> {
const binaryConfig = binaries[binaryName];
const releases = await this.initReleases(binaryName, binaryConfig);
if (!releases) return;

View File

@@ -1,7 +1,9 @@
import { SingletonProto } from '@eggjs/tegg';
import binaries, { BinaryName } from '../../../../config/binaries.js';
import type { BinaryName } from '../../../../config/binaries.js';
import binaries from '../../../../config/binaries.js';
import { BinaryType } from '../../enum/Binary.js';
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary.js';
import type { FetchResult, BinaryItem } from './AbstractBinary.js';
import { AbstractBinary, BinaryAdapter } from './AbstractBinary.js';
@SingletonProto()
@BinaryAdapter(BinaryType.Imagemin)
@@ -11,7 +13,10 @@ export class ImageminBinary extends AbstractBinary {
return;
}
async fetch(dir: string, binaryName: BinaryName): Promise<FetchResult | undefined> {
async fetch(
dir: string,
binaryName: BinaryName
): Promise<FetchResult | undefined> {
const binaryConfig = binaries[binaryName];
const dirItems: {
[key: string]: BinaryItem[];
@@ -66,7 +71,7 @@ export class ImageminBinary extends AbstractBinary {
size: '-',
isDir: false,
url: `${binaryConfig.distUrl}/${binaryConfig.repo}${platformDir}${name}`,
ignoreDownloadStatuses: [ 404 ],
ignoreDownloadStatuses: [404],
});
}
} else {
@@ -88,7 +93,7 @@ export class ImageminBinary extends AbstractBinary {
size: '-',
isDir: false,
url: `${binaryConfig.distUrl}/${binaryConfig.repo}${platformArchDir}${name}`,
ignoreDownloadStatuses: [ 404 ],
ignoreDownloadStatuses: [404],
});
}
}

View File

@@ -1,8 +1,10 @@
import { basename } from 'node:path';
import { SingletonProto } from '@eggjs/tegg';
import { BinaryType } from '../../enum/Binary.js';
import binaries, { BinaryName } from '../../../../config/binaries.js';
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary.js';
import type { BinaryName } from '../../../../config/binaries.js';
import binaries from '../../../../config/binaries.js';
import type { FetchResult, BinaryItem } from './AbstractBinary.js';
import { AbstractBinary, BinaryAdapter } from './AbstractBinary.js';
@SingletonProto()
@BinaryAdapter(BinaryType.Node)
@@ -12,7 +14,10 @@ export class NodeBinary extends AbstractBinary {
return;
}
async fetch(dir: string, binaryName: BinaryName): Promise<FetchResult | undefined> {
async fetch(
dir: string,
binaryName: BinaryName
): Promise<FetchResult | undefined> {
const binaryConfig = binaries[binaryName];
const url = `${binaryConfig.distUrl}${dir}`;
const html = await this.requestXml(url);
@@ -30,7 +35,8 @@ export class NodeBinary extends AbstractBinary {
// <a href="/dist/v18.15.0/SHASUMS256.txt.asc">SHASUMS256.txt.asc</a> 04-Nov-2024 17:29 3.7 KB
// <a href="/dist/v18.15.0/SHASUMS256.txt.sig">SHASUMS256.txt.sig</a> 04-Nov-2024 17:29 310 B
// <a href="/dist/v18.15.0/SHASUMS256.txt">SHASUMS256.txt</a> 04-Nov-2024 17:29 3.2 KB
const re = /<a href="([^"]+?)"[^>]*?>[^<]+?<\/a>\s+?((?:[\w-]+? \w{2}:\d{2})|-)\s+?([\d.\-\s\w]+)/ig;
const re =
/<a href="([^"]+?)"[^>]*?>[^<]+?<\/a>\s+?((?:[\w-]+? \w{2}:\d{2})|-)\s+?([\d.\-\s\w]+)/gi;
const matchs = html.matchAll(re);
const items: BinaryItem[] = [];
for (const m of matchs) {

View File

@@ -1,8 +1,10 @@
import { join } from 'node:path';
import { SingletonProto } from '@eggjs/tegg';
import binaries, { BinaryName } from '../../../../config/binaries.js';
import type { BinaryName } from '../../../../config/binaries.js';
import binaries from '../../../../config/binaries.js';
import { BinaryType } from '../../enum/Binary.js';
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary.js';
import type { FetchResult, BinaryItem } from './AbstractBinary.js';
import { AbstractBinary, BinaryAdapter } from './AbstractBinary.js';
@SingletonProto()
@BinaryAdapter(BinaryType.NodePreGyp)
@@ -13,7 +15,10 @@ export class NodePreGypBinary extends AbstractBinary {
}
// https://github.com/mapbox/node-pre-gyp
async fetch(dir: string, binaryName: BinaryName): Promise<FetchResult | undefined> {
async fetch(
dir: string,
binaryName: BinaryName
): Promise<FetchResult | undefined> {
const binaryConfig = binaries[binaryName];
const npmPackageName = binaryConfig.options?.npmPackageName ?? binaryName;
const pkgUrl = `https://registry.npmjs.com/${npmPackageName}`;
@@ -33,20 +38,28 @@ export class NodePreGypBinary extends AbstractBinary {
if (!pkgVersion.binary) continue;
// https://github.com/mapbox/node-pre-gyp#package_name
// defaults to {module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz
let binaryFile = pkgVersion.binary.package_name
|| '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz';
let binaryFile =
pkgVersion.binary.package_name ||
'{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz';
if (!binaryFile) continue;
const moduleName = pkgVersion.binary.module_name || pkgVersion.name;
binaryFile = binaryFile.replace('{version}', version)
binaryFile = binaryFile
.replace('{version}', version)
.replace('{module_name}', moduleName);
let currentDir = dirItems['/'];
let versionPrefix = '';
let remotePath = pkgVersion.binary.remote_path;
const napiVersions = pkgVersion.binary.napi_versions ?? [];
if (binaryConfig.options?.requiredNapiVersions && napiVersions.length === 0) continue;
if (
binaryConfig.options?.requiredNapiVersions &&
napiVersions.length === 0
)
continue;
if (remotePath?.includes('{version}')) {
const dirName = remotePath.includes('v{version}') ? `v${version}` : version;
const dirName = remotePath.includes('v{version}')
? `v${version}`
: version;
versionPrefix = `/${dirName}`;
dirItems['/'].push({
name: `${dirName}/`,
@@ -67,17 +80,20 @@ export class NodePreGypBinary extends AbstractBinary {
// "remote_path": "{name}/v{version}",
// "package_name": "{node_abi}-{platform}-{arch}-{libc}.tar.gz"
// },
if (binaryFile.includes('{node_abi}')
&& binaryFile.includes('{platform}')
&& binaryFile.includes('{arch}')
&& binaryFile.includes('{libc}')) {
if (
binaryFile.includes('{node_abi}') &&
binaryFile.includes('{platform}') &&
binaryFile.includes('{arch}') &&
binaryFile.includes('{libc}')
) {
for (const nodeAbi of nodeABIVersions) {
for (const platform of nodePlatforms) {
const archs = nodeArchs[platform];
const libcs = nodeLibcs[platform];
for (const arch of archs) {
for (const libc of libcs) {
const name = binaryFile.replace('{node_abi}', `node-v${nodeAbi}`)
const name = binaryFile
.replace('{node_abi}', `node-v${nodeAbi}`)
.replace('{platform}', platform)
.replace('{arch}', arch)
.replace('{libc}', libc);
@@ -87,20 +103,23 @@ export class NodePreGypBinary extends AbstractBinary {
size: '-',
isDir: false,
url: `${binaryConfig.distUrl}/${binaryName}${versionPrefix}/${name}`,
ignoreDownloadStatuses: [ 404 ],
ignoreDownloadStatuses: [404],
});
}
}
}
}
} else if (binaryFile.includes('{node_abi}')
&& binaryFile.includes('{platform}')
&& binaryFile.includes('{arch}')) {
} else if (
binaryFile.includes('{node_abi}') &&
binaryFile.includes('{platform}') &&
binaryFile.includes('{arch}')
) {
for (const nodeAbi of nodeABIVersions) {
for (const platform of nodePlatforms) {
const archs = nodeArchs[platform];
for (const arch of archs) {
const name = binaryFile.replace('{node_abi}', `node-v${nodeAbi}`)
const name = binaryFile
.replace('{node_abi}', `node-v${nodeAbi}`)
.replace('{platform}', platform)
.replace('{arch}', arch);
currentDir.push({
@@ -109,12 +128,15 @@ export class NodePreGypBinary extends AbstractBinary {
size: '-',
isDir: false,
url: `${binaryConfig.distUrl}/${binaryName}${versionPrefix}/${name}`,
ignoreDownloadStatuses: [ 404 ],
ignoreDownloadStatuses: [404],
});
}
}
}
} else if (binaryFile.includes('{platform}-{arch}-{node_napi_label}-{libc}') && napiVersions.length > 0) {
} else if (
binaryFile.includes('{platform}-{arch}-{node_napi_label}-{libc}') &&
napiVersions.length > 0
) {
// https://skia-canvas.s3.us-east-1.amazonaws.com/v0.9.30/darwin-arm64-napi-v6-unknown.tar.gz
// https://github.com/samizdatco/skia-canvas/blob/2a75801d7cce3b4e4e6ad015a173daefaa8465e6/package.json#L48
// "binary": {
@@ -133,7 +155,8 @@ export class NodePreGypBinary extends AbstractBinary {
for (const arch of archs) {
for (const libc of libcs) {
for (const napiVersion of napiVersions) {
const name = binaryFile.replace('{platform}', platform)
const name = binaryFile
.replace('{platform}', platform)
.replace('{arch}', arch)
.replace('{node_napi_label}', `napi-v${napiVersion}`)
.replace('{libc}', libc);
@@ -143,7 +166,7 @@ export class NodePreGypBinary extends AbstractBinary {
size: '-',
isDir: false,
url: `${binaryConfig.distUrl}${versionPrefix}/${name}`,
ignoreDownloadStatuses: [ 404, 403 ],
ignoreDownloadStatuses: [404, 403],
});
}
}
@@ -165,10 +188,12 @@ export class NodePreGypBinary extends AbstractBinary {
const archs = nodeArchs[platform];
for (const arch of archs) {
for (const napiVersion of napiVersions) {
const binaryFileName = binaryFile.replace('{platform}', platform)
const binaryFileName = binaryFile
.replace('{platform}', platform)
.replace('{arch}', arch)
.replace('{node_napi_label}', napiVersion);
remotePath = remotePath.replace('{module_name}', moduleName)
remotePath = remotePath
.replace('{module_name}', moduleName)
.replace('{name}', binaryName)
.replace('{version}', version)
.replace('{configuration}', 'Release');
@@ -180,12 +205,15 @@ export class NodePreGypBinary extends AbstractBinary {
size: '-',
isDir: false,
url: remoteUrl,
ignoreDownloadStatuses: [ 404 ],
ignoreDownloadStatuses: [404],
});
}
}
}
} else if (binaryFile.includes('{platform}') && binaryFile.includes('{arch}')) {
} else if (
binaryFile.includes('{platform}') &&
binaryFile.includes('{arch}')
) {
// https://github.com/grpc/grpc-node/blob/master/packages/grpc-tools/package.json#L29
// "binary": {
// "module_name": "grpc_tools",
@@ -205,9 +233,11 @@ export class NodePreGypBinary extends AbstractBinary {
for (const platform of nodePlatforms) {
const archs = nodeArchs[platform];
for (const arch of archs) {
const binaryFileName = binaryFile.replace('{platform}', platform)
const binaryFileName = binaryFile
.replace('{platform}', platform)
.replace('{arch}', arch);
remotePath = remotePath.replace('{module_name}', moduleName)
remotePath = remotePath
.replace('{module_name}', moduleName)
.replace('{name}', binaryName)
.replace('{version}', version)
.replace('{configuration}', 'Release');
@@ -219,7 +249,7 @@ export class NodePreGypBinary extends AbstractBinary {
size: '-',
isDir: false,
url: remoteUrl,
ignoreDownloadStatuses: [ 404 ],
ignoreDownloadStatuses: [404],
});
}
}
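The node-pre-gyp template handling that gets re-wrapped above is plain chained string replacement. A hedged sketch of how the default `package_name` template expands; the module name, version, and ABI values here are made up:

```ts
// Mirrors the default template used above; substituted values are illustrative only
const binaryFile = '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz';
const name = binaryFile
  .replace('{module_name}', 'grpc_tools')
  .replace('{version}', '1.12.4')
  .replace('{node_abi}', 'node-v108')
  .replace('{platform}', 'linux')
  .replace('{arch}', 'x64');
console.log(name); // grpc_tools-v1.12.4-node-v108-linux-x64.tar.gz
```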

View File

@@ -1,7 +1,8 @@
import { SingletonProto } from '@eggjs/tegg';
import binaries from '../../../../config/binaries.js';
import { BinaryType } from '../../enum/Binary.js';
import { FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary.js';
import type { FetchResult, BinaryItem } from './AbstractBinary.js';
import { BinaryAdapter } from './AbstractBinary.js';
import { BucketBinary } from './BucketBinary.js';
@SingletonProto()
@@ -14,7 +15,9 @@ export class NwjsBinary extends BucketBinary {
const isRootDir = dir === '/';
// /foo/ => foo/
const subDir = dir.substring(1);
const url = isRootDir ? binaryConfig.distUrl : `${this.s3Url}${encodeURIComponent(subDir)}`;
const url = isRootDir
? binaryConfig.distUrl
: `${this.s3Url}${encodeURIComponent(subDir)}`;
const xml = await this.requestXml(url);
if (!xml) return;
@@ -25,7 +28,8 @@ export class NwjsBinary extends BucketBinary {
// <tr><td valign="top"><img src="/icons/folder.gif" alt="[DIR]"></td><td><a href="v0.15.0-rc1/">v0.15.0-rc1/</a></td><td align="right">06-May-2016 12:24 </td><td align="right"> - </td><td>&nbsp;</td></tr>
// <tr><td valign="top"><img src="/icons/folder.gif" alt="[DIR]"></td><td><a href="v0.15.0-rc2/">v0.15.0-rc2/</a></td><td align="right">13-May-2016 20:13 </td><td align="right"> - </td><td>&nbsp;</td></tr>
const items: BinaryItem[] = [];
const re = /<td><a [^>]+?>([^<]+?\/)<\/a><\/td><td [^>]+?>([^>]+?)<\/td>/ig;
const re =
/<td><a [^>]+?>([^<]+?\/)<\/a><\/td><td [^>]+?>([^>]+?)<\/td>/gi;
const matchs = xml.matchAll(re);
for (const m of matchs) {
const name = m[1].trim();

View File

@@ -2,7 +2,8 @@ import util from 'node:util';
import path from 'node:path';
import { SingletonProto } from '@eggjs/tegg';
import { BinaryType } from '../../enum/Binary.js';
import { AbstractBinary, BinaryAdapter, BinaryItem, FetchResult } from './AbstractBinary.js';
import type { BinaryItem, FetchResult } from './AbstractBinary.js';
import { AbstractBinary, BinaryAdapter } from './AbstractBinary.js';
const PACKAGE_URL = 'https://registry.npmjs.com/playwright-core';
const DOWNLOAD_HOST = 'https://playwright.azureedge.net/';
@@ -10,7 +11,7 @@ const DOWNLOAD_HOST = 'https://playwright.azureedge.net/';
// https://github.com/microsoft/playwright/blob/main/packages/playwright-core/src/server/registry/index.ts
/* eslint-disable quote-props */
const DOWNLOAD_PATHS = {
'chromium': {
chromium: {
'<unknown>': undefined,
'ubuntu18.04-x64': undefined,
'ubuntu20.04-x64': 'builds/chromium/%s/chromium-linux.zip',
@@ -27,17 +28,17 @@ const DOWNLOAD_PATHS = {
'mac10.13': 'builds/chromium/%s/chromium-mac.zip',
'mac10.14': 'builds/chromium/%s/chromium-mac.zip',
'mac10.15': 'builds/chromium/%s/chromium-mac.zip',
'mac11': 'builds/chromium/%s/chromium-mac.zip',
mac11: 'builds/chromium/%s/chromium-mac.zip',
'mac11-arm64': 'builds/chromium/%s/chromium-mac-arm64.zip',
'mac12': 'builds/chromium/%s/chromium-mac.zip',
mac12: 'builds/chromium/%s/chromium-mac.zip',
'mac12-arm64': 'builds/chromium/%s/chromium-mac-arm64.zip',
'mac13': 'builds/chromium/%s/chromium-mac.zip',
mac13: 'builds/chromium/%s/chromium-mac.zip',
'mac13-arm64': 'builds/chromium/%s/chromium-mac-arm64.zip',
'mac14': 'builds/chromium/%s/chromium-mac.zip',
mac14: 'builds/chromium/%s/chromium-mac.zip',
'mac14-arm64': 'builds/chromium/%s/chromium-mac-arm64.zip',
'mac15': 'builds/chromium/%s/chromium-mac.zip',
mac15: 'builds/chromium/%s/chromium-mac.zip',
'mac15-arm64': 'builds/chromium/%s/chromium-mac-arm64.zip',
'win64': 'builds/chromium/%s/chromium-win64.zip',
win64: 'builds/chromium/%s/chromium-win64.zip',
},
'chromium-headless-shell': {
'<unknown>': undefined,
@@ -46,87 +47,128 @@ const DOWNLOAD_PATHS = {
'ubuntu22.04-x64': 'builds/chromium/%s/chromium-headless-shell-linux.zip',
'ubuntu24.04-x64': 'builds/chromium/%s/chromium-headless-shell-linux.zip',
'ubuntu18.04-arm64': undefined,
'ubuntu20.04-arm64': 'builds/chromium/%s/chromium-headless-shell-linux-arm64.zip',
'ubuntu22.04-arm64': 'builds/chromium/%s/chromium-headless-shell-linux-arm64.zip',
'ubuntu24.04-arm64': 'builds/chromium/%s/chromium-headless-shell-linux-arm64.zip',
'ubuntu20.04-arm64':
'builds/chromium/%s/chromium-headless-shell-linux-arm64.zip',
'ubuntu22.04-arm64':
'builds/chromium/%s/chromium-headless-shell-linux-arm64.zip',
'ubuntu24.04-arm64':
'builds/chromium/%s/chromium-headless-shell-linux-arm64.zip',
'debian11-x64': 'builds/chromium/%s/chromium-headless-shell-linux.zip',
'debian11-arm64': 'builds/chromium/%s/chromium-headless-shell-linux-arm64.zip',
'debian11-arm64':
'builds/chromium/%s/chromium-headless-shell-linux-arm64.zip',
'debian12-x64': 'builds/chromium/%s/chromium-headless-shell-linux.zip',
'debian12-arm64': 'builds/chromium/%s/chromium-headless-shell-linux-arm64.zip',
'debian12-arm64':
'builds/chromium/%s/chromium-headless-shell-linux-arm64.zip',
'mac10.13': undefined,
'mac10.14': undefined,
'mac10.15': undefined,
'mac11': 'builds/chromium/%s/chromium-headless-shell-mac.zip',
mac11: 'builds/chromium/%s/chromium-headless-shell-mac.zip',
'mac11-arm64': 'builds/chromium/%s/chromium-headless-shell-mac-arm64.zip',
'mac12': 'builds/chromium/%s/chromium-headless-shell-mac.zip',
mac12: 'builds/chromium/%s/chromium-headless-shell-mac.zip',
'mac12-arm64': 'builds/chromium/%s/chromium-headless-shell-mac-arm64.zip',
'mac13': 'builds/chromium/%s/chromium-headless-shell-mac.zip',
mac13: 'builds/chromium/%s/chromium-headless-shell-mac.zip',
'mac13-arm64': 'builds/chromium/%s/chromium-headless-shell-mac-arm64.zip',
'mac14': 'builds/chromium/%s/chromium-headless-shell-mac.zip',
mac14: 'builds/chromium/%s/chromium-headless-shell-mac.zip',
'mac14-arm64': 'builds/chromium/%s/chromium-headless-shell-mac-arm64.zip',
'mac15': 'builds/chromium/%s/chromium-headless-shell-mac.zip',
mac15: 'builds/chromium/%s/chromium-headless-shell-mac.zip',
'mac15-arm64': 'builds/chromium/%s/chromium-headless-shell-mac-arm64.zip',
'win64': 'builds/chromium/%s/chromium-headless-shell-win64.zip',
win64: 'builds/chromium/%s/chromium-headless-shell-win64.zip',
},
'chromium-tip-of-tree': {
'<unknown>': undefined,
'ubuntu18.04-x64': undefined,
'ubuntu20.04-x64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
'ubuntu22.04-x64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
'ubuntu24.04-x64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
'ubuntu20.04-x64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
'ubuntu22.04-x64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
'ubuntu24.04-x64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
'ubuntu18.04-arm64': undefined,
'ubuntu20.04-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
'ubuntu22.04-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
'ubuntu24.04-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
'debian11-x64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
'debian11-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
'debian12-x64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
'debian12-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
'ubuntu20.04-arm64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
'ubuntu22.04-arm64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
'ubuntu24.04-arm64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
'debian11-x64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
'debian11-arm64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
'debian12-x64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux.zip',
'debian12-arm64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-linux-arm64.zip',
'mac10.13': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
'mac10.14': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
'mac10.15': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
'mac11': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
'mac11-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
'mac12': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
'mac12-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
'mac13': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
'mac13-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
'mac14': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
'mac14-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
'mac15': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
'mac15-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
'win64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-win64.zip',
mac11: 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
'mac11-arm64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
mac12: 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
'mac12-arm64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
mac13: 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
'mac13-arm64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
mac14: 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
'mac14-arm64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
mac15: 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac.zip',
'mac15-arm64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-mac-arm64.zip',
win64: 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-win64.zip',
},
'chromium-tip-of-tree-headless-shell': {
'<unknown>': undefined,
'ubuntu18.04-x64': undefined,
'ubuntu20.04-x64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux.zip',
'ubuntu22.04-x64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux.zip',
'ubuntu24.04-x64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux.zip',
'ubuntu20.04-x64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux.zip',
'ubuntu22.04-x64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux.zip',
'ubuntu24.04-x64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux.zip',
'ubuntu18.04-arm64': undefined,
'ubuntu20.04-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux-arm64.zip',
'ubuntu22.04-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux-arm64.zip',
'ubuntu24.04-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux-arm64.zip',
'debian11-x64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux.zip',
'debian11-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux-arm64.zip',
'debian12-x64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux.zip',
'debian12-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux-arm64.zip',
'ubuntu20.04-arm64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux-arm64.zip',
'ubuntu22.04-arm64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux-arm64.zip',
'ubuntu24.04-arm64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux-arm64.zip',
'debian11-x64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux.zip',
'debian11-arm64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux-arm64.zip',
'debian12-x64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux.zip',
'debian12-arm64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-linux-arm64.zip',
'mac10.13': undefined,
'mac10.14': undefined,
'mac10.15': undefined,
'mac11': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac.zip',
'mac11-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac-arm64.zip',
'mac12': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac.zip',
'mac12-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac-arm64.zip',
'mac13': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac.zip',
'mac13-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac-arm64.zip',
'mac14': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac.zip',
'mac14-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac-arm64.zip',
'mac15': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac.zip',
'mac15-arm64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac-arm64.zip',
'win64': 'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-win64.zip',
mac11:
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac.zip',
'mac11-arm64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac-arm64.zip',
mac12:
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac.zip',
'mac12-arm64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac-arm64.zip',
mac13:
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac.zip',
'mac13-arm64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac-arm64.zip',
mac14:
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac.zip',
'mac14-arm64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac-arm64.zip',
mac15:
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac.zip',
'mac15-arm64':
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-mac-arm64.zip',
win64:
'builds/chromium-tip-of-tree/%s/chromium-tip-of-tree-headless-shell-win64.zip',
},
'firefox': {
firefox: {
'<unknown>': undefined,
'ubuntu18.04-x64': undefined,
'ubuntu20.04-x64': 'builds/firefox/%s/firefox-ubuntu-20.04.zip',
@@ -143,17 +185,17 @@ const DOWNLOAD_PATHS = {
'mac10.13': 'builds/firefox/%s/firefox-mac.zip',
'mac10.14': 'builds/firefox/%s/firefox-mac.zip',
'mac10.15': 'builds/firefox/%s/firefox-mac.zip',
'mac11': 'builds/firefox/%s/firefox-mac.zip',
mac11: 'builds/firefox/%s/firefox-mac.zip',
'mac11-arm64': 'builds/firefox/%s/firefox-mac-arm64.zip',
'mac12': 'builds/firefox/%s/firefox-mac.zip',
mac12: 'builds/firefox/%s/firefox-mac.zip',
'mac12-arm64': 'builds/firefox/%s/firefox-mac-arm64.zip',
'mac13': 'builds/firefox/%s/firefox-mac.zip',
mac13: 'builds/firefox/%s/firefox-mac.zip',
'mac13-arm64': 'builds/firefox/%s/firefox-mac-arm64.zip',
'mac14': 'builds/firefox/%s/firefox-mac.zip',
mac14: 'builds/firefox/%s/firefox-mac.zip',
'mac14-arm64': 'builds/firefox/%s/firefox-mac-arm64.zip',
'mac15': 'builds/firefox/%s/firefox-mac.zip',
mac15: 'builds/firefox/%s/firefox-mac.zip',
'mac15-arm64': 'builds/firefox/%s/firefox-mac-arm64.zip',
'win64': 'builds/firefox/%s/firefox-win64.zip',
win64: 'builds/firefox/%s/firefox-win64.zip',
},
'firefox-beta': {
'<unknown>': undefined,
@@ -163,8 +205,10 @@ const DOWNLOAD_PATHS = {
'ubuntu24.04-x64': 'builds/firefox-beta/%s/firefox-beta-ubuntu-24.04.zip',
'ubuntu18.04-arm64': undefined,
'ubuntu20.04-arm64': undefined,
'ubuntu22.04-arm64': 'builds/firefox-beta/%s/firefox-beta-ubuntu-22.04-arm64.zip',
'ubuntu24.04-arm64': 'builds/firefox-beta/%s/firefox-beta-ubuntu-24.04-arm64.zip',
'ubuntu22.04-arm64':
'builds/firefox-beta/%s/firefox-beta-ubuntu-22.04-arm64.zip',
'ubuntu24.04-arm64':
'builds/firefox-beta/%s/firefox-beta-ubuntu-24.04-arm64.zip',
'debian11-x64': 'builds/firefox-beta/%s/firefox-beta-debian-11.zip',
'debian11-arm64': 'builds/firefox-beta/%s/firefox-beta-debian-11-arm64.zip',
'debian12-x64': 'builds/firefox-beta/%s/firefox-beta-debian-12.zip',
@@ -172,19 +216,19 @@ const DOWNLOAD_PATHS = {
'mac10.13': 'builds/firefox-beta/%s/firefox-beta-mac.zip',
'mac10.14': 'builds/firefox-beta/%s/firefox-beta-mac.zip',
'mac10.15': 'builds/firefox-beta/%s/firefox-beta-mac.zip',
'mac11': 'builds/firefox-beta/%s/firefox-beta-mac.zip',
mac11: 'builds/firefox-beta/%s/firefox-beta-mac.zip',
'mac11-arm64': 'builds/firefox-beta/%s/firefox-beta-mac-arm64.zip',
'mac12': 'builds/firefox-beta/%s/firefox-beta-mac.zip',
mac12: 'builds/firefox-beta/%s/firefox-beta-mac.zip',
'mac12-arm64': 'builds/firefox-beta/%s/firefox-beta-mac-arm64.zip',
'mac13': 'builds/firefox-beta/%s/firefox-beta-mac.zip',
mac13: 'builds/firefox-beta/%s/firefox-beta-mac.zip',
'mac13-arm64': 'builds/firefox-beta/%s/firefox-beta-mac-arm64.zip',
'mac14': 'builds/firefox-beta/%s/firefox-beta-mac.zip',
mac14: 'builds/firefox-beta/%s/firefox-beta-mac.zip',
'mac14-arm64': 'builds/firefox-beta/%s/firefox-beta-mac-arm64.zip',
'mac15': 'builds/firefox-beta/%s/firefox-beta-mac.zip',
mac15: 'builds/firefox-beta/%s/firefox-beta-mac.zip',
'mac15-arm64': 'builds/firefox-beta/%s/firefox-beta-mac-arm64.zip',
'win64': 'builds/firefox-beta/%s/firefox-beta-win64.zip',
win64: 'builds/firefox-beta/%s/firefox-beta-win64.zip',
},
'webkit': {
webkit: {
'<unknown>': undefined,
'ubuntu18.04-x64': undefined,
'ubuntu20.04-x64': 'builds/webkit/%s/webkit-ubuntu-20.04.zip',
@@ -199,21 +243,23 @@ const DOWNLOAD_PATHS = {
'debian12-x64': 'builds/webkit/%s/webkit-debian-12.zip',
'debian12-arm64': 'builds/webkit/%s/webkit-debian-12-arm64.zip',
'mac10.13': undefined,
'mac10.14': 'builds/deprecated-webkit-mac-10.14/%s/deprecated-webkit-mac-10.14.zip',
'mac10.15': 'builds/deprecated-webkit-mac-10.15/%s/deprecated-webkit-mac-10.15.zip',
'mac11': 'builds/webkit/%s/webkit-mac-11.zip',
'mac10.14':
'builds/deprecated-webkit-mac-10.14/%s/deprecated-webkit-mac-10.14.zip',
'mac10.15':
'builds/deprecated-webkit-mac-10.15/%s/deprecated-webkit-mac-10.15.zip',
mac11: 'builds/webkit/%s/webkit-mac-11.zip',
'mac11-arm64': 'builds/webkit/%s/webkit-mac-11-arm64.zip',
'mac12': 'builds/webkit/%s/webkit-mac-12.zip',
mac12: 'builds/webkit/%s/webkit-mac-12.zip',
'mac12-arm64': 'builds/webkit/%s/webkit-mac-12-arm64.zip',
'mac13': 'builds/webkit/%s/webkit-mac-13.zip',
mac13: 'builds/webkit/%s/webkit-mac-13.zip',
'mac13-arm64': 'builds/webkit/%s/webkit-mac-13-arm64.zip',
'mac14': 'builds/webkit/%s/webkit-mac-14.zip',
mac14: 'builds/webkit/%s/webkit-mac-14.zip',
'mac14-arm64': 'builds/webkit/%s/webkit-mac-14-arm64.zip',
'mac15': 'builds/webkit/%s/webkit-mac-15.zip',
mac15: 'builds/webkit/%s/webkit-mac-15.zip',
'mac15-arm64': 'builds/webkit/%s/webkit-mac-15-arm64.zip',
'win64': 'builds/webkit/%s/webkit-win64.zip',
win64: 'builds/webkit/%s/webkit-win64.zip',
},
'ffmpeg': {
ffmpeg: {
'<unknown>': undefined,
'ubuntu18.04-x64': undefined,
'ubuntu20.04-x64': 'builds/ffmpeg/%s/ffmpeg-linux.zip',
@@ -230,19 +276,19 @@ const DOWNLOAD_PATHS = {
'mac10.13': 'builds/ffmpeg/%s/ffmpeg-mac.zip',
'mac10.14': 'builds/ffmpeg/%s/ffmpeg-mac.zip',
'mac10.15': 'builds/ffmpeg/%s/ffmpeg-mac.zip',
'mac11': 'builds/ffmpeg/%s/ffmpeg-mac.zip',
mac11: 'builds/ffmpeg/%s/ffmpeg-mac.zip',
'mac11-arm64': 'builds/ffmpeg/%s/ffmpeg-mac-arm64.zip',
'mac12': 'builds/ffmpeg/%s/ffmpeg-mac.zip',
mac12: 'builds/ffmpeg/%s/ffmpeg-mac.zip',
'mac12-arm64': 'builds/ffmpeg/%s/ffmpeg-mac-arm64.zip',
'mac13': 'builds/ffmpeg/%s/ffmpeg-mac.zip',
mac13: 'builds/ffmpeg/%s/ffmpeg-mac.zip',
'mac13-arm64': 'builds/ffmpeg/%s/ffmpeg-mac-arm64.zip',
'mac14': 'builds/ffmpeg/%s/ffmpeg-mac.zip',
mac14: 'builds/ffmpeg/%s/ffmpeg-mac.zip',
'mac14-arm64': 'builds/ffmpeg/%s/ffmpeg-mac-arm64.zip',
'mac15': 'builds/ffmpeg/%s/ffmpeg-mac.zip',
mac15: 'builds/ffmpeg/%s/ffmpeg-mac.zip',
'mac15-arm64': 'builds/ffmpeg/%s/ffmpeg-mac-arm64.zip',
'win64': 'builds/ffmpeg/%s/ffmpeg-win64.zip',
win64: 'builds/ffmpeg/%s/ffmpeg-win64.zip',
},
'winldd': {
winldd: {
'<unknown>': undefined,
'ubuntu18.04-x64': undefined,
'ubuntu20.04-x64': undefined,
@@ -259,19 +305,19 @@ const DOWNLOAD_PATHS = {
'mac10.13': undefined,
'mac10.14': undefined,
'mac10.15': undefined,
'mac11': undefined,
mac11: undefined,
'mac11-arm64': undefined,
'mac12': undefined,
mac12: undefined,
'mac12-arm64': undefined,
'mac13': undefined,
mac13: undefined,
'mac13-arm64': undefined,
'mac14': undefined,
mac14: undefined,
'mac14-arm64': undefined,
'mac15': undefined,
mac15: undefined,
'mac15-arm64': undefined,
'win64': 'builds/winldd/%s/winldd-win64.zip',
win64: 'builds/winldd/%s/winldd-win64.zip',
},
'android': {
android: {
'<unknown>': 'builds/android/%s/android.zip',
'ubuntu18.04-x64': undefined,
'ubuntu20.04-x64': 'builds/android/%s/android.zip',
@@ -288,17 +334,17 @@ const DOWNLOAD_PATHS = {
'mac10.13': 'builds/android/%s/android.zip',
'mac10.14': 'builds/android/%s/android.zip',
'mac10.15': 'builds/android/%s/android.zip',
'mac11': 'builds/android/%s/android.zip',
mac11: 'builds/android/%s/android.zip',
'mac11-arm64': 'builds/android/%s/android.zip',
'mac12': 'builds/android/%s/android.zip',
mac12: 'builds/android/%s/android.zip',
'mac12-arm64': 'builds/android/%s/android.zip',
'mac13': 'builds/android/%s/android.zip',
mac13: 'builds/android/%s/android.zip',
'mac13-arm64': 'builds/android/%s/android.zip',
'mac14': 'builds/android/%s/android.zip',
mac14: 'builds/android/%s/android.zip',
'mac14-arm64': 'builds/android/%s/android.zip',
'mac15': 'builds/android/%s/android.zip',
mac15: 'builds/android/%s/android.zip',
'mac15-arm64': 'builds/android/%s/android.zip',
'win64': 'builds/android/%s/android.zip',
win64: 'builds/android/%s/android.zip',
},
} as const;
@@ -316,17 +362,37 @@ export class PlaywrightBinary extends AbstractBinary {
const nowDateISO = new Date().toISOString();
const buildDirs: BinaryItem[] = [];
for (const browserName of Object.keys(DOWNLOAD_PATHS)) {
if (browserName === 'chromium-headless-shell' || browserName === 'chromium-tip-of-tree-headless-shell') {
if (
browserName === 'chromium-headless-shell' ||
browserName === 'chromium-tip-of-tree-headless-shell'
) {
continue;
}
buildDirs.push({ name: `${browserName}/`, isDir: true, url: '', size: '-', date: nowDateISO });
buildDirs.push({
name: `${browserName}/`,
isDir: true,
url: '',
size: '-',
date: nowDateISO,
});
}
this.dirItems = {
'/': [{ name: 'builds/', isDir: true, url: '', size: '-', date: nowDateISO }],
'/': [
{
name: 'builds/',
isDir: true,
url: '',
size: '-',
date: nowDateISO,
},
],
'/builds/': buildDirs,
};
for (const browserName of Object.keys(DOWNLOAD_PATHS)) {
if (browserName === 'chromium-headless-shell' || browserName === 'chromium-tip-of-tree-headless-shell') {
if (
browserName === 'chromium-headless-shell' ||
browserName === 'chromium-tip-of-tree-headless-shell'
) {
continue;
}
this.dirItems[`/builds/${browserName}/`] = [];
@@ -337,11 +403,16 @@ export class PlaywrightBinary extends AbstractBinary {
.filter(version => version.match(/^(?:\d+\.\d+\.\d+)(?:-beta-\d+)?$/))
// select the 20 most recently updated items
.slice(-20);
const browsers: { name: keyof typeof DOWNLOAD_PATHS; revision: string; browserVersion: string; revisionOverrides?: Record<string, string> }[] = [];
const browsers: {
name: keyof typeof DOWNLOAD_PATHS;
revision: string;
browserVersion: string;
revisionOverrides?: Record<string, string>;
}[] = [];
await Promise.all(
packageVersions.map(version =>
this.requestJSON(
`https://unpkg.com/playwright-core@${version}/browsers.json`,
`https://unpkg.com/playwright-core@${version}/browsers.json`
)
.then(data => {
// browsers: [
@@ -357,13 +428,18 @@ export class PlaywrightBinary extends AbstractBinary {
})
.catch(err => {
/* c8 ignore next 2 */
this.logger.warn('[PlaywrightBinary.fetch:error] Playwright version %s browser data request failed: %s',
version, err);
}),
),
this.logger.warn(
'[PlaywrightBinary.fetch:error] Playwright version %s browser data request failed: %s',
version,
err
);
})
)
);
// if chromium-headless-shell does not exist in browsers, copy chromium to chromium-headless-shell
if (!browsers.find(browser => browser.name === 'chromium-headless-shell')) {
if (
!browsers.find(browser => browser.name === 'chromium-headless-shell')
) {
const chromium = browsers.find(browser => browser.name === 'chromium');
// {
// "name": "chromium",
@@ -379,8 +455,14 @@ export class PlaywrightBinary extends AbstractBinary {
}
}
// if chromium-tip-of-tree-headless-shell does not exist in browsers, copy chromium-tip-of-tree to chromium-tip-of-tree-headless-shell
if (!browsers.find(browser => browser.name === 'chromium-tip-of-tree-headless-shell')) {
const chromiumTipOfTree = browsers.find(browser => browser.name === 'chromium-tip-of-tree');
if (
!browsers.find(
browser => browser.name === 'chromium-tip-of-tree-headless-shell'
)
) {
const chromiumTipOfTree = browsers.find(
browser => browser.name === 'chromium-tip-of-tree'
);
if (chromiumTipOfTree) {
browsers.push({
...chromiumTipOfTree,
@@ -402,9 +484,10 @@ export class PlaywrightBinary extends AbstractBinary {
// https://playwright.azureedge.net/builds/chromium-tip-of-tree/1293/chromium-tip-of-tree-headless-shell-mac-arm64.zip
browserDirname = 'chromium-tip-of-tree';
}
for (const [ platform, remotePath ] of Object.entries(downloadPaths)) {
for (const [platform, remotePath] of Object.entries(downloadPaths)) {
if (typeof remotePath !== 'string') continue;
const revision = browser.revisionOverrides?.[platform] ?? browser.revision;
const revision =
browser.revisionOverrides?.[platform] ?? browser.revision;
const itemDate = browser.browserVersion || revision;
const url = DOWNLOAD_HOST + util.format(remotePath, revision);
const name = path.basename(remotePath);
@@ -420,7 +503,13 @@ export class PlaywrightBinary extends AbstractBinary {
this.dirItems[dir] = [];
}
if (!this.dirItems[dir].find(item => item.name === name)) {
this.dirItems[dir].push({ name, isDir: false, url, size: '-', date: itemDate });
this.dirItems[dir].push({
name,
isDir: false,
url,
size: '-',
date: itemDate,
});
}
}
}
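
A minimal illustrative sketch of how a DOWNLOAD_PATHS entry becomes a download item, based on the loop above; the host constant and revision below are assumed values for illustration only.

import util from 'node:util';
import path from 'node:path';

// assumed host for illustration; the real value lives elsewhere in the adapter
const DOWNLOAD_HOST = 'https://playwright.azureedge.net/';
// the '%s' placeholder in a DOWNLOAD_PATHS entry is filled with the revision,
// preferring a per-platform revisionOverrides entry when one exists
const remotePath = 'builds/chromium/%s/chromium-headless-shell-linux-arm64.zip';
const revision = '1140'; // hypothetical revision
const url = DOWNLOAD_HOST + util.format(remotePath, revision);
const name = path.basename(remotePath); // 'chromium-headless-shell-linux-arm64.zip'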

View File

@@ -1,8 +1,10 @@
import path from 'node:path';
import { SingletonProto } from '@eggjs/tegg';
import { BinaryType } from '../../enum/Binary.js';
import binaries, { BinaryName } from '../../../../config/binaries.js';
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary.js';
import type { BinaryName } from '../../../../config/binaries.js';
import binaries from '../../../../config/binaries.js';
import type { FetchResult, BinaryItem } from './AbstractBinary.js';
import { AbstractBinary, BinaryAdapter } from './AbstractBinary.js';
@SingletonProto()
@BinaryAdapter(BinaryType.Prisma)
@@ -37,8 +39,10 @@ export class PrismaBinary extends AbstractBinary {
const pkg = data.versions[version];
// https://registry.npmjs.com/@prisma/engines/4.14.1
// https://registry.npmjs.com/@prisma/engines/5.7.0 should read from dependencies
const enginesVersion = pkg.devDependencies?.['@prisma/engines-version']
|| pkg.dependencies?.['@prisma/engines-version'] || '';
const enginesVersion =
pkg.devDependencies?.['@prisma/engines-version'] ||
pkg.dependencies?.['@prisma/engines-version'] ||
'';
// "@prisma/engines-version": "4.14.0-67.d9a4c5988f480fa576d43970d5a23641aa77bc9c"
// "@prisma/engines-version": "5.7.0-41.79fb5193cf0a8fdbef536e4b4a159cad677ab1b9"
const matched = /\.(\w{30,})$/.exec(enginesVersion);
@@ -56,7 +60,10 @@ export class PrismaBinary extends AbstractBinary {
}
}
async fetch(dir: string, binaryName: BinaryName): Promise<FetchResult | undefined> {
async fetch(
dir: string,
binaryName: BinaryName
): Promise<FetchResult | undefined> {
const existsItems = this.dirItems[dir];
if (existsItems) {
return { items: existsItems, nextParams: null };

View File

@@ -1,6 +1,7 @@
import { SingletonProto } from '@eggjs/tegg';
import { BinaryType } from '../../enum/Binary.js';
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary.js';
import type { FetchResult, BinaryItem } from './AbstractBinary.js';
import { AbstractBinary, BinaryAdapter } from './AbstractBinary.js';
@SingletonProto()
@BinaryAdapter(BinaryType.Puppeteer)
@@ -23,7 +24,9 @@ export class PuppeteerBinary extends AbstractBinary {
for (const version in data.versions) {
// find chromium versions
const pkg = data.versions[version];
const revision = pkg.puppeteer?.chromium_revision ? String(pkg.puppeteer.chromium_revision) : '';
const revision = pkg.puppeteer?.chromium_revision
? String(pkg.puppeteer.chromium_revision)
: '';
if (revision && !chromiumRevisions.has(revision)) {
chromiumRevisions.set(revision, data.time[version]);
}
@@ -35,7 +38,8 @@ export class PuppeteerBinary extends AbstractBinary {
// chromium: '768783',
// firefox: 'latest',
// };
const unpkgURL = 'https://unpkg.com/puppeteer-core@latest/lib/cjs/puppeteer/revisions.js';
const unpkgURL =
'https://unpkg.com/puppeteer-core@latest/lib/cjs/puppeteer/revisions.js';
const text = await this.requestXml(unpkgURL);
const m = /chromium:\s+'(\d+)',/.exec(text);
if (m && !chromiumRevisions.has(m[1])) {
@@ -44,7 +48,8 @@ export class PuppeteerBinary extends AbstractBinary {
// download LAST_CHANGE
// https://github.com/chaopeng/chromium-downloader/blob/master/get-chromium#L28
const LAST_CHANGE_URL = 'https://www.googleapis.com/download/storage/v1/b/chromium-browser-snapshots/o/Linux_x64%2FLAST_CHANGE?alt=media';
const LAST_CHANGE_URL =
'https://www.googleapis.com/download/storage/v1/b/chromium-browser-snapshots/o/Linux_x64%2FLAST_CHANGE?alt=media';
const lastRevision = await this.requestXml(LAST_CHANGE_URL);
if (lastRevision) {
chromiumRevisions.set(lastRevision, new Date().toISOString());
@@ -77,7 +82,7 @@ export class PuppeteerBinary extends AbstractBinary {
// "11.0.0":"2021-11-03T09:29:12.751Z"
chromiumRevisions.set('901912', data.time['11.0.0']);
const platforms = [ 'Linux_x64', 'Mac', 'Mac_Arm', 'Win', 'Win_x64' ];
const platforms = ['Linux_x64', 'Mac', 'Mac_Arm', 'Win', 'Win_x64'];
for (const platform of platforms) {
this.dirItems['/'].push({
name: `${platform}/`,
@@ -88,7 +93,7 @@ export class PuppeteerBinary extends AbstractBinary {
});
this.dirItems[`/${platform}/`] = [];
}
for (const [ revision, date ] of chromiumRevisions.entries()) {
for (const [revision, date] of chromiumRevisions.entries()) {
// https://github.com/puppeteer/puppeteer/blob/eebf452d38b79bb2ea1a1ba84c3d2ea6f2f9f899/src/node/BrowserFetcher.ts#L40
// chrome: {
// linux: '%s/chromium-browser-snapshots/Linux_x64/%d/%s.zip',
@@ -113,7 +118,7 @@ export class PuppeteerBinary extends AbstractBinary {
size: '-',
isDir: false,
url: `https://storage.googleapis.com/chromium-browser-snapshots/${platform}/${revision}/${name}`,
ignoreDownloadStatuses: [ 404 ],
ignoreDownloadStatuses: [404],
},
];
}
@@ -124,10 +129,7 @@ export class PuppeteerBinary extends AbstractBinary {
}
// https://github.com/puppeteer/puppeteer/blob/eebf452d38b79bb2ea1a1ba84c3d2ea6f2f9f899/src/node/BrowserFetcher.ts#L72
private archiveName(
platform: string,
revision: string,
): string {
private archiveName(platform: string, revision: string): string {
if (platform === 'Linux_x64') return 'chrome-linux';
if (platform === 'Mac' || platform === 'Mac_Arm') return 'chrome-mac';
if (platform === 'Win' || platform === 'Win_x64') {

View File

@@ -1,6 +1,7 @@
import { SingletonProto } from '@eggjs/tegg';
import { BinaryType } from '../../enum/Binary.js';
import { AbstractBinary, FetchResult, BinaryItem, BinaryAdapter } from './AbstractBinary.js';
import type { FetchResult, BinaryItem } from './AbstractBinary.js';
import { AbstractBinary, BinaryAdapter } from './AbstractBinary.js';
@SingletonProto()
@BinaryAdapter(BinaryType.Sqlcipher)
@@ -16,7 +17,8 @@ export class SqlcipherBinary extends AbstractBinary {
} = {
'/': [],
};
const s3Url = 'https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher';
const s3Url =
'https://journeyapps-node-binary.s3.amazonaws.com/@journeyapps/sqlcipher';
const pkgUrl = 'https://registry.npmjs.com/@journeyapps/sqlcipher';
const data = await this.requestJSON(pkgUrl);
// https://github.com/journeyapps/node-sqlcipher/blob/master/.circleci/config.yml#L407
@@ -48,7 +50,8 @@ export class SqlcipherBinary extends AbstractBinary {
if (major < 5) continue;
// >= 5.0.0
const pkgVersion = data.versions[version];
const napiVersions = pkgVersion.binary && pkgVersion.binary.napi_versions || [];
const napiVersions =
(pkgVersion.binary && pkgVersion.binary.napi_versions) || [];
const date = data.time[version];
dirItems['/'].push({
name: `v${version}/`,
@@ -74,7 +77,7 @@ export class SqlcipherBinary extends AbstractBinary {
size: '-',
isDir: false,
url: `${s3Url}/v${version}/${name}`,
ignoreDownloadStatuses: [ 404, 403 ],
ignoreDownloadStatuses: [404, 403],
});
}
}

View File

@@ -1,14 +1,8 @@
import {
ImplDecorator,
Inject,
QualifierImplDecoratorUtil,
} from '@eggjs/tegg';
import { RegistryType } from '../../../common/enum/Registry.js';
import { Registry } from '../../../core/entity/Registry.js';
import {
EggHttpClient,
EggLogger,
} from 'egg';
import type { ImplDecorator } from '@eggjs/tegg';
import { Inject, QualifierImplDecoratorUtil } from '@eggjs/tegg';
import type { RegistryType } from '../../../common/enum/Registry.js';
import type { Registry } from '../../../core/entity/Registry.js';
import type { EggHttpClient, EggLogger } from 'egg';
export const CHANGE_STREAM_ATTRIBUTE = 'CHANGE_STREAM_ATTRIBUTE';
export type ChangesStreamChange = {
@@ -24,9 +18,16 @@ export abstract class AbstractChangeStream {
protected httpclient: EggHttpClient;
abstract getInitialSince(registry: Registry): Promise<string>;
abstract fetchChanges(registry: Registry, since: string): AsyncGenerator<ChangesStreamChange>;
abstract fetchChanges(
registry: Registry,
since: string
): AsyncGenerator<ChangesStreamChange>;
getChangesStreamUrl(registry: Registry, since: string, limit?: number): string {
getChangesStreamUrl(
registry: Registry,
since: string,
limit?: number
): string {
const url = new URL(registry.changeStream);
url.searchParams.set('since', since);
if (limit) {
@@ -36,5 +37,10 @@ export abstract class AbstractChangeStream {
}
}
export const RegistryChangesStream: ImplDecorator<AbstractChangeStream, typeof RegistryType> =
QualifierImplDecoratorUtil.generatorDecorator(AbstractChangeStream, CHANGE_STREAM_ATTRIBUTE);
export const RegistryChangesStream: ImplDecorator<
AbstractChangeStream,
typeof RegistryType
> = QualifierImplDecoratorUtil.generatorDecorator(
AbstractChangeStream,
CHANGE_STREAM_ATTRIBUTE
);
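
A quick sketch of what getChangesStreamUrl produces, assuming a registry whose changeStream points at the npm replicate endpoint (hypothetical value); when a limit is passed it is presumably appended the same way as since.

const changeStream = 'https://replicate.npmjs.com/_changes'; // assumed endpoint
const url = new URL(changeStream);
url.searchParams.set('since', '9527000');
url.searchParams.set('limit', '1000');
console.log(url.toString());
// -> https://replicate.npmjs.com/_changes?since=9527000&limit=1000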

View File

@@ -1,15 +1,17 @@
import { SingletonProto } from '@eggjs/tegg';
import { E500 } from 'egg-errors';
import { RegistryType } from '../../../common/enum/Registry.js';
import { Registry } from '../../../core/entity/Registry.js';
import { AbstractChangeStream, RegistryChangesStream } from './AbstractChangesStream.js';
import type { Registry } from '../../../core/entity/Registry.js';
import {
AbstractChangeStream,
RegistryChangesStream,
} from './AbstractChangesStream.js';
@SingletonProto()
@RegistryChangesStream(RegistryType.Cnpmcore)
export class CnpmcoreChangesStream extends AbstractChangeStream {
async getInitialSince(registry: Registry): Promise<string> {
const db = (new URL(registry.changeStream)).origin;
const db = new URL(registry.changeStream).origin;
const { status, data } = await this.httpclient.request(db, {
followRedirect: true,
timeout: 10000,
@@ -19,12 +21,17 @@ export class CnpmcoreChangesStream extends AbstractChangeStream {
throw new E500(`get getInitialSince failed: ${data.update_seq}`);
}
const since = String(data.update_seq - 10);
this.logger.warn('[NpmChangesStream.getInitialSince:firstSeq] GET %s status: %s, data: %j, since: %s',
registry.name, status, data, since);
this.logger.warn(
'[NpmChangesStream.getInitialSince:firstSeq] GET %s status: %s, data: %j, since: %s',
registry.name,
status,
data,
since
);
return since;
}
async* fetchChanges(registry: Registry, since: string) {
async *fetchChanges(registry: Registry, since: string) {
const db = this.getChangesStreamUrl(registry, since);
// json mode
const { data } = await this.httpclient.request(db, {

View File

@@ -1,8 +1,11 @@
import { SingletonProto } from '@eggjs/tegg';
import { E500 } from 'egg-errors';
import { RegistryType } from '../../../common/enum/Registry.js';
import { Registry } from '../../../core/entity/Registry.js';
import { AbstractChangeStream, RegistryChangesStream } from './AbstractChangesStream.js';
import type { Registry } from '../../../core/entity/Registry.js';
import {
AbstractChangeStream,
RegistryChangesStream,
} from './AbstractChangesStream.js';
const MAX_LIMIT = 10000;
@@ -12,25 +15,32 @@ type FetchResults = {
type: string;
id: string;
changes: Record<string, string>[];
gmt_modified: Date,
gmt_modified: Date;
}[];
};
@SingletonProto()
@RegistryChangesStream(RegistryType.Cnpmjsorg)
export class CnpmjsorgChangesStream extends AbstractChangeStream {
// cnpmjsorg does not implement the update_seq field
// so by default return the current timestamp as a string
async getInitialSince(registry: Registry): Promise<string> {
const since = String((new Date()).getTime());
this.logger.warn(`[CnpmjsorgChangesStream.getInitialSince] since: ${since}, skip query ${registry.changeStream}`);
const since = String(new Date().getTime());
this.logger.warn(
`[CnpmjsorgChangesStream.getInitialSince] since: ${since}, skip query ${registry.changeStream}`
);
return since;
}
private async tryFetch(registry: Registry, since: string, limit = 1000): Promise<{ data: FetchResults }> {
private async tryFetch(
registry: Registry,
since: string,
limit = 1000
): Promise<{ data: FetchResults }> {
if (limit > MAX_LIMIT) {
throw new E500(`limit too large, current since: ${since}, limit: ${limit}`);
throw new E500(
`limit too large, current since: ${since}, limit: ${limit}`
);
}
const db = this.getChangesStreamUrl(registry, since, limit);
// json mode
@@ -42,7 +52,7 @@ export class CnpmjsorgChangesStream extends AbstractChangeStream {
});
const { results = [] } = res.data;
if (results?.length >= limit) {
const [ first ] = results;
const [first] = results;
const last = results[results.length - 1];
if (first.gmt_modified === last.gmt_modified) {
return await this.tryFetch(registry, since, limit + 1000);
@@ -52,7 +62,7 @@ export class CnpmjsorgChangesStream extends AbstractChangeStream {
return res;
}
async* fetchChanges(registry: Registry, since: string) {
async *fetchChanges(registry: Registry, since: string) {
// ref: https://github.com/cnpm/cnpmjs.org/pull/1734
// because cnpmjsorg cannot compute an exact seq,
// `since` is a timestamp, so we must ensure the first and last gmtModified in a single batch are not equal

View File

@@ -1,15 +1,18 @@
import { SingletonProto } from '@eggjs/tegg';
import { E500 } from 'egg-errors';
import { RegistryType } from '../../../common/enum/Registry.js';
import { Registry } from '../../../core/entity/Registry.js';
import { AbstractChangeStream, ChangesStreamChange, RegistryChangesStream } from './AbstractChangesStream.js';
import type { Registry } from '../../../core/entity/Registry.js';
import type { ChangesStreamChange } from './AbstractChangesStream.js';
import {
AbstractChangeStream,
RegistryChangesStream,
} from './AbstractChangesStream.js';
@SingletonProto()
@RegistryChangesStream(RegistryType.Npm)
export class NpmChangesStream extends AbstractChangeStream {
async getInitialSince(registry: Registry): Promise<string> {
const db = (new URL(registry.changeStream)).origin;
const db = new URL(registry.changeStream).origin;
const { status, data } = await this.httpclient.request(db, {
followRedirect: true,
timeout: 10000,
@@ -19,12 +22,18 @@ export class NpmChangesStream extends AbstractChangeStream {
if (!data.update_seq) {
throw new E500(`get getInitialSince failed: ${data.update_seq}`);
}
this.logger.warn('[NpmChangesStream.getInitialSince] GET %s status: %s, data: %j, since: %s',
registry.name, registry.changeStream, status, data, since);
this.logger.warn(
'[NpmChangesStream.getInitialSince] GET %s status: %s, data: %j, since: %s',
registry.name,
registry.changeStream,
status,
data,
since
);
return since;
}
async* fetchChanges(registry: Registry, since: string) {
async *fetchChanges(registry: Registry, since: string) {
const db = this.getChangesStreamUrl(registry, since);
const { res } = await this.httpclient.request(db, {
streaming: true,
@@ -51,5 +60,4 @@ export class NpmChangesStream extends AbstractChangeStream {
}
}
}
}

View File

@@ -1,7 +1,8 @@
import { performance } from 'node:perf_hooks';
import { Advice, AdviceContext, IAdvice } from '@eggjs/tegg/aop';
import type { AdviceContext, IAdvice } from '@eggjs/tegg/aop';
import { Advice } from '@eggjs/tegg/aop';
import { Inject } from '@eggjs/tegg';
import { EggLogger } from 'egg';
import type { EggLogger } from 'egg';
// auto print async function call performance timer log into logger
@Advice()
@@ -21,7 +22,12 @@ export class AsyncTimer implements IAdvice {
async afterFinally(ctx: AdviceContext) {
const ms = Math.floor((performance.now() - this.start) * 1000) / 1000;
this.logger.info('[%s] [%s:%s|%s]',
ms, ctx.that.constructor.name, ctx.method, this.succeed ? 'T' : 'F');
this.logger.info(
'[%s] [%s:%s|%s]',
ms,
ctx.that.constructor.name,
ctx.method,
this.succeed ? 'T' : 'F'
);
}
}

View File

@@ -1,8 +1,8 @@
import { Readable } from 'node:stream';
import { IncomingHttpHeaders } from 'node:http';
import { EggContext } from '@eggjs/tegg';
import { estypes } from '@elastic/elasticsearch';
import { CnpmcoreConfig } from '../port/config.js';
import type { Readable } from 'node:stream';
import type { IncomingHttpHeaders } from 'node:http';
import type { EggContext } from '@eggjs/tegg';
import type { estypes } from '@elastic/elasticsearch';
import type { CnpmcoreConfig } from '../port/config.js';
export interface UploadResult {
key: string;
@@ -19,8 +19,8 @@ export interface UploadOptions {
export interface AppendOptions {
key: string;
position?: string,
headers?: IncomingHttpHeaders,
position?: string;
headers?: IncomingHttpHeaders;
}
export interface DownloadOptions {
@@ -40,7 +40,11 @@ export interface NFSClient {
createDownloadStream(key: string): Promise<Readable | undefined>;
download(key: string, filepath: string, options: DownloadOptions): Promise<void>;
download(
key: string,
filepath: string,
options: DownloadOptions
): Promise<void>;
url?(key: string): string;
}

View File

@@ -1,5 +1,7 @@
import { Entity, EntityData } from './Entity.js';
import { EasyData, EntityUtil } from '../util/EntityUtil.js';
import type { EntityData } from './Entity.js';
import { Entity } from './Entity.js';
import type { EasyData } from '../util/EntityUtil.js';
import { EntityUtil } from '../util/EntityUtil.js';
interface BinaryData extends EntityData {
binaryId: string;

View File

@@ -1,5 +1,7 @@
import { Entity, EntityData } from './Entity.js';
import { EasyData, EntityUtil } from '../util/EntityUtil.js';
import type { EntityData } from './Entity.js';
import { Entity } from './Entity.js';
import type { EasyData } from '../util/EntityUtil.js';
import { EntityUtil } from '../util/EntityUtil.js';
interface ChangeData extends EntityData {
changeId: string;

View File

@@ -1,5 +1,7 @@
import { Entity, EntityData } from './Entity.js';
import { EasyData, EntityUtil } from '../util/EntityUtil.js';
import type { EntityData } from './Entity.js';
import { Entity } from './Entity.js';
import type { EasyData } from '../util/EntityUtil.js';
import { EntityUtil } from '../util/EntityUtil.js';
interface DistData extends EntityData {
distId: string;

View File

@@ -1,9 +1,14 @@
import crypto from 'node:crypto';
import { Entity, EntityData } from './Entity.js';
import { EasyData, EntityUtil } from '../util/EntityUtil.js';
import { HookType } from '../../common/enum/Hook.js';
import type { EntityData } from './Entity.js';
import { Entity } from './Entity.js';
import type { EasyData } from '../util/EntityUtil.js';
import { EntityUtil } from '../util/EntityUtil.js';
import type { HookType } from '../../common/enum/Hook.js';
export type CreateHookData = Omit<EasyData<HookData, 'hookId'>, 'enable' | 'latestTaskId'>;
export type CreateHookData = Omit<
EasyData<HookData, 'hookId'>,
'enable' | 'latestTaskId'
>;
export interface HookData extends EntityData {
hookId: string;
@@ -50,7 +55,8 @@ export class Hook extends Entity {
// the payload can be very large; stringifying it multiple times would waste too much CPU
signPayload(payload: object) {
const payloadStr = JSON.stringify(payload);
const digest = crypto.createHmac('sha256', this.secret)
const digest = crypto
.createHmac('sha256', this.secret)
.update(JSON.stringify(payload))
.digest('hex');
return {

View File

@@ -1,5 +1,7 @@
import { Entity, EntityData } from './Entity.js';
import { EasyData, EntityUtil } from '../util/EntityUtil.js';
import type { EntityData } from './Entity.js';
import { Entity } from './Entity.js';
import type { EasyData } from '../util/EntityUtil.js';
import { EntityUtil } from '../util/EntityUtil.js';
import { Dist } from './Dist.js';
import { getFullname } from '../../common/PackageUtil.js';
@@ -23,7 +25,10 @@ export enum DIST_NAMES {
}
export function isPkgManifest(fileType: DIST_NAMES) {
return fileType === DIST_NAMES.FULL_MANIFESTS || fileType === DIST_NAMES.ABBREVIATED_MANIFESTS;
return (
fileType === DIST_NAMES.FULL_MANIFESTS ||
fileType === DIST_NAMES.ABBREVIATED_MANIFESTS
);
}
interface FileInfo {

View File

@@ -1,5 +1,7 @@
import { Entity, EntityData } from './Entity.js';
import { EasyData, EntityUtil } from '../util/EntityUtil.js';
import type { EntityData } from './Entity.js';
import { Entity } from './Entity.js';
import type { EasyData } from '../util/EntityUtil.js';
import { EntityUtil } from '../util/EntityUtil.js';
interface PackageTagData extends EntityData {
packageId: string;

View File

@@ -1,6 +1,8 @@
import { Dist } from './Dist.js';
import { Entity, EntityData } from './Entity.js';
import { EasyData, EntityUtil } from '../util/EntityUtil.js';
import type { Dist } from './Dist.js';
import type { EntityData } from './Entity.js';
import { Entity } from './Entity.js';
import type { EasyData } from '../util/EntityUtil.js';
import { EntityUtil } from '../util/EntityUtil.js';
import { PaddingSemVer } from './PaddingSemVer.js';
interface PackageVersionData extends EntityData {
@@ -48,7 +50,9 @@ export class PackageVersion extends Entity {
}
}
static create(data: EasyData<PackageVersionData, 'packageVersionId'>): PackageVersion {
static create(
data: EasyData<PackageVersionData, 'packageVersionId'>
): PackageVersion {
const newData = EntityUtil.defaultData(data, 'packageVersionId');
return new PackageVersion(newData);
}

View File

@@ -1,5 +1,7 @@
import { Entity, EntityData } from './Entity.js';
import { EasyData, EntityUtil } from '../util/EntityUtil.js';
import type { EntityData } from './Entity.js';
import { Entity } from './Entity.js';
import type { EasyData } from '../util/EntityUtil.js';
import { EntityUtil } from '../util/EntityUtil.js';
interface PackageVersionBlockData extends EntityData {
packageVersionBlockId: string;
@@ -22,7 +24,9 @@ export class PackageVersionBlock extends Entity {
this.reason = data.reason;
}
static create(data: EasyData<PackageVersionBlockData, 'packageVersionBlockId'>): PackageVersionBlock {
static create(
data: EasyData<PackageVersionBlockData, 'packageVersionBlockId'>
): PackageVersionBlock {
const newData = EntityUtil.defaultData(data, 'packageVersionBlockId');
return new PackageVersionBlock(newData);
}

View File

@@ -1,6 +1,8 @@
import { Dist } from './Dist.js';
import { Entity, EntityData } from './Entity.js';
import { EasyData, EntityUtil } from '../util/EntityUtil.js';
import type { Dist } from './Dist.js';
import type { EntityData } from './Entity.js';
import { Entity } from './Entity.js';
import type { EasyData } from '../util/EntityUtil.js';
import { EntityUtil } from '../util/EntityUtil.js';
interface PackageVersionFileData extends EntityData {
packageVersionFileId: string;
@@ -33,10 +35,14 @@ export class PackageVersionFile extends Entity {
}
get path() {
return this.directory === '/' ? `/${this.name}` : `${this.directory}/${this.name}`;
return this.directory === '/'
? `/${this.name}`
: `${this.directory}/${this.name}`;
}
static create(data: EasyData<PackageVersionFileData, 'packageVersionFileId'>): PackageVersionFile {
static create(
data: EasyData<PackageVersionFileData, 'packageVersionFileId'>
): PackageVersionFile {
const newData = EntityUtil.defaultData(data, 'packageVersionFileId');
return new PackageVersionFile(newData);
}

View File

@@ -1,5 +1,7 @@
import { Entity, EntityData } from './Entity.js';
import { EasyData, EntityUtil } from '../util/EntityUtil.js';
import type { EntityData } from './Entity.js';
import { Entity } from './Entity.js';
import type { EasyData } from '../util/EntityUtil.js';
import { EntityUtil } from '../util/EntityUtil.js';
interface PackageVersionManifestData extends EntityData {
packageId: string;
@@ -22,7 +24,9 @@ export class PackageVersionManifest extends Entity {
this.manifest = data.manifest;
}
static create(data: EasyData<PackageVersionManifestData, 'packageVersionManifestId'>): PackageVersionManifest {
static create(
data: EasyData<PackageVersionManifestData, 'packageVersionManifestId'>
): PackageVersionManifest {
const newData = EntityUtil.defaultData(data, 'packageVersionManifestId');
return new PackageVersionManifest(newData);
}

View File

@@ -37,7 +37,7 @@ export class PaddingSemVer {
static paddingVersion(v: number) {
const t = String(v);
if (t.length <= 16) {
const padding = new Array(16 - t.length).fill(0)
const padding = Array.from({ length: 16 - t.length }).fill(0)
.join('');
return padding + t;
}

View File

@@ -1,6 +1,8 @@
import { Entity, EntityData } from './Entity.js';
import { EasyData } from '../util/EntityUtil.js';
import { DIST_NAMES, isPkgManifest } from './Package.js';
import type { EntityData } from './Entity.js';
import { Entity } from './Entity.js';
import type { EasyData } from '../util/EntityUtil.js';
import type { DIST_NAMES } from './Package.js';
import { isPkgManifest } from './Package.js';
import { PROXY_CACHE_DIR_NAME } from '../../common/constants.js';
interface ProxyCacheData extends EntityData {
fullname: string;
@@ -8,7 +10,10 @@ interface ProxyCacheData extends EntityData {
version?: string;
}
export type CreateProxyCacheData = Omit<EasyData<ProxyCacheData, 'id'>, 'id'| 'filePath'>;
export type CreateProxyCacheData = Omit<
EasyData<ProxyCacheData, 'id'>,
'id' | 'filePath'
>;
export class ProxyCache extends Entity {
readonly fullname: string;
@@ -37,5 +42,4 @@ export class ProxyCache extends Entity {
data.updatedAt = new Date();
return data;
}
}

View File

@@ -1,5 +1,7 @@
import { Entity, EntityData } from './Entity.js';
import { EasyData, EntityUtil } from '../util/EntityUtil.js';
import type { EntityData } from './Entity.js';
import { Entity } from './Entity.js';
import type { EasyData } from '../util/EntityUtil.js';
import { EntityUtil } from '../util/EntityUtil.js';
import type { RegistryType } from '../../common/enum/Registry.js';
interface RegistryData extends EntityData {
@@ -12,7 +14,10 @@ interface RegistryData extends EntityData {
authToken?: string;
}
export type CreateRegistryData = Omit<EasyData<RegistryData, 'registryId'>, 'id'>;
export type CreateRegistryData = Omit<
EasyData<RegistryData, 'registryId'>,
'id'
>;
export class Registry extends Entity {
name: string;
@@ -35,7 +40,10 @@ export class Registry extends Entity {
}
public static create(data: CreateRegistryData): Registry {
const newData = EntityUtil.defaultData<RegistryData, 'registryId'>(data, 'registryId');
const newData = EntityUtil.defaultData<RegistryData, 'registryId'>(
data,
'registryId'
);
return new Registry(newData);
}
}

View File

@@ -1,5 +1,7 @@
import { Entity, EntityData } from './Entity.js';
import { EasyData, EntityUtil } from '../util/EntityUtil.js';
import type { EntityData } from './Entity.js';
import { Entity } from './Entity.js';
import type { EasyData } from '../util/EntityUtil.js';
import { EntityUtil } from '../util/EntityUtil.js';
interface ScopeData extends EntityData {
name: string;

View File

@@ -1,13 +1,16 @@
import os from 'node:os';
import path from 'node:path';
import { InternalServerError } from 'egg-errors';
import { Entity, EntityData } from './Entity.js';
import { EasyData, EntityUtil } from '../util/EntityUtil.js';
import type { EntityData } from './Entity.js';
import { Entity } from './Entity.js';
import type { EasyData } from '../util/EntityUtil.js';
import { EntityUtil } from '../util/EntityUtil.js';
import { TaskType, TaskState } from '../../common/enum/Task.js';
import { PROXY_CACHE_DIR_NAME } from '../../common/constants.js';
import dayjs from '../../common/dayjs.js';
import { HookEvent } from './HookEvent.js';
import { DIST_NAMES, isPkgManifest } from './Package.js';
import type { HookEvent } from './HookEvent.js';
import type { DIST_NAMES } from './Package.js';
import { isPkgManifest } from './Package.js';
export const HOST_NAME = os.hostname();
export const PID = process.pid;
@@ -44,9 +47,9 @@ export type SyncPackageTaskOptions = {
};
export type UpdateProxyCacheTaskOptions = {
fullname: string,
version?: string,
fileType: DIST_NAMES,
fullname: string;
version?: string;
fileType: DIST_NAMES;
};
export interface CreateHookTaskData extends TaskBaseData {
@@ -68,18 +71,18 @@ export interface CreateSyncPackageTaskData extends TaskBaseData {
}
export interface CreateUpdateProxyCacheTaskData extends TaskBaseData {
fullname: string,
version?: string,
fileType: DIST_NAMES,
filePath: string
fullname: string;
version?: string;
fileType: DIST_NAMES;
filePath: string;
}
export interface ChangesStreamTaskData extends TaskBaseData {
since: string;
last_package?: string,
last_package_created?: Date,
task_count?: number,
registryId?: string,
last_package?: string;
last_package_created?: Date;
task_count?: number;
registryId?: string;
}
export interface TaskUpdateCondition {
@@ -133,12 +136,17 @@ export class Task<T extends TaskBaseData = TaskBaseData> extends Entity {
this.data.taskWorker = `${HOST_NAME}:${PID}`;
}
private static create<T extends TaskBaseData>(data: EasyData<TaskData<T>, 'taskId'>): Task<T> {
private static create<T extends TaskBaseData>(
data: EasyData<TaskData<T>, 'taskId'>
): Task<T> {
const newData = EntityUtil.defaultData(data, 'taskId');
return new Task(newData);
}
public static createSyncPackage(fullname: string, options?: SyncPackageTaskOptions): CreateSyncPackageTask {
public static createSyncPackage(
fullname: string,
options?: SyncPackageTaskOptions
): CreateSyncPackageTask {
const data = {
type: TaskType.SyncPackage,
state: TaskState.Waiting,
@@ -161,7 +169,11 @@ export class Task<T extends TaskBaseData = TaskBaseData> extends Entity {
return task;
}
public static createChangesStream(targetName: string, registryId = '', since = ''): ChangesStreamTask {
public static createChangesStream(
targetName: string,
registryId = '',
since = ''
): ChangesStreamTask {
const data = {
type: TaskType.ChangesStream,
state: TaskState.Waiting,
@@ -209,7 +221,10 @@ export class Task<T extends TaskBaseData = TaskBaseData> extends Entity {
return task;
}
public static createTriggerHookTask(hookEvent: HookEvent, hookId: string): TriggerHookTask {
public static createTriggerHookTask(
hookEvent: HookEvent,
hookId: string
): TriggerHookTask {
const data = {
type: TaskType.TriggerHook,
state: TaskState.Waiting,
@@ -249,12 +264,17 @@ export class Task<T extends TaskBaseData = TaskBaseData> extends Entity {
}
public static needMergeWhenWaiting(type: TaskType) {
return [ TaskType.SyncBinary, TaskType.SyncPackage ].includes(type);
return [TaskType.SyncBinary, TaskType.SyncPackage].includes(type);
}
public static createUpdateProxyCache(targetName: string, options: UpdateProxyCacheTaskOptions):CreateUpdateProxyCacheTask {
public static createUpdateProxyCache(
targetName: string,
options: UpdateProxyCacheTaskOptions
): CreateUpdateProxyCacheTask {
if (!isPkgManifest(options.fileType)) {
throw new InternalServerError('should not update package version manifest.');
throw new InternalServerError(
'should not update package version manifest.'
);
}
const filePath = `/${PROXY_CACHE_DIR_NAME}/${options.fullname}/${options.fileType}`;
const data = {

View File

@@ -1,6 +1,8 @@
import dayjs from 'dayjs';
import { Entity, EntityData } from './Entity.js';
import { EasyData, EntityUtil } from '../util/EntityUtil.js';
import type { EntityData } from './Entity.js';
import { Entity } from './Entity.js';
import type { EasyData } from '../util/EntityUtil.js';
import { EntityUtil } from '../util/EntityUtil.js';
export enum TokenType {
granular = 'granular',
@@ -17,7 +19,7 @@ interface BaseTokenData extends EntityData {
lastUsedAt?: Date;
}
interface ClassicTokenData extends BaseTokenData{
interface ClassicTokenData extends BaseTokenData {
isAutomation?: boolean;
}
interface GranularTokenData extends BaseTokenData {
@@ -31,7 +33,9 @@ interface GranularTokenData extends BaseTokenData {
type TokenData = ClassicTokenData | GranularTokenData;
export function isGranularToken(data: TokenData | Token): data is GranularTokenData {
export function isGranularToken(
data: TokenData | Token
): data is GranularTokenData {
return data.type === TokenType.granular;
}
@@ -79,9 +83,10 @@ export class Token extends Entity {
static create(data: EasyData<TokenData, 'tokenId'>): Token {
const newData = EntityUtil.defaultData(data, 'tokenId');
if (isGranularToken(newData) && !newData.expiredAt) {
newData.expiredAt = dayjs(newData.createdAt).add(newData.expires, 'days').toDate();
newData.expiredAt = dayjs(newData.createdAt)
.add(newData.expires, 'days')
.toDate();
}
return new Token(newData);
}
}
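
A tiny sketch of the expiry computation above: a granular token without an explicit expiredAt gets createdAt plus expires days (values below are illustrative only).

import dayjs from 'dayjs';

const createdAt = new Date('2025-03-13T00:00:00Z');
const expires = 30; // hypothetical number of days
const expiredAt = dayjs(createdAt).add(expires, 'days').toDate();
// -> 2025-04-12T00:00:00.000Z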

View File

@@ -1,5 +1,7 @@
import { Entity, EntityData } from './Entity.js';
import { EasyData, EntityUtil } from '../util/EntityUtil.js';
import type { EntityData } from './Entity.js';
import { Entity } from './Entity.js';
import type { EasyData } from '../util/EntityUtil.js';
import { EntityUtil } from '../util/EntityUtil.js';
import { cleanUserPrefix } from '../../common/PackageUtil.js';
interface UserData extends EntityData {

View File

@@ -1,5 +1,7 @@
import { Entity, EntityData } from './Entity.js';
import { EasyData, EntityUtil } from '../util/EntityUtil.js';
import type { EntityData } from './Entity.js';
import { Entity } from './Entity.js';
import type { EasyData } from '../util/EntityUtil.js';
import { EntityUtil } from '../util/EntityUtil.js';
interface WebauthnCredentialData extends EntityData {
wancId: string;
@@ -25,7 +27,9 @@ export class WebauthnCredential extends Entity {
this.browserType = data.browserType;
}
static create(data: EasyData<WebauthnCredentialData, 'wancId'>): WebauthnCredential {
static create(
data: EasyData<WebauthnCredentialData, 'wancId'>
): WebauthnCredential {
const newData = EntityUtil.defaultData(data, 'wancId');
return new WebauthnCredential(newData);
}

View File

@@ -1,8 +1,8 @@
import { Event, Inject } from '@eggjs/tegg';
import { EggLogger } from 'egg';
import type { EggLogger } from 'egg';
import { PACKAGE_VERSION_ADDED } from './index.js';
import { BUG_VERSIONS } from '../../common/constants.js';
import { BugVersionService } from '../service/BugVersionService.js';
import type { BugVersionService } from '../service/BugVersionService.js';
@Event(PACKAGE_VERSION_ADDED)
export class BugVersionFixHandler {

View File

@@ -12,7 +12,7 @@ import {
PACKAGE_MAINTAINER_REMOVED,
PACKAGE_META_CHANGED,
} from './index.js';
import { CacheService } from '../../core/service/CacheService.js';
import type { CacheService } from '../../core/service/CacheService.js';
class CacheCleanerEvent {
@Inject()

View File

@@ -1,5 +1,6 @@
import { EggAppConfig } from 'egg';
import type { EggAppConfig } from 'egg';
import { Event, Inject } from '@eggjs/tegg';
import type { PackageMetaChange } from './index.js';
import {
PACKAGE_UNPUBLISHED,
PACKAGE_VERSION_ADDED,
@@ -9,14 +10,14 @@ import {
PACKAGE_TAG_REMOVED,
PACKAGE_MAINTAINER_CHANGED,
PACKAGE_MAINTAINER_REMOVED,
PACKAGE_META_CHANGED, PackageMetaChange,
PACKAGE_META_CHANGED,
} from './index.js';
import { ChangeRepository } from '../../repository/ChangeRepository.js';
import type { ChangeRepository } from '../../repository/ChangeRepository.js';
import { Change } from '../entity/Change.js';
import { HookEvent } from '../entity/HookEvent.js';
import { Task } from '../entity/Task.js';
import { User } from '../entity/User.js';
import { TaskService } from '../service/TaskService.js';
import type { User } from '../entity/User.js';
import type { TaskService } from '../service/TaskService.js';
class ChangesStreamEvent {
@Inject()
@@ -32,7 +33,11 @@ class ChangesStreamEvent {
return this.config.cnpmcore.hookEnable;
}
protected async addChange(type: string, fullname: string, data: object): Promise<Change> {
protected async addChange(
type: string,
fullname: string,
data: object
): Promise<Change> {
const change = Change.create({
type,
targetName: fullname,
@@ -48,7 +53,9 @@ export class PackageUnpublishedChangesStreamEvent extends ChangesStreamEvent {
async handle(fullname: string) {
const change = await this.addChange(PACKAGE_UNPUBLISHED, fullname, {});
if (this.hookEnable) {
const task = Task.createCreateHookTask(HookEvent.createUnpublishEvent(fullname, change.changeId));
const task = Task.createCreateHookTask(
HookEvent.createUnpublishEvent(fullname, change.changeId)
);
await this.taskService.createTask(task, true);
}
}
@@ -57,9 +64,13 @@ export class PackageUnpublishedChangesStreamEvent extends ChangesStreamEvent {
@Event(PACKAGE_VERSION_ADDED)
export class PackageVersionAddedChangesStreamEvent extends ChangesStreamEvent {
async handle(fullname: string, version: string, tag?: string) {
const change = await this.addChange(PACKAGE_VERSION_ADDED, fullname, { version });
const change = await this.addChange(PACKAGE_VERSION_ADDED, fullname, {
version,
});
if (this.hookEnable) {
const task = Task.createCreateHookTask(HookEvent.createPublishEvent(fullname, change.changeId, version, tag));
const task = Task.createCreateHookTask(
HookEvent.createPublishEvent(fullname, change.changeId, version, tag)
);
await this.taskService.createTask(task, true);
}
}
@@ -68,9 +79,13 @@ export class PackageVersionAddedChangesStreamEvent extends ChangesStreamEvent {
@Event(PACKAGE_VERSION_REMOVED)
export class PackageVersionRemovedChangesStreamEvent extends ChangesStreamEvent {
async handle(fullname: string, version: string, tag?: string) {
const change = await this.addChange(PACKAGE_VERSION_REMOVED, fullname, { version });
const change = await this.addChange(PACKAGE_VERSION_REMOVED, fullname, {
version,
});
if (this.hookEnable) {
const task = Task.createCreateHookTask(HookEvent.createUnpublishEvent(fullname, change.changeId, version, tag));
const task = Task.createCreateHookTask(
HookEvent.createUnpublishEvent(fullname, change.changeId, version, tag)
);
await this.taskService.createTask(task, true);
}
}
@@ -81,7 +96,9 @@ export class PackageTagAddedChangesStreamEvent extends ChangesStreamEvent {
async handle(fullname: string, tag: string) {
const change = await this.addChange(PACKAGE_TAG_ADDED, fullname, { tag });
if (this.hookEnable) {
const task = Task.createCreateHookTask(HookEvent.createDistTagEvent(fullname, change.changeId, tag));
const task = Task.createCreateHookTask(
HookEvent.createDistTagEvent(fullname, change.changeId, tag)
);
await this.taskService.createTask(task, true);
}
}
@@ -92,7 +109,9 @@ export class PackageTagChangedChangesStreamEvent extends ChangesStreamEvent {
async handle(fullname: string, tag: string) {
const change = await this.addChange(PACKAGE_TAG_CHANGED, fullname, { tag });
if (this.hookEnable) {
const task = Task.createCreateHookTask(HookEvent.createDistTagEvent(fullname, change.changeId, tag));
const task = Task.createCreateHookTask(
HookEvent.createDistTagEvent(fullname, change.changeId, tag)
);
await this.taskService.createTask(task, true);
}
}
@@ -103,7 +122,9 @@ export class PackageTagRemovedChangesStreamEvent extends ChangesStreamEvent {
async handle(fullname: string, tag: string) {
const change = await this.addChange(PACKAGE_TAG_REMOVED, fullname, { tag });
if (this.hookEnable) {
const task = Task.createCreateHookTask(HookEvent.createDistTagRmEvent(fullname, change.changeId, tag));
const task = Task.createCreateHookTask(
HookEvent.createDistTagRmEvent(fullname, change.changeId, tag)
);
await this.taskService.createTask(task, true);
}
}
@@ -112,11 +133,17 @@ export class PackageTagRemovedChangesStreamEvent extends ChangesStreamEvent {
@Event(PACKAGE_MAINTAINER_CHANGED)
export class PackageMaintainerChangedChangesStreamEvent extends ChangesStreamEvent {
async handle(fullname: string, maintainers: User[]) {
const change = await this.addChange(PACKAGE_MAINTAINER_CHANGED, fullname, {});
const change = await this.addChange(
PACKAGE_MAINTAINER_CHANGED,
fullname,
{}
);
// TODO: should push only the changed maintainers (a diff), not the full list
if (this.hookEnable) {
for (const maintainer of maintainers) {
const task = Task.createCreateHookTask(HookEvent.createOwnerEvent(fullname, change.changeId, maintainer.name));
const task = Task.createCreateHookTask(
HookEvent.createOwnerEvent(fullname, change.changeId, maintainer.name)
);
await this.taskService.createTask(task, true);
}
}
@@ -126,9 +153,13 @@ export class PackageMaintainerChangedChangesStreamEvent extends ChangesStreamEve
@Event(PACKAGE_MAINTAINER_REMOVED)
export class PackageMaintainerRemovedChangesStreamEvent extends ChangesStreamEvent {
async handle(fullname: string, maintainer: string) {
const change = await this.addChange(PACKAGE_MAINTAINER_REMOVED, fullname, { maintainer });
const change = await this.addChange(PACKAGE_MAINTAINER_REMOVED, fullname, {
maintainer,
});
if (this.hookEnable) {
const task = Task.createCreateHookTask(HookEvent.createOwnerRmEvent(fullname, change.changeId, maintainer));
const task = Task.createCreateHookTask(
HookEvent.createOwnerRmEvent(fullname, change.changeId, maintainer)
);
await this.taskService.createTask(task, true);
}
}
@@ -137,11 +168,19 @@ export class PackageMaintainerRemovedChangesStreamEvent extends ChangesStreamEve
@Event(PACKAGE_META_CHANGED)
export class PackageMetaChangedChangesStreamEvent extends ChangesStreamEvent {
async handle(fullname: string, meta: PackageMetaChange) {
const change = await this.addChange(PACKAGE_META_CHANGED, fullname, { ...meta });
const change = await this.addChange(PACKAGE_META_CHANGED, fullname, {
...meta,
});
const { deprecateds } = meta;
if (this.hookEnable) {
for (const deprecated of deprecateds || []) {
const task = Task.createCreateHookTask(HookEvent.createDeprecatedEvent(fullname, change.changeId, deprecated.version));
const task = Task.createCreateHookTask(
HookEvent.createDeprecatedEvent(
fullname,
change.changeId,
deprecated.version
)
);
await this.taskService.createTask(task, true);
}
}
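
Aside: a minimal self-contained sketch (assumed shapes, not code from this commit) of the two-step pattern every handler above follows: persist one change record first, then fan out hook tasks only when hooks are enabled (PACKAGE_MAINTAINER_CHANGED fans out one task per maintainer):

type ChangeType = 'PACKAGE_UNPUBLISHED' | 'PACKAGE_VERSION_ADDED' | 'PACKAGE_MAINTAINER_CHANGED';
interface Change { changeId: string; type: ChangeType; fullname: string; data: object }
interface HookTaskSketch { changeId: string; fullname: string; payload: object }

let nextId = 0;
// step 1: the change is always recorded, whether or not hooks are enabled
function addChange(type: ChangeType, fullname: string, data: object): Change {
  return { changeId: `change-${++nextId}`, type, fullname, data };
}

// step 2: fan out hook tasks, gated by hookEnable
function fanOutHookTasks(change: Change, hookEnable: boolean, maintainers: string[] = []): HookTaskSketch[] {
  if (!hookEnable) return [];
  if (change.type === 'PACKAGE_MAINTAINER_CHANGED') {
    // one owner hook task per maintainer
    return maintainers.map(name => ({ changeId: change.changeId, fullname: change.fullname, payload: { owner: name } }));
  }
  return [{ changeId: change.changeId, fullname: change.fullname, payload: change.data }];
}

const change = addChange('PACKAGE_MAINTAINER_CHANGED', 'koa', {});
console.log(fanOutHookTasks(change, true, ['alice', 'bob']).length); // 2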

View File

@@ -1,12 +1,10 @@
import { Event, Inject } from '@eggjs/tegg';
import {
EggAppConfig,
} from 'egg';
import type { EggAppConfig } from 'egg';
import { PACKAGE_VERSION_ADDED } from './index.js';
import { getScopeAndName } from '../../common/PackageUtil.js';
import { PackageVersionManifest as PackageVersionManifestEntity } from '../entity/PackageVersionManifest.js';
import { PackageRepository } from '../../repository/PackageRepository.js';
import { DistRepository } from '../../repository/DistRepository.js';
import type { PackageRepository } from '../../repository/PackageRepository.js';
import type { DistRepository } from '../../repository/DistRepository.js';
class StoreManifestEvent {
@Inject()
@@ -16,15 +14,25 @@ class StoreManifestEvent {
@Inject()
private readonly distRepository: DistRepository;
protected async savePackageVersionManifest(fullname: string, version: string) {
if (!this.config.cnpmcore.enableStoreFullPackageVersionManifestsToDatabase) return;
protected async savePackageVersionManifest(
fullname: string,
version: string
) {
if (!this.config.cnpmcore.enableStoreFullPackageVersionManifestsToDatabase)
return;
const [ scope, name ] = getScopeAndName(fullname);
const [scope, name] = getScopeAndName(fullname);
const packageId = await this.packageRepository.findPackageId(scope, name);
if (!packageId) return;
const packageVersion = await this.packageRepository.findPackageVersion(packageId, version);
const packageVersion = await this.packageRepository.findPackageVersion(
packageId,
version
);
if (!packageVersion) return;
const manifest = await this.distRepository.findPackageVersionManifest(packageId, version);
const manifest = await this.distRepository.findPackageVersionManifest(
packageId,
version
);
if (!manifest) return;
const entity = PackageVersionManifestEntity.create({
packageId,

View File

@@ -1,6 +1,6 @@
// TODO sync event
/* eslint-disable @typescript-eslint/no-unused-vars */
import { EggAppConfig } from 'egg';
import type { EggAppConfig } from 'egg';
import { Event, Inject } from '@eggjs/tegg';
import {
PACKAGE_UNPUBLISHED,
@@ -15,7 +15,7 @@ import {
PACKAGE_BLOCKED,
PACKAGE_UNBLOCKED,
} from './index.js';
import { PackageSearchService } from '../service/PackageSearchService.js';
import type { PackageSearchService } from '../service/PackageSearchService.js';
class SyncESPackage {
@Inject()

View File

@@ -1,12 +1,14 @@
import { Event, Inject } from '@eggjs/tegg';
import {
EggAppConfig, EggLogger,
} from 'egg';
import type { EggAppConfig, EggLogger } from 'egg';
import { ForbiddenError } from 'egg-errors';
import { PACKAGE_VERSION_ADDED, PACKAGE_TAG_ADDED, PACKAGE_TAG_CHANGED } from './index.js';
import {
PACKAGE_VERSION_ADDED,
PACKAGE_TAG_ADDED,
PACKAGE_TAG_CHANGED,
} from './index.js';
import { getScopeAndName } from '../../common/PackageUtil.js';
import { PackageManagerService } from '../service/PackageManagerService.js';
import { PackageVersionFileService } from '../service/PackageVersionFileService.js';
import type { PackageManagerService } from '../service/PackageManagerService.js';
import type { PackageVersionFileService } from '../service/PackageVersionFileService.js';
class SyncPackageVersionFileEvent {
@Inject()
@@ -23,17 +25,28 @@ class SyncPackageVersionFileEvent {
if (!this.config.cnpmcore.enableUnpkg) return;
if (!this.config.cnpmcore.enableSyncUnpkgFiles) return;
// ignore sync on unittest
if (this.config.env === 'unittest' && fullname !== '@cnpm/unittest-unpkg-demo') return;
const [ scope, name ] = getScopeAndName(fullname);
const { packageVersion } = await this.packageManagerService.showPackageVersionByVersionOrTag(
scope, name, version);
if (
this.config.env === 'unittest' &&
fullname !== '@cnpm/unittest-unpkg-demo'
)
return;
const [scope, name] = getScopeAndName(fullname);
const { packageVersion } =
await this.packageManagerService.showPackageVersionByVersionOrTag(
scope,
name,
version
);
if (!packageVersion) return;
try {
await this.packageVersionFileService.syncPackageVersionFiles(packageVersion);
await this.packageVersionFileService.syncPackageVersionFiles(
packageVersion
);
} catch (err) {
if (err instanceof ForbiddenError) {
this.logger.info('[SyncPackageVersionFileEvent.syncPackageVersionFile] ignore sync files, cause: %s',
err.message,
this.logger.info(
'[SyncPackageVersionFileEvent.syncPackageVersionFile] ignore sync files, cause: %s',
err.message
);
return;
}
@@ -42,9 +55,13 @@ class SyncPackageVersionFileEvent {
}
protected async syncPackageReadmeToLatestVersion(fullname: string) {
const [ scope, name ] = getScopeAndName(fullname);
const { pkg, packageVersion } = await this.packageManagerService.showPackageVersionByVersionOrTag(
scope, name, 'latest');
const [scope, name] = getScopeAndName(fullname);
const { pkg, packageVersion } =
await this.packageManagerService.showPackageVersionByVersionOrTag(
scope,
name,
'latest'
);
if (!pkg || !packageVersion) return;
await this.packageVersionFileService.syncPackageReadme(pkg, packageVersion);
}

View File

@@ -1,5 +1,5 @@
import '@eggjs/tegg';
import { User } from '../entity/User.js';
import type { User } from '../entity/User.js';
export const PACKAGE_UNPUBLISHED = 'PACKAGE_UNPUBLISHED';
export const PACKAGE_BLOCKED = 'PACKAGE_BLOCKED';
@@ -22,19 +22,35 @@ export interface PackageMetaChange {
deprecateds?: Array<PackageDeprecated>;
}
declare module '@eggjs/tegg' {
interface Events {
[PACKAGE_UNPUBLISHED]: (fullname: string) => Promise<void>;
[PACKAGE_BLOCKED]: (fullname: string) => Promise<void>;
[PACKAGE_UNBLOCKED]: (fullname: string) => Promise<void>;
[PACKAGE_VERSION_ADDED]: (fullname: string, version: string, tag?: string) => Promise<void>;
[PACKAGE_VERSION_REMOVED]: (fullname: string, version: string, tag?: string) => Promise<void>;
[PACKAGE_VERSION_ADDED]: (
fullname: string,
version: string,
tag?: string
) => Promise<void>;
[PACKAGE_VERSION_REMOVED]: (
fullname: string,
version: string,
tag?: string
) => Promise<void>;
[PACKAGE_TAG_ADDED]: (fullname: string, tag: string) => Promise<void>;
[PACKAGE_TAG_CHANGED]: (fullname: string, tag: string) => Promise<void>;
[PACKAGE_TAG_REMOVED]: (fullname: string, tag: string) => Promise<void>;
[PACKAGE_MAINTAINER_CHANGED]: (fullname: string, maintainers: User[]) => Promise<void>;
[PACKAGE_MAINTAINER_REMOVED]: (fullname: string, maintainer: string) => Promise<void>;
[PACKAGE_META_CHANGED]: (fullname: string, meta: PackageMetaChange) => Promise<void>;
[PACKAGE_MAINTAINER_CHANGED]: (
fullname: string,
maintainers: User[]
) => Promise<void>;
[PACKAGE_MAINTAINER_REMOVED]: (
fullname: string,
maintainer: string
) => Promise<void>;
[PACKAGE_META_CHANGED]: (
fullname: string,
meta: PackageMetaChange
) => Promise<void>;
}
}
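
Aside: the `declare module '@eggjs/tegg'` block above types each event signature by augmenting the framework's Events interface. A minimal self-contained sketch of the same declaration-merging technique against a local interface (the emit helper below is illustrative, not the tegg API):

// a stand-in for a framework-provided, augmentable event map
interface LocalEvents {}

// declaration merging: declaring the interface again adds new members to it
interface LocalEvents {
  PACKAGE_VERSION_ADDED: (fullname: string, version: string, tag?: string) => Promise<void>;
}

// an emit() constrained by the merged map type-checks the arguments of each event
async function emit<E extends keyof LocalEvents>(event: E, ...args: Parameters<LocalEvents[E]>): Promise<void> {
  console.log(event, args);
}

void emit('PACKAGE_VERSION_ADDED', 'koa', '3.0.0', 'latest'); // compiles
// emit('PACKAGE_VERSION_ADDED', 123) would be rejected by the compiler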

View File

@@ -1,23 +1,19 @@
import {
AccessLevel,
SingletonProto,
Inject,
EggObjectFactory,
} from '@eggjs/tegg';
import {
EggHttpClient,
} from 'egg';
import type { EggObjectFactory } from '@eggjs/tegg';
import { AccessLevel, SingletonProto, Inject } from '@eggjs/tegg';
import type { EggHttpClient } from 'egg';
import fs from 'node:fs/promises';
import { sortBy } from 'lodash-es';
import binaries, { BinaryName, CategoryName } from '../../../config/binaries.js';
import { BinaryRepository } from '../../repository/BinaryRepository.js';
import type { BinaryName, CategoryName } from '../../../config/binaries.js';
import binaries from '../../../config/binaries.js';
import type { BinaryRepository } from '../../repository/BinaryRepository.js';
import { Task } from '../entity/Task.js';
import { Binary } from '../entity/Binary.js';
import { TaskService } from './TaskService.js';
import { NFSAdapter } from '../../common/adapter/NFSAdapter.js';
import type { TaskService } from './TaskService.js';
import type { NFSAdapter } from '../../common/adapter/NFSAdapter.js';
import { downloadToTempfile } from '../../common/FileUtil.js';
import { isTimeoutError } from '../../common/ErrorUtil.js';
import { AbstractBinary, BinaryItem } from '../../common/adapter/binary/AbstractBinary.js';
import type { BinaryItem } from '../../common/adapter/binary/AbstractBinary.js';
import { AbstractBinary } from '../../common/adapter/binary/AbstractBinary.js';
import { AbstractService } from '../../common/AbstractService.js';
import { BinaryType } from '../../common/enum/Binary.js';
import { TaskType, TaskState } from '../../common/enum/Task.js';
@@ -44,37 +40,37 @@ export class BinarySyncerService extends AbstractService {
// canvas/v2.6.1/canvas-v2.6.1-node-v57-linux-glibc-x64.tar.gz
// -> node-canvas-prebuilt/v2.6.1/node-canvas-prebuilt-v2.6.1-node-v57-linux-glibc-x64.tar.gz
// for historical canvas versions the targetName may be the category, keep compatibility
public async findBinary(targetName: BinaryName | CategoryName, parent: string, name: string) {
public async findBinary(
targetName: BinaryName | CategoryName,
parent: string,
name: string
) {
return await this.binaryRepository.findBinary(targetName, parent, name);
}
public async listDirBinaries(binary: Binary) {
return await this.binaryRepository.listBinaries(binary.category, `${binary.parent}${binary.name}`);
return await this.binaryRepository.listBinaries(
binary.category,
`${binary.parent}${binary.name}`
);
}
public async listRootBinaries(binaryName: BinaryName) {
// usually binaryName and category are the same, but some special binaryName values have multiple categories, e.g. canvas
// so when querying canvas, fetch data for both the binaryName and the category
const {
category,
} = binaries[binaryName];
const reqs = [
this.binaryRepository.listBinaries(binaryName, '/'),
];
const { category } = binaries[binaryName];
const reqs = [this.binaryRepository.listBinaries(binaryName, '/')];
if (category && category !== binaryName) {
reqs.push(this.binaryRepository.listBinaries(category, '/'));
}
const [
rootBinary,
categoryBinary,
] = await Promise.all(reqs);
const [rootBinary, categoryBinary] = await Promise.all(reqs);
const versions = rootBinary.map(b => b.name);
const versions = new Set(rootBinary.map(b => b.name));
categoryBinary?.forEach(b => {
const version = b.name;
// only add versions that are not present yet
if (!versions.includes(version)) {
if (!versions.has(version)) {
rootBinary.push(b);
}
});
@@ -88,9 +84,16 @@ export class BinarySyncerService extends AbstractService {
public async createTask(binaryName: BinaryName, lastData?: any) {
try {
return await this.taskService.createTask(Task.createSyncBinary(binaryName, lastData), false);
return await this.taskService.createTask(
Task.createSyncBinary(binaryName, lastData),
false
);
} catch (e) {
this.logger.error('[BinarySyncerService.createTask] binaryName: %s, error: %s', binaryName, e);
this.logger.error(
'[BinarySyncerService.createTask] binaryName: %s, error: %s',
binaryName,
e
);
}
}
@@ -111,41 +114,72 @@ export class BinarySyncerService extends AbstractService {
const binaryAdapter = await this.getBinaryAdapter(binaryName);
const logUrl = `${this.config.cnpmcore.registry}/-/binary/${binaryName}/syncs/${task.taskId}/log`;
let logs: string[] = [];
logs.push(`[${isoNow()}] 🚧🚧🚧🚧🚧 Start sync binary "${binaryName}" 🚧🚧🚧🚧🚧`);
logs.push(
`[${isoNow()}] 🚧🚧🚧🚧🚧 Start sync binary "${binaryName}" 🚧🚧🚧🚧🚧`
);
if (!binaryAdapter) {
task.error = 'unknow binaryName';
logs.push(`[${isoNow()}] ❌ Synced "${binaryName}" fail, ${task.error}, log: ${logUrl}`);
logs.push(
`[${isoNow()}] ❌ Synced "${binaryName}" fail, ${task.error}, log: ${logUrl}`
);
logs.push(`[${isoNow()}] ❌❌❌❌❌ "${binaryName}" ❌❌❌❌❌`);
this.logger.error('[BinarySyncerService.executeTask:fail] taskId: %s, targetName: %s, %s',
task.taskId, task.targetName, task.error);
this.logger.error(
'[BinarySyncerService.executeTask:fail] taskId: %s, targetName: %s, %s',
task.taskId,
task.targetName,
task.error
);
await this.taskService.finishTask(task, TaskState.Fail, logs.join('\n'));
return;
}
await this.taskService.appendTaskLog(task, logs.join('\n'));
logs = [];
this.logger.info('[BinarySyncerService.executeTask:start] taskId: %s, targetName: %s, log: %s',
task.taskId, task.targetName, logUrl);
this.logger.info(
'[BinarySyncerService.executeTask:start] taskId: %s, targetName: %s, log: %s',
task.taskId,
task.targetName,
logUrl
);
try {
const [ hasDownloadError ] = await this.syncDir(binaryAdapter, task, '/');
const [hasDownloadError] = await this.syncDir(binaryAdapter, task, '/');
logs.push(`[${isoNow()}] 🟢 log: ${logUrl}`);
logs.push(`[${isoNow()}] 🟢🟢🟢🟢🟢 "${binaryName}" 🟢🟢🟢🟢🟢`);
await this.taskService.finishTask(task, TaskState.Success, logs.join('\n'));
await this.taskService.finishTask(
task,
TaskState.Success,
logs.join('\n')
);
// only count as success when there was no download error
await binaryAdapter.finishFetch(!hasDownloadError, binaryName);
this.logger.info('[BinarySyncerService.executeTask:success] taskId: %s, targetName: %s, log: %s, hasDownloadError: %s',
task.taskId, task.targetName, logUrl, hasDownloadError);
this.logger.info(
'[BinarySyncerService.executeTask:success] taskId: %s, targetName: %s, log: %s, hasDownloadError: %s',
task.taskId,
task.targetName,
logUrl,
hasDownloadError
);
} catch (err: any) {
task.error = `${err.name}: ${err.message}`;
logs.push(`[${isoNow()}] ❌ Synced "${binaryName}" fail, ${task.error}, log: ${logUrl}`);
logs.push(
`[${isoNow()}] ❌ Synced "${binaryName}" fail, ${task.error}, log: ${logUrl}`
);
logs.push(`[${isoNow()}] ❌❌❌❌❌ "${binaryName}" ❌❌❌❌❌`);
if (isTimeoutError(err)) {
this.logger.warn('[BinarySyncerService.executeTask:fail] taskId: %s, targetName: %s, %s',
task.taskId, task.targetName, task.error);
this.logger.warn(
'[BinarySyncerService.executeTask:fail] taskId: %s, targetName: %s, %s',
task.taskId,
task.targetName,
task.error
);
this.logger.warn(err);
} else {
this.logger.error('[BinarySyncerService.executeTask:fail] taskId: %s, targetName: %s, %s',
task.taskId, task.targetName, task.error);
this.logger.error(
'[BinarySyncerService.executeTask:fail] taskId: %s, targetName: %s, %s',
task.taskId,
task.targetName,
task.error
);
this.logger.error(err);
}
await binaryAdapter.finishFetch(false, binaryName);
@@ -153,7 +187,13 @@ export class BinarySyncerService extends AbstractService {
}
}
private async syncDir(binaryAdapter: AbstractBinary, task: Task, dir: string, parentIndex = '', latestVersionParent = '/') {
private async syncDir(
binaryAdapter: AbstractBinary,
task: Task,
dir: string,
parentIndex = '',
latestVersionParent = '/'
) {
const binaryName = task.targetName as BinaryName;
const result = await binaryAdapter.fetch(dir, binaryName);
let hasDownloadError = false;
@@ -161,15 +201,30 @@ export class BinarySyncerService extends AbstractService {
if (result && result.items.length > 0) {
hasItems = true;
let logs: string[] = [];
const { newItems, latestVersionDir } = await this.diff(binaryName, dir, result.items, latestVersionParent);
logs.push(`[${isoNow()}][${dir}] 🚧 Syncing diff: ${result.items.length} => ${newItems.length}, Binary class: ${binaryAdapter.constructor.name}`);
const { newItems, latestVersionDir } = await this.diff(
binaryName,
dir,
result.items,
latestVersionParent
);
logs.push(
`[${isoNow()}][${dir}] 🚧 Syncing diff: ${result.items.length} => ${newItems.length}, Binary class: ${binaryAdapter.constructor.name}`
);
// re-check latest version
for (const [ index, { item, reason }] of newItems.entries()) {
for (const [index, { item, reason }] of newItems.entries()) {
if (item.isDir) {
logs.push(`[${isoNow()}][${dir}] 🚧 [${parentIndex}${index}] Start sync dir ${JSON.stringify(item)}, reason: ${reason}`);
logs.push(
`[${isoNow()}][${dir}] 🚧 [${parentIndex}${index}] Start sync dir ${JSON.stringify(item)}, reason: ${reason}`
);
await this.taskService.appendTaskLog(task, logs.join('\n'));
logs = [];
const [ hasError, hasSubItems ] = await this.syncDir(binaryAdapter, task, `${dir}${item.name}`, `${parentIndex}${index}.`, latestVersionDir);
const [hasError, hasSubItems] = await this.syncDir(
binaryAdapter,
task,
`${dir}${item.name}`,
`${parentIndex}${index}.`,
latestVersionDir
);
if (hasError) {
hasDownloadError = true;
} else {
@@ -181,34 +236,55 @@ export class BinarySyncerService extends AbstractService {
}
} else {
// download to nfs
logs.push(`[${isoNow()}][${dir}] 🚧 [${parentIndex}${index}] Downloading ${JSON.stringify(item)}, reason: ${reason}`);
logs.push(
`[${isoNow()}][${dir}] 🚧 [${parentIndex}${index}] Downloading ${JSON.stringify(item)}, reason: ${reason}`
);
// skip binary files that already exist
const existsBinary = await this.binaryRepository.findBinary(item.category, item.parent, item.name);
const existsBinary = await this.binaryRepository.findBinary(
item.category,
item.parent,
item.name
);
if (existsBinary && existsBinary.date === item.date) {
logs.push(`[${isoNow()}][${dir}] 🟢 [${parentIndex}${index}] binary file exists, skip download, binaryId: ${existsBinary.binaryId}`);
this.logger.info('[BinarySyncerService.syncDir:skipDownload] binaryId: %s exists, storePath: %s',
existsBinary.binaryId, existsBinary.storePath);
logs.push(
`[${isoNow()}][${dir}] 🟢 [${parentIndex}${index}] binary file exists, skip download, binaryId: ${existsBinary.binaryId}`
);
this.logger.info(
'[BinarySyncerService.syncDir:skipDownload] binaryId: %s exists, storePath: %s',
existsBinary.binaryId,
existsBinary.storePath
);
continue;
}
await this.taskService.appendTaskLog(task, logs.join('\n'));
logs = [];
let localFile = '';
try {
const { tmpfile, headers, timing } =
await downloadToTempfile(
this.httpclient, this.config.dataDir, item.sourceUrl!, { ignoreDownloadStatuses: item.ignoreDownloadStatuses });
const { tmpfile, headers, timing } = await downloadToTempfile(
this.httpclient,
this.config.dataDir,
item.sourceUrl!,
{ ignoreDownloadStatuses: item.ignoreDownloadStatuses }
);
const log = `[${isoNow()}][${dir}] 🟢 [${parentIndex}${index}] HTTP content-length: ${headers['content-length']}, timing: ${JSON.stringify(timing)}, ${item.sourceUrl} => ${tmpfile}`;
logs.push(log);
this.logger.info('[BinarySyncerService.syncDir:downloadToTempfile] %s', log);
this.logger.info(
'[BinarySyncerService.syncDir:downloadToTempfile] %s',
log
);
localFile = tmpfile;
const binary = await this.saveBinaryItem(item, tmpfile);
logs.push(`[${isoNow()}][${dir}] 🟢 [${parentIndex}${index}] Synced file success, binaryId: ${binary.binaryId}`);
logs.push(
`[${isoNow()}][${dir}] 🟢 [${parentIndex}${index}] Synced file success, binaryId: ${binary.binaryId}`
);
await this.taskService.appendTaskLog(task, logs.join('\n'));
logs = [];
} catch (err: any) {
if (err.name === 'DownloadNotFoundError') {
this.logger.info('Not found %s, skip it', item.sourceUrl);
logs.push(`[${isoNow()}][${dir}] 🧪️ [${parentIndex}${index}] Download ${item.sourceUrl} not found, skip it`);
logs.push(
`[${isoNow()}][${dir}] 🧪️ [${parentIndex}${index}] Download ${item.sourceUrl} not found, skip it`
);
} else {
if (err.name === 'DownloadStatusInvalidError') {
this.logger.warn('Download binary %s %s', item.sourceUrl, err);
@@ -216,7 +292,9 @@ export class BinarySyncerService extends AbstractService {
this.logger.error('Download binary %s %s', item.sourceUrl, err);
}
hasDownloadError = true;
logs.push(`[${isoNow()}][${dir}] ❌ [${parentIndex}${index}] Download ${item.sourceUrl} error: ${err}`);
logs.push(
`[${isoNow()}][${dir}] ❌ [${parentIndex}${index}] Download ${item.sourceUrl} error: ${err}`
);
}
await this.taskService.appendTaskLog(task, logs.join('\n'));
logs = [];
@@ -230,20 +308,29 @@ export class BinarySyncerService extends AbstractService {
if (hasDownloadError) {
logs.push(`[${isoNow()}][${dir}] ❌ Synced dir fail`);
} else {
logs.push(`[${isoNow()}][${dir}] 🟢 Synced dir success, hasItems: ${hasItems}`);
logs.push(
`[${isoNow()}][${dir}] 🟢 Synced dir success, hasItems: ${hasItems}`
);
}
await this.taskService.appendTaskLog(task, logs.join('\n'));
}
return [ hasDownloadError, hasItems ];
return [hasDownloadError, hasItems];
}
// see https://github.com/cnpm/cnpmcore/issues/556
// the upstream may be publishing a new version, or the sync was interrupted, so the file list can be inconsistent during sync
// if the current dir matches the latestVersionParent parent dir, revalidate the current dir
// if existsItems is empty or has been modified, no revalidation is needed
private async diff(binaryName: BinaryName, dir: string, fetchItems: BinaryItem[], latestVersionParent = '/') {
const existsItems = await this.binaryRepository.listBinaries(binaryName, dir);
private async diff(
binaryName: BinaryName,
dir: string,
fetchItems: BinaryItem[],
latestVersionParent = '/'
) {
const existsItems = await this.binaryRepository.listBinaries(
binaryName,
dir
);
const existsMap = new Map<string, Binary>();
for (const item of existsItems) {
existsMap.set(item.name, item);
@@ -276,7 +363,7 @@ export class BinarySyncerService extends AbstractService {
existsItem.date = item.date;
} else if (dir.endsWith(latestVersionParent)) {
if (!latestItem) {
latestItem = sortBy(fetchItems, [ 'date' ]).pop();
latestItem = sortBy(fetchItems, ['date']).pop();
}
const isLatestItem = latestItem?.name === item.name;
if (isLatestItem && existsItem.isDir) {
@@ -289,7 +376,6 @@ export class BinarySyncerService extends AbstractService {
}
}
return {
newItems: diffItems,
latestVersionDir: latestVersionParent,
@@ -301,22 +387,35 @@ export class BinarySyncerService extends AbstractService {
const stat = await fs.stat(tmpfile);
binary.size = stat.size;
await this.nfsAdapter.uploadFile(binary.storePath, tmpfile);
this.logger.info('[BinarySyncerService.saveBinaryItem:uploadFile] binaryId: %s, size: %d, %s => %s',
binary.binaryId, stat.size, tmpfile, binary.storePath);
this.logger.info(
'[BinarySyncerService.saveBinaryItem:uploadFile] binaryId: %s, size: %d, %s => %s',
binary.binaryId,
stat.size,
tmpfile,
binary.storePath
);
}
await this.binaryRepository.saveBinary(binary);
return binary;
}
private async getBinaryAdapter(binaryName: BinaryName): Promise<AbstractBinary | undefined> {
private async getBinaryAdapter(
binaryName: BinaryName
): Promise<AbstractBinary | undefined> {
const config = this.config.cnpmcore;
const binaryConfig = binaries[binaryName];
let binaryAdapter: AbstractBinary;
if (config.sourceRegistryIsCNpm) {
binaryAdapter = await this.eggObjectFactory.getEggObject(AbstractBinary, BinaryType.Api);
binaryAdapter = await this.eggObjectFactory.getEggObject(
AbstractBinary,
BinaryType.Api
);
} else {
binaryAdapter = await this.eggObjectFactory.getEggObject(AbstractBinary, binaryConfig.type);
binaryAdapter = await this.eggObjectFactory.getEggObject(
AbstractBinary,
binaryConfig.type
);
}
await binaryAdapter.initFetch(binaryName);
return binaryAdapter;
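
Aside: listRootBinaries above now collects the already-known version names into a Set so the category listing can be merged in with O(1) membership checks instead of Array#includes. A self-contained sketch of that merge (BinaryItemLike is an assumed shape for illustration):

interface BinaryItemLike { name: string; isDir: boolean }

function mergeRootBinaries(rootBinary: BinaryItemLike[], categoryBinary?: BinaryItemLike[]): BinaryItemLike[] {
  const versions = new Set(rootBinary.map(b => b.name));
  for (const b of categoryBinary ?? []) {
    // only append versions the root listing does not have yet
    if (!versions.has(b.name)) rootBinary.push(b);
  }
  return rootBinary;
}

// v2.6.1 exists in both listings, so only v2.7.0 is appended
const merged = mergeRootBinaries(
  [{ name: 'v2.6.1', isDir: true }],
  [{ name: 'v2.6.1', isDir: true }, { name: 'v2.7.0', isDir: true }]
);
console.log(merged.map(b => b.name)); // [ 'v2.6.1', 'v2.7.0' ]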

View File

@@ -1,13 +1,16 @@
import { AccessLevel, SingletonProto, Inject } from '@eggjs/tegg';
import { EggLogger } from 'egg';
import type { EggLogger } from 'egg';
import pMap from 'p-map';
import { BugVersion } from '../entity/BugVersion.js';
import { PackageJSONType, PackageRepository } from '../../repository/PackageRepository.js';
import { DistRepository } from '../../repository/DistRepository.js';
import type {
PackageJSONType,
PackageRepository,
} from '../../repository/PackageRepository.js';
import type { DistRepository } from '../../repository/DistRepository.js';
import { getScopeAndName } from '../../common/PackageUtil.js';
import { CacheService } from './CacheService.js';
import type { CacheService } from './CacheService.js';
import { BUG_VERSIONS, LATEST_TAG } from '../../common/constants.js';
import { BugVersionStore } from '../../common/adapter/BugVersionStore.js';
import type { BugVersionStore } from '../../common/adapter/BugVersionStore.js';
@SingletonProto({
accessLevel: AccessLevel.PUBLIC,
@@ -33,11 +36,18 @@ export class BugVersionService {
const pkg = await this.packageRepository.findPackage('', BUG_VERSIONS);
if (!pkg) return;
/* c8 ignore next 10 */
const tag = await this.packageRepository.findPackageTag(pkg!.packageId, LATEST_TAG);
const tag = await this.packageRepository.findPackageTag(
pkg!.packageId,
LATEST_TAG
);
if (!tag) return;
let bugVersion = this.bugVersionStore.getBugVersion(tag!.version);
if (!bugVersion) {
const packageVersionJson = (await this.distRepository.findPackageVersionManifest(pkg!.packageId, tag!.version)) as PackageJSONType;
const packageVersionJson =
(await this.distRepository.findPackageVersionManifest(
pkg!.packageId,
tag!.version
)) as PackageJSONType;
if (!packageVersionJson) return;
const data = packageVersionJson.config?.['bug-versions'];
bugVersion = new BugVersion(data || {});
@@ -48,51 +58,83 @@ export class BugVersionService {
async cleanBugVersionPackageCaches(bugVersion: BugVersion) {
const fullnames = bugVersion.listAllPackagesHasBugs();
await pMap(fullnames, async fullname => {
await this.cacheService.removeCache(fullname);
}, {
concurrency: 50,
stopOnError: false,
});
await pMap(
fullnames,
async fullname => {
await this.cacheService.removeCache(fullname);
},
{
concurrency: 50,
stopOnError: false,
}
);
}
async fixPackageBugVersions(bugVersion: BugVersion, fullname: string, manifests: Record<string, any>) {
async fixPackageBugVersions(
bugVersion: BugVersion,
fullname: string,
manifests: Record<string, any>
) {
// If all versions of the package are unpublished (like pinyin-tool), versions is undefined
if (!manifests) return;
for (const manifest of Object.values(manifests)) {
this.fixPackageBugVersionWithAllVersions(fullname, bugVersion, manifest, manifests);
this.fixPackageBugVersionWithAllVersions(
fullname,
bugVersion,
manifest,
manifests
);
}
}
async fixPackageBugVersion(bugVersion: BugVersion, fullname: string, manifest: any) {
async fixPackageBugVersion(
bugVersion: BugVersion,
fullname: string,
manifest: any
) {
const advice = bugVersion.fixVersion(fullname, manifest.version);
if (!advice) {
return manifest;
}
const [ scope, name ] = getScopeAndName(fullname);
const [scope, name] = getScopeAndName(fullname);
const pkg = await this.packageRepository.findPackage(scope, name);
if (!pkg) {
return manifest;
}
const packageVersion = await this.packageRepository.findPackageVersion(pkg.packageId, advice.version);
const packageVersion = await this.packageRepository.findPackageVersion(
pkg.packageId,
advice.version
);
if (!packageVersion) {
return manifest;
}
const fixedManifest = await this.distRepository.findPackageVersionManifest(packageVersion.packageId, advice.version);
const fixedManifest = await this.distRepository.findPackageVersionManifest(
packageVersion.packageId,
advice.version
);
if (!fixedManifest) {
return manifest;
}
return bugVersion.fixManifest(manifest, fixedManifest);
}
private fixPackageBugVersionWithAllVersions(fullname: string, bugVersion: BugVersion, manifest: any, manifests: Record<string, any>) {
private fixPackageBugVersionWithAllVersions(
fullname: string,
bugVersion: BugVersion,
manifest: any,
manifests: Record<string, any>
) {
const advice = bugVersion.fixVersion(fullname, manifest.version);
if (!advice) {
return;
}
const fixedManifest = manifests[advice.version];
if (!fixedManifest) {
this.logger.warn('[BugVersionService] not found pkg for %s@%s manifest', fullname, advice.version);
this.logger.warn(
'[BugVersionService] not found pkg for %s@%s manifest',
fullname,
advice.version
);
return;
}
const newManifest = bugVersion.fixManifest(manifest, fixedManifest);
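
Aside: BugVersionService above reads a `bug-versions` object from the latest manifest of the BUG_VERSIONS package and redirects known-broken versions to a fixed one. A self-contained sketch of that lookup; the config shape (package name -> broken version -> advice) is assumed for illustration:

type BugVersionData = Record<string, Record<string, { version: string; reason: string }>>;

const data: BugVersionData = {
  'example-pkg': {
    '1.2.0': { version: '1.2.1', reason: 'broken publish, use 1.2.1 instead' },
  },
};

// mirrors fixVersion(): return advice only when the requested version is known to be buggy
function fixVersion(fullname: string, version: string) {
  return data[fullname]?.[version];
}

console.log(fixVersion('example-pkg', '1.2.0')); // { version: '1.2.1', reason: 'broken publish, use 1.2.1 instead' }
console.log(fixVersion('example-pkg', '1.2.1')); // undefined, keep the original manifest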

View File

@@ -1,11 +1,7 @@
import {
AccessLevel,
SingletonProto,
Inject,
} from '@eggjs/tegg';
import { CacheAdapter } from '../../common/adapter/CacheAdapter.js';
import { AccessLevel, SingletonProto, Inject } from '@eggjs/tegg';
import type { CacheAdapter } from '../../common/adapter/CacheAdapter.js';
import { AbstractService } from '../../common/AbstractService.js';
import { ChangesStreamTaskData } from '../entity/Task.js';
import type { ChangesStreamTaskData } from '../entity/Task.js';
type PackageCacheAttribute = 'etag' | 'manifests';
@@ -57,40 +53,56 @@ export class CacheService extends AbstractService {
return await this.cacheAdapter.getBytes(key);
}
public async savePackageEtagAndManifests(fullname: string, isFullManifests: boolean, etag: string, manifests: Buffer) {
public async savePackageEtagAndManifests(
fullname: string,
isFullManifests: boolean,
etag: string,
manifests: Buffer
) {
await Promise.all([
await this.cacheAdapter.set(this.cacheKey(fullname, isFullManifests, 'etag'), etag),
await this.cacheAdapter.setBytes(this.cacheKey(fullname, isFullManifests, 'manifests'), manifests),
this.cacheAdapter.set(
this.cacheKey(fullname, isFullManifests, 'etag'),
etag
),
this.cacheAdapter.setBytes(
this.cacheKey(fullname, isFullManifests, 'manifests'),
manifests
),
]);
}
public async getTotalData() {
const value = await this.cacheAdapter.get(TOTAL_DATA_KEY);
const totalData: TotalData = value ? JSON.parse(value) : {
packageCount: 0,
packageVersionCount: 0,
lastPackage: '',
lastPackageVersion: '',
download: {
today: 0,
thisweek: 0,
thismonth: 0,
thisyear: 0,
lastday: 0,
lastweek: 0,
lastmonth: 0,
lastyear: 0,
},
changesStream: {},
upstreamRegistries: [],
lastChangeId: 0,
cacheTime: '',
};
const totalData: TotalData = value
? JSON.parse(value)
: {
packageCount: 0,
packageVersionCount: 0,
lastPackage: '',
lastPackageVersion: '',
download: {
today: 0,
thisweek: 0,
thismonth: 0,
thisyear: 0,
lastday: 0,
lastweek: 0,
lastmonth: 0,
lastyear: 0,
},
changesStream: {},
upstreamRegistries: [],
lastChangeId: 0,
cacheTime: '',
};
return totalData;
}
public async saveTotalData(totalData: TotalData) {
return await this.cacheAdapter.set(TOTAL_DATA_KEY, JSON.stringify(totalData));
return await this.cacheAdapter.set(
TOTAL_DATA_KEY,
JSON.stringify(totalData)
);
}
public async removeCache(fullname: string) {
@@ -102,7 +114,11 @@ export class CacheService extends AbstractService {
]);
}
private cacheKey(fullname: string, isFullManifests: boolean, attribute: PackageCacheAttribute) {
private cacheKey(
fullname: string,
isFullManifests: boolean,
attribute: PackageCacheAttribute
) {
return `${fullname}|${isFullManifests ? 'full' : 'abbr'}:${attribute}`;
}
}
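
Aside: cacheKey above packs three pieces of information into one string: the package name, whether the cached blob belongs to the full or the abbreviated manifest, and which attribute (etag or manifests bytes) is stored. A self-contained sketch showing the keys it produces:

type PackageCacheAttribute = 'etag' | 'manifests';

function cacheKey(fullname: string, isFullManifests: boolean, attribute: PackageCacheAttribute): string {
  return `${fullname}|${isFullManifests ? 'full' : 'abbr'}:${attribute}`;
}

console.log(cacheKey('koa', true, 'etag'));             // koa|full:etag
console.log(cacheKey('@cnpm/foo', false, 'manifests')); // @cnpm/foo|abbr:manifests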

View File

@@ -1,20 +1,18 @@
import os from 'node:os';
import { setTimeout } from 'node:timers/promises';
import {
AccessLevel,
SingletonProto,
EggObjectFactory,
Inject,
} from '@eggjs/tegg';
import type { EggObjectFactory } from '@eggjs/tegg';
import { AccessLevel, SingletonProto, Inject } from '@eggjs/tegg';
import { E500 } from 'egg-errors';
import { PackageSyncerService, RegistryNotMatchError } from './PackageSyncerService.js';
import { TaskService } from './TaskService.js';
import { RegistryManagerService } from './RegistryManagerService.js';
import { ScopeManagerService } from './ScopeManagerService.js';
import { PackageRepository } from '../../repository/PackageRepository.js';
import { TaskRepository } from '../../repository/TaskRepository.js';
import { HOST_NAME, ChangesStreamTask, Task } from '../entity/Task.js';
import { Registry } from '../entity/Registry.js';
import type { PackageSyncerService } from './PackageSyncerService.js';
import { RegistryNotMatchError } from './PackageSyncerService.js';
import type { TaskService } from './TaskService.js';
import type { RegistryManagerService } from './RegistryManagerService.js';
import type { ScopeManagerService } from './ScopeManagerService.js';
import type { PackageRepository } from '../../repository/PackageRepository.js';
import type { TaskRepository } from '../../repository/TaskRepository.js';
import type { ChangesStreamTask } from '../entity/Task.js';
import { HOST_NAME, Task } from '../entity/Task.js';
import type { Registry } from '../entity/Registry.js';
import { AbstractChangeStream } from '../../common/adapter/changesStream/AbstractChangesStream.js';
import { getScopeAndName } from '../../common/PackageUtil.js';
import { isTimeoutError } from '../../common/ErrorUtil.js';
@@ -33,9 +31,9 @@ export class ChangesStreamService extends AbstractService {
@Inject()
private readonly taskService: TaskService;
@Inject()
private readonly registryManagerService : RegistryManagerService;
private readonly registryManagerService: RegistryManagerService;
@Inject()
private readonly scopeManagerService : ScopeManagerService;
private readonly scopeManagerService: ScopeManagerService;
@Inject()
private readonly eggObjectFactory: EggObjectFactory;
@Inject()
@@ -46,14 +44,22 @@ export class ChangesStreamService extends AbstractService {
// `{registryName}_WORKER`: the sync source for a custom scope
public async findExecuteTask(): Promise<ChangesStreamTask | null> {
const targetName = GLOBAL_WORKER;
const globalRegistryTask = await this.taskRepository.findTaskByTargetName(targetName, TaskType.ChangesStream);
const globalRegistryTask = await this.taskRepository.findTaskByTargetName(
targetName,
TaskType.ChangesStream
);
// if no default sync source is configured, initialize it first
if (!globalRegistryTask) {
await this.taskService.createTask(Task.createChangesStream(targetName), false);
await this.taskService.createTask(
Task.createChangesStream(targetName),
false
);
}
// custom scopes are created manually by the admin
// fetch from the queue by TaskType.ChangesStream
return await this.taskService.findExecuteTask(TaskType.ChangesStream) as ChangesStreamTask;
return (await this.taskService.findExecuteTask(
TaskType.ChangesStream
)) as ChangesStreamTask;
}
public async suspendSync(exit = false) {
@@ -65,10 +71,16 @@ export class ChangesStreamService extends AbstractService {
}
const authorIp = os.hostname();
// suspend all changesStream tasks on the current machine
const tasks = await this.taskRepository.findTaskByAuthorIpAndType(authorIp, TaskType.ChangesStream);
const tasks = await this.taskRepository.findTaskByAuthorIpAndType(
authorIp,
TaskType.ChangesStream
);
for (const task of tasks) {
if (task.state === TaskState.Processing) {
this.logger.info('[ChangesStreamService.suspendSync:suspend] taskId: %s', task.taskId);
this.logger.info(
'[ChangesStreamService.suspendSync:suspend] taskId: %s',
task.taskId
);
// 1. set the task state back to waiting
// 2. push it back into the task queue so another machine can execute it
await this.taskService.retryTask(task);
@@ -93,8 +105,14 @@ export class ChangesStreamService extends AbstractService {
// allow disabling changesStream dynamically
while (since && this.config.cnpmcore.enableChangesStream) {
const { lastSince, taskCount } = await this.executeSync(since, task);
this.logger.info('[ChangesStreamService.executeTask:changes] since: %s => %s, %d new tasks, taskId: %s, updatedAt: %j',
since, lastSince, taskCount, task.taskId, task.updatedAt);
this.logger.info(
'[ChangesStreamService.executeTask:changes] since: %s => %s, %d new tasks, taskId: %s, updatedAt: %j',
since,
lastSince,
taskCount,
task.taskId,
task.updatedAt
);
since = lastSince;
if (taskCount === 0 && this.config.env === 'unittest') {
break;
@@ -102,7 +120,10 @@ export class ChangesStreamService extends AbstractService {
await setTimeout(this.config.cnpmcore.checkChangesStreamInterval);
}
} catch (err) {
this.logger.warn('[ChangesStreamService.executeTask:error] %s, exit now', err.message);
this.logger.warn(
'[ChangesStreamService.executeTask:error] %s, exit now',
err.message
);
if (isTimeoutError(err)) {
this.logger.warn(err);
} else {
@@ -119,9 +140,13 @@ export class ChangesStreamService extends AbstractService {
const { registryId } = task.data || {};
// if a registryId already exists, load it directly from the DB
if (registryId) {
const registry = await this.registryManagerService.findByRegistryId(registryId);
const registry =
await this.registryManagerService.findByRegistryId(registryId);
if (!registry) {
this.logger.error('[ChangesStreamService.getRegistry:error] registryId %s not found', registryId);
this.logger.error(
'[ChangesStreamService.getRegistry:error] registryId %s not found',
registryId
);
throw new E500(`invalid change stream registry: ${registryId}`);
}
return registry;
@@ -129,7 +154,7 @@ export class ChangesStreamService extends AbstractService {
const registry = await this.registryManagerService.ensureDefaultRegistry();
task.data = {
...(task.data || {}),
...task.data,
registryId: registry.registryId,
};
await this.taskRepository.saveTask(task);
@@ -141,9 +166,15 @@ export class ChangesStreamService extends AbstractService {
// 1. if the package already has a registryId, that registryId takes precedence
//   1. the package's scope belongs to the current registry
//   2. the registry has no scope configured (treated as a generic registry address) and the package's scope does not belong to any other registry
public async needSync(registry: Registry, fullname: string): Promise<boolean> {
const [ scopeName, name ] = getScopeAndName(fullname);
const packageEntity = await this.packageRepository.findPackage(scopeName, name);
public async needSync(
registry: Registry,
fullname: string
): Promise<boolean> {
const [scopeName, name] = getScopeAndName(fullname);
const packageEntity = await this.packageRepository.findPackage(
scopeName,
name
);
// if the package does not exist and syncMode is 'exist', do not sync
if (this.config.cnpmcore.syncMode === 'exist' && !packageEntity) {
@@ -155,18 +186,24 @@ export class ChangesStreamService extends AbstractService {
}
const scope = await this.scopeManagerService.findByName(scopeName);
const inCurrentRegistry = scope && scope?.registryId === registry.registryId;
const inCurrentRegistry =
scope && scope?.registryId === registry.registryId;
if (inCurrentRegistry) {
return true;
}
const registryScopeCount = await this.scopeManagerService.countByRegistryId(registry.registryId);
const registryScopeCount = await this.scopeManagerService.countByRegistryId(
registry.registryId
);
// the package has no scope and the current registry has no scope configured (it is a generic registry), so it needs to be synced
return !scope && !registryScopeCount;
}
public async getInitialSince(task: ChangesStreamTask): Promise<string> {
const registry = await this.prepareRegistry(task);
const changesStreamAdapter = await this.eggObjectFactory.getEggObject(AbstractChangeStream, registry.type) as AbstractChangeStream;
const changesStreamAdapter = (await this.eggObjectFactory.getEggObject(
AbstractChangeStream,
registry.type
)) as AbstractChangeStream;
const since = await changesStreamAdapter.getInitialSince(registry);
return since;
}
@@ -175,7 +212,10 @@ export class ChangesStreamService extends AbstractService {
// update the task's since and taskCount fields
public async executeSync(since: string, task: ChangesStreamTask) {
const registry = await this.prepareRegistry(task);
const changesStreamAdapter = await this.eggObjectFactory.getEggObject(AbstractChangeStream, registry.type) as AbstractChangeStream;
const changesStreamAdapter = (await this.eggObjectFactory.getEggObject(
AbstractChangeStream,
registry.type
)) as AbstractChangeStream;
let taskCount = 0;
let lastSince = since;
@@ -201,17 +241,29 @@ export class ChangesStreamService extends AbstractService {
skipDependencies: true,
tips,
});
this.logger.info('[ChangesStreamService.createTask:success] fullname: %s, task: %s, tips: %s',
fullname, task.id, tips);
this.logger.info(
'[ChangesStreamService.createTask:success] fullname: %s, task: %s, tips: %s',
fullname,
task.id,
tips
);
} catch (err) {
if (err instanceof RegistryNotMatchError) {
this.logger.warn('[ChangesStreamService.executeSync:skip] fullname: %s, error: %s, tips: %s',
fullname, err, tips);
this.logger.warn(
'[ChangesStreamService.executeSync:skip] fullname: %s, error: %s, tips: %s',
fullname,
err,
tips
);
continue;
}
// only log the error, so the changes stream keeps reading
this.logger.error('[ChangesStreamService.executeSync:error] fullname: %s, error: %s, tips: %s',
fullname, err, tips);
this.logger.error(
'[ChangesStreamService.executeSync:error] fullname: %s, error: %s, tips: %s',
fullname,
err,
tips
);
this.logger.error(err);
continue;
}
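
Aside: needSync above combines sync mode, an explicit per-package registryId, the scope's owning registry, and the "generic registry" fallback. A self-contained sketch of that decision order, reconstructed from the comments and visible branches (field names are assumptions, not the service's real API):

interface NeedSyncInput {
  syncMode: 'all' | 'exist' | 'none';
  packageExists: boolean;
  packageRegistryId?: string; // registryId pinned on the package, if any
  currentRegistryId: string;
  scopeRegistryId?: string; // registry that owns the package's scope, if the scope is registered
  registryScopeCount: number; // number of scopes bound to the current registry
}

function needSync(input: NeedSyncInput): boolean {
  // 'exist' mode skips packages that are not in the local database
  if (input.syncMode === 'exist' && !input.packageExists) return false;
  // an explicit registryId on the package takes precedence over everything else
  if (input.packageRegistryId) return input.packageRegistryId === input.currentRegistryId;
  // the package's scope belongs to the current registry
  if (input.scopeRegistryId !== undefined && input.scopeRegistryId === input.currentRegistryId) return true;
  // generic registry: no scope on the package and no scopes bound to this registry
  return input.scopeRegistryId === undefined && input.registryScopeCount === 0;
}

console.log(needSync({ syncMode: 'exist', packageExists: false, currentRegistryId: 'r1', registryScopeCount: 0 })); // false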

View File

@@ -3,12 +3,13 @@ import pMap from 'p-map';
import { AbstractService } from '../../common/AbstractService.js';
import { HookType } from '../../common/enum/Hook.js';
import { TaskState } from '../../common/enum/Task.js';
import { HookEvent } from '../entity/HookEvent.js';
import { CreateHookTask, Task } from '../entity/Task.js';
import { HookRepository } from '../../repository/HookRepository.js';
import { PackageRepository } from '../../repository/PackageRepository.js';
import { Hook } from '../entity/Hook.js';
import { TaskService } from './TaskService.js';
import type { HookEvent } from '../entity/HookEvent.js';
import type { CreateHookTask } from '../entity/Task.js';
import { Task } from '../entity/Task.js';
import type { HookRepository } from '../../repository/HookRepository.js';
import type { PackageRepository } from '../../repository/PackageRepository.js';
import type { Hook } from '../entity/Hook.js';
import type { TaskService } from './TaskService.js';
import { isoNow } from '../../common/LogUtil.js';
import { getScopeAndName } from '../../common/PackageUtil.js';
@@ -27,10 +28,14 @@ export class CreateHookTriggerService extends AbstractService {
async executeTask(task: CreateHookTask): Promise<void> {
const { hookEvent } = task.data;
const [ scope, name ] = getScopeAndName(hookEvent.fullname);
const [scope, name] = getScopeAndName(hookEvent.fullname);
const pkg = await this.packageRepository.findPackage(scope, name);
if (!pkg) {
await this.taskService.finishTask(task, TaskState.Success, `[${isoNow()}][Hooks] package ${hookEvent.fullname} not exits`);
await this.taskService.finishTask(
task,
TaskState.Success,
`[${isoNow()}][Hooks] package ${hookEvent.fullname} not exits`
);
return;
}
@@ -38,41 +43,97 @@ export class CreateHookTriggerService extends AbstractService {
`[${isoNow()}][Hooks] Start Create Trigger for ${pkg.fullname} ${task.data.hookEvent.changeId}`,
`[${isoNow()}][Hooks] change content ${JSON.stringify(task.data.hookEvent.change)}`,
];
await this.taskService.finishTask(task, TaskState.Processing, startLog.join('\n'));
await this.taskService.finishTask(
task,
TaskState.Processing,
startLog.join('\n')
);
try {
await this.taskService.appendTaskLog(task, `[${isoNow()}][Hooks] PushHooks to ${HookType.Package} ${pkg.fullname}\n`);
await this.createTriggerByMethod(task, HookType.Package, pkg.fullname, hookEvent);
await this.taskService.appendTaskLog(task, `[${isoNow()}][Hooks] PushHooks to ${HookType.Scope} ${pkg.scope}\n`);
await this.createTriggerByMethod(task, HookType.Scope, pkg.scope, hookEvent);
await this.taskService.appendTaskLog(
task,
`[${isoNow()}][Hooks] PushHooks to ${HookType.Package} ${pkg.fullname}\n`
);
await this.createTriggerByMethod(
task,
HookType.Package,
pkg.fullname,
hookEvent
);
await this.taskService.appendTaskLog(
task,
`[${isoNow()}][Hooks] PushHooks to ${HookType.Scope} ${pkg.scope}\n`
);
await this.createTriggerByMethod(
task,
HookType.Scope,
pkg.scope,
hookEvent
);
const maintainers = await this.packageRepository.listPackageMaintainers(pkg.packageId);
const maintainers = await this.packageRepository.listPackageMaintainers(
pkg.packageId
);
for (const maintainer of maintainers) {
await this.taskService.appendTaskLog(task, `[${isoNow()}][Hooks] PushHooks to ${HookType.Owner} ${maintainer.name}\n`);
await this.createTriggerByMethod(task, HookType.Owner, maintainer.name, hookEvent);
await this.taskService.appendTaskLog(
task,
`[${isoNow()}][Hooks] PushHooks to ${HookType.Owner} ${maintainer.name}\n`
);
await this.createTriggerByMethod(
task,
HookType.Owner,
maintainer.name,
hookEvent
);
}
await this.taskService.finishTask(task, TaskState.Success, `[${isoNow()}][Hooks] create trigger succeed \n`);
await this.taskService.finishTask(
task,
TaskState.Success,
`[${isoNow()}][Hooks] create trigger succeed \n`
);
} catch (e) {
e.message = 'create trigger failed: ' + e.message;
await this.taskService.finishTask(task, TaskState.Fail, `[${isoNow()}][Hooks] ${e.stack} \n`);
await this.taskService.finishTask(
task,
TaskState.Fail,
`[${isoNow()}][Hooks] ${e.stack} \n`
);
return;
}
}
private async createTriggerByMethod(task: Task, type: HookType, name: string, hookEvent: HookEvent) {
private async createTriggerByMethod(
task: Task,
type: HookType,
name: string,
hookEvent: HookEvent
) {
let hooks = await this.hookRepository.listHooksByTypeAndName(type, name);
while (hooks.length) {
await this.createTriggerTasks(hooks, hookEvent);
hooks = await this.hookRepository.listHooksByTypeAndName(type, name, hooks[hooks.length - 1].id);
await this.taskService.appendTaskLog(task,
`[${isoNow()}][Hooks] PushHooks to ${type} ${name} ${hooks.length} \n`);
hooks = await this.hookRepository.listHooksByTypeAndName(
type,
name,
hooks[hooks.length - 1].id
);
await this.taskService.appendTaskLog(
task,
`[${isoNow()}][Hooks] PushHooks to ${type} ${name} ${hooks.length} \n`
);
}
}
private async createTriggerTasks(hooks: Array<Hook>, hookEvent: HookEvent) {
await pMap(hooks, async hook => {
const triggerHookTask = Task.createTriggerHookTask(hookEvent, hook.hookId);
await this.taskService.createTask(triggerHookTask, true);
}, { concurrency: 5 });
await pMap(
hooks,
async hook => {
const triggerHookTask = Task.createTriggerHookTask(
hookEvent,
hook.hookId
);
await this.taskService.createTask(triggerHookTask, true);
},
{ concurrency: 5 }
);
}
}
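
Aside: createTriggerByMethod above pages through hooks with a cursor (the id of the last hook in the previous batch) and pushes trigger tasks with a bounded concurrency of 5 via p-map. A self-contained sketch of the same pagination pattern; listHooksPage and pushTask are stand-ins for the repository and the task service:

import pMap from 'p-map';

interface HookRow { id: number; hookId: string }

// stand-in for hookRepository.listHooksByTypeAndName(type, name, sinceId)
async function listHooksPage(sinceId = 0, pageSize = 3): Promise<HookRow[]> {
  const all: HookRow[] = Array.from({ length: 7 }, (_, i) => ({ id: i + 1, hookId: `hook-${i + 1}` }));
  return all.filter(row => row.id > sinceId).slice(0, pageSize);
}

async function pushTask(hookId: string): Promise<void> {
  console.log('create trigger task for', hookId);
}

async function createTriggers(): Promise<void> {
  let hooks = await listHooksPage();
  while (hooks.length) {
    // bounded fan-out: at most 5 trigger tasks are created at once
    await pMap(hooks, hook => pushTask(hook.hookId), { concurrency: 5 });
    // the next page starts after the last id we have seen
    hooks = await listHooksPage(hooks[hooks.length - 1].id);
  }
}

await createTriggers(); // logs hook-1 .. hook-7 across three pages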

View File

@@ -1,5 +1,7 @@
import { ContextEventBus, Inject } from '@eggjs/tegg';
import { Advice, IAdvice } from '@eggjs/tegg/aop';
import type { ContextEventBus } from '@eggjs/tegg';
import { Inject } from '@eggjs/tegg';
import type { IAdvice } from '@eggjs/tegg/aop';
import { Advice } from '@eggjs/tegg/aop';
@Advice()
export class EventCorkAdvice implements IAdvice {

View File

@@ -1,7 +1,7 @@
import { SingletonProto, AccessLevel, Inject } from '@eggjs/tegg';
import { EggLogger } from 'egg';
import type { EggLogger } from 'egg';
import pMap from 'p-map';
import { PackageVersionRepository } from '../../repository/PackageVersionRepository.js';
import type { PackageVersionRepository } from '../../repository/PackageVersionRepository.js';
import { PaddingSemVer } from '../entity/PaddingSemVer.js';
@SingletonProto({
@@ -17,17 +17,30 @@ export class FixNoPaddingVersionService {
async fixPaddingVersion(id?: number): Promise<void> {
// eslint-disable-next-line no-constant-condition
while (true) {
const packageVersions = await this.packageVersionRepository.findHaveNotPaddingVersion(id);
const packageVersions =
await this.packageVersionRepository.findHaveNotPaddingVersion(id);
if (packageVersions.length === 0) {
break;
}
id = packageVersions[packageVersions.length - 1].id as unknown as number + 1;
this.logger.info('[FixNoPaddingVersionService] fix padding version ids %j', packageVersions.map(t => t.id));
id =
(packageVersions[packageVersions.length - 1].id as unknown as number) +
1;
this.logger.info(
'[FixNoPaddingVersionService] fix padding version ids %j',
packageVersions.map(t => t.id)
);
await pMap(packageVersions, async packageVersion => {
const paddingSemver = new PaddingSemVer(packageVersion.version);
await this.packageVersionRepository.fixPaddingVersion(packageVersion.packageVersionId, paddingSemver);
}, { concurrency: 30 });
await pMap(
packageVersions,
async packageVersion => {
const paddingSemver = new PaddingSemVer(packageVersion.version);
await this.packageVersionRepository.fixPaddingVersion(
packageVersion.packageVersionId,
paddingSemver
);
},
{ concurrency: 30 }
);
}
}
}

View File

@@ -1,12 +1,9 @@
import { AccessLevel, SingletonProto, Inject } from '@eggjs/tegg';
import {
ForbiddenError,
NotFoundError,
} from 'egg-errors';
import { EggAppConfig } from 'egg';
import { HookRepository } from '../../repository/HookRepository.js';
import { ForbiddenError, NotFoundError } from 'egg-errors';
import type { EggAppConfig } from 'egg';
import type { HookRepository } from '../../repository/HookRepository.js';
import { Hook } from '../entity/Hook.js';
import { HookType } from '../../common/enum/Hook.js';
import type { HookType } from '../../common/enum/Hook.js';
export interface CreateHookCommand {
type: HookType;
@@ -59,7 +56,9 @@ export class HookManageService {
throw new NotFoundError(`hook ${cmd.hookId} not found`);
}
if (hook.ownerId !== cmd.operatorId) {
throw new ForbiddenError(`hook ${cmd.hookId} not belong to ${cmd.operatorId}`);
throw new ForbiddenError(
`hook ${cmd.hookId} not belong to ${cmd.operatorId}`
);
}
hook.endpoint = cmd.endpoint;
hook.secret = cmd.secret;
@@ -73,7 +72,9 @@ export class HookManageService {
throw new NotFoundError(`hook ${cmd.hookId} not found`);
}
if (hook.ownerId !== cmd.operatorId) {
throw new ForbiddenError(`hook ${cmd.hookId} not belong to ${cmd.operatorId}`);
throw new ForbiddenError(
`hook ${cmd.hookId} not belong to ${cmd.operatorId}`
);
}
await this.hookRepository.removeHook(cmd.hookId);
return hook;

View File

@@ -1,15 +1,15 @@
import { AccessLevel, SingletonProto, Inject } from '@eggjs/tegg';
import { EggContextHttpClient } from 'egg';
import { TriggerHookTask } from '../entity/Task.js';
import { HookEvent } from '../entity/HookEvent.js';
import { HookRepository } from '../../repository/HookRepository.js';
import { PackageRepository } from '../../repository/PackageRepository.js';
import { DistRepository } from '../../repository/DistRepository.js';
import { UserRepository } from '../../repository/UserRepository.js';
import { Hook } from '../entity/Hook.js';
import type { EggContextHttpClient } from 'egg';
import type { TriggerHookTask } from '../entity/Task.js';
import type { HookEvent } from '../entity/HookEvent.js';
import type { HookRepository } from '../../repository/HookRepository.js';
import type { PackageRepository } from '../../repository/PackageRepository.js';
import type { DistRepository } from '../../repository/DistRepository.js';
import type { UserRepository } from '../../repository/UserRepository.js';
import type { Hook } from '../entity/Hook.js';
import { isoNow } from '../../common/LogUtil.js';
import { TaskState } from '../../common/enum/Task.js';
import { TaskService } from './TaskService.js';
import type { TaskService } from './TaskService.js';
import { getScopeAndName } from '../../common/PackageUtil.js';
@SingletonProto({
@@ -38,24 +38,40 @@ export class HookTriggerService {
const { hookId, hookEvent } = task.data;
const hook = await this.hookRepository.findHookById(hookId);
if (!hook) {
await this.taskService.finishTask(task, TaskState.Success, `[${isoNow()}][TriggerHooks] hook ${hookId} not exits`);
await this.taskService.finishTask(
task,
TaskState.Success,
`[${isoNow()}][TriggerHooks] hook ${hookId} not exits`
);
return;
}
try {
const payload = await this.createTriggerPayload(task, hookEvent, hook);
if (!payload) {
await this.taskService.finishTask(task, TaskState.Success, `[${isoNow()}][TriggerHooks] generate payload failed \n`);
await this.taskService.finishTask(
task,
TaskState.Success,
`[${isoNow()}][TriggerHooks] generate payload failed \n`
);
return;
}
const status = await this.doExecuteTrigger(hook, payload);
hook.latestTaskId = task.taskId;
task.data.responseStatus = status;
await this.hookRepository.saveHook(hook);
await this.taskService.finishTask(task, TaskState.Success, `[${isoNow()}][TriggerHooks] trigger hook succeed ${status} \n`);
await this.taskService.finishTask(
task,
TaskState.Success,
`[${isoNow()}][TriggerHooks] trigger hook succeed ${status} \n`
);
} catch (e) {
e.message = 'trigger hook failed: ' + e.message;
task.error = e.message;
await this.taskService.finishTask(task, TaskState.Fail, `[${isoNow()}][TriggerHooks] ${e.stack} \n`);
await this.taskService.finishTask(
task,
TaskState.Fail,
`[${isoNow()}][TriggerHooks] ${e.stack} \n`
);
return;
}
}
@@ -82,19 +98,33 @@ export class HookTriggerService {
throw new Error(`hook response with ${res.status}`);
}
async createTriggerPayload(task: TriggerHookTask, hookEvent: HookEvent, hook: Hook): Promise<object | undefined> {
const [ scope, name ] = getScopeAndName(hookEvent.fullname);
async createTriggerPayload(
task: TriggerHookTask,
hookEvent: HookEvent,
hook: Hook
): Promise<object | undefined> {
const [scope, name] = getScopeAndName(hookEvent.fullname);
const pkg = await this.packageRepository.findPackage(scope, name);
if (!pkg) {
await this.taskService.finishTask(task, TaskState.Success, `[${isoNow()}][TriggerHooks] can not found pkg for ${hookEvent.fullname} \n`);
await this.taskService.finishTask(
task,
TaskState.Success,
`[${isoNow()}][TriggerHooks] can not found pkg for ${hookEvent.fullname} \n`
);
return;
}
const user = await this.userRepository.findUserByUserId(hook.ownerId);
if (!user) {
await this.taskService.finishTask(task, TaskState.Success, `[${isoNow()}][TriggerHooks] can not found user for ${hook.ownerId} \n`);
await this.taskService.finishTask(
task,
TaskState.Success,
`[${isoNow()}][TriggerHooks] can not found user for ${hook.ownerId} \n`
);
return;
}
const manifest = await this.distRepository.readDistBytesToJSON(pkg!.manifestsDist!);
const manifest = await this.distRepository.readDistBytesToJSON(
pkg!.manifestsDist!
);
return {
event: hookEvent.event,
name: pkg.fullname,

File diff suppressed because it is too large

View File

@@ -1,14 +1,18 @@
import { AccessLevel, Inject, SingletonProto } from '@eggjs/tegg';
import { estypes, errors } from '@elastic/elasticsearch';
import type { estypes } from '@elastic/elasticsearch';
import { errors } from '@elastic/elasticsearch';
import dayjs from 'dayjs';
import { AbstractService } from '../../common/AbstractService.js';
import { formatAuthor, getScopeAndName } from '../../common/PackageUtil.js';
import { PackageManagerService } from './PackageManagerService.js';
import { SearchManifestType, SearchMappingType, SearchRepository } from '../../repository/SearchRepository.js';
import { PackageVersionDownloadRepository } from '../../repository/PackageVersionDownloadRepository.js';
import { PackageRepository } from '../../repository/PackageRepository.js';
import { PackageVersionBlockRepository } from '../../repository/PackageVersionBlockRepository.js';
import type { PackageManagerService } from './PackageManagerService.js';
import type {
SearchManifestType,
SearchMappingType,
SearchRepository,
} from '../../repository/SearchRepository.js';
import type { PackageVersionDownloadRepository } from '../../repository/PackageVersionDownloadRepository.js';
import type { PackageRepository } from '../../repository/PackageRepository.js';
import type { PackageVersionBlockRepository } from '../../repository/PackageVersionBlockRepository.js';
@SingletonProto({
accessLevel: AccessLevel.PUBLIC,
@@ -26,23 +30,39 @@ export class PackageSearchService extends AbstractService {
protected packageVersionBlockRepository: PackageVersionBlockRepository;
async syncPackage(fullname: string, isSync = true) {
const [ scope, name ] = getScopeAndName(fullname);
const fullManifests = await this.packageManagerService.listPackageFullManifests(scope, name, isSync);
const [scope, name] = getScopeAndName(fullname);
const fullManifests =
await this.packageManagerService.listPackageFullManifests(
scope,
name,
isSync
);
if (!fullManifests.data) {
this.logger.warn('[PackageSearchService.syncPackage] save package:%s not found', fullname);
this.logger.warn(
'[PackageSearchService.syncPackage] save package:%s not found',
fullname
);
return;
}
const pkg = await this.packageRepository.findPackage(scope, name);
if (!pkg) {
this.logger.warn('[PackageSearchService.syncPackage] findPackage:%s not found', fullname);
this.logger.warn(
'[PackageSearchService.syncPackage] findPackage:%s not found',
fullname
);
return;
}
const block = await this.packageVersionBlockRepository.findPackageBlock(pkg.packageId);
const block = await this.packageVersionBlockRepository.findPackageBlock(
pkg.packageId
);
if (block) {
this.logger.warn('[PackageSearchService.syncPackage] package:%s is blocked, try to remove es', fullname);
this.logger.warn(
'[PackageSearchService.syncPackage] package:%s is blocked, try to remove es',
fullname
);
await this.removePackage(fullname);
return;
}
@@ -51,7 +71,11 @@ export class PackageSearchService extends AbstractService {
const startDate = dayjs().subtract(1, 'year');
const endDate = dayjs();
const entities = await this.packageVersionDownloadRepository.query(pkg.packageId, startDate.toDate(), endDate.toDate());
const entities = await this.packageVersionDownloadRepository.query(
pkg.packageId,
startDate.toDate(),
endDate.toDate()
);
let downloadsAll = 0;
for (const entity of entities) {
for (let i = 1; i <= 31; i++) {
@@ -76,7 +100,10 @@ export class PackageSearchService extends AbstractService {
keywords: manifest.keywords || [],
versions: Object.keys(manifest.versions),
description: manifest.description,
license: typeof manifest.license === 'object' ? manifest.license?.type : manifest.license,
license:
typeof manifest.license === 'object'
? manifest.license?.type
: manifest.license,
maintainers: manifest.maintainers,
author: formatAuthor(manifest.author),
'dist-tags': manifest['dist-tags'],
@@ -112,7 +139,11 @@ export class PackageSearchService extends AbstractService {
return await this.searchRepository.upsertPackage(document);
}
async searchPackage(text: string, from: number, size: number): Promise<{ objects: (SearchManifestType | undefined)[], total: number }> {
async searchPackage(
text: string,
from: number,
size: number
): Promise<{ objects: (SearchManifestType | undefined)[]; total: number }> {
const matchQueries = this._buildMatchQueries(text);
const scriptScore = this._buildScriptScore({
text,
@@ -143,7 +174,10 @@ export class PackageSearchService extends AbstractService {
// Since https://github.com/npm/cli/pull/7407 (npm cli v10.6.0), the npm cli uses the publisher field (it previously used the maintainers field)
// Judging from existing data, the _npmUser field and the publisher field are equivalent
// To stay compatible with old versions, do not delete the _npmUser field
if (!item._source?.package.publisher && item._source?.package._npmUser) {
if (
!item._source?.package.publisher &&
item._source?.package._npmUser
) {
item._source.package.publisher = {
username: item._source.package._npmUser.name,
email: item._source.package._npmUser.email,
@@ -162,7 +196,10 @@ export class PackageSearchService extends AbstractService {
} catch (error) {
// if the package does not exist, returns success
if (error instanceof errors.ResponseError && error?.statusCode === 404) {
this.logger.warn('[PackageSearchService.removePackage] remove package:%s not found', fullname);
this.logger.warn(
'[PackageSearchService.removePackage] remove package:%s not found',
fullname
);
return fullname;
}
throw error;
@@ -241,7 +278,10 @@ export class PackageSearchService extends AbstractService {
];
}
private _buildScriptScore(params: { text: string | undefined, scoreEffect: number }) {
private _buildScriptScore(params: {
text: string | undefined;
scoreEffect: number;
}) {
// keep search simple, only download(popularity)
const downloads = 'doc["downloads.all"].value';
const source = `doc["package.name.raw"].value.equals(params.text) ? 100000 + ${downloads} : _score * Math.pow(${downloads}, params.scoreEffect)`;
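
To see what that script does, here is a minimal TypeScript sketch (not part of this commit) of the same expression; scriptScore and all numbers are invented for illustration:

// An exact package-name match gets a fixed 100000 bonus plus its raw downloads;
// any other hit scales the text relevance score by downloads^scoreEffect.
function scriptScore(input: {
  exactNameMatch: boolean;
  downloadsAll: number;
  textScore: number;
  scoreEffect: number;
}): number {
  const { exactNameMatch, downloadsAll, textScore, scoreEffect } = input;
  return exactNameMatch
    ? 100_000 + downloadsAll
    : textScore * Math.pow(downloadsAll, scoreEffect);
}

// Example: an exact match with 5000 downloads scores 105000, while a fuzzy match with
// textScore 10 and scoreEffect 0.5 scores 10 * Math.sqrt(5000) ≈ 707.
scriptScore({ exactNameMatch: true, downloadsAll: 5000, textScore: 10, scoreEffect: 0.5 });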

File diff suppressed because it is too large

View File

@@ -3,31 +3,22 @@ import { join, dirname, basename } from 'node:path';
import { randomUUID } from 'node:crypto';
// @ts-expect-error type error
import tar from '@fengmk2/tar';
import {
AccessLevel,
SingletonProto,
Inject,
} from '@eggjs/tegg';
import { AccessLevel, SingletonProto, Inject } from '@eggjs/tegg';
import { ConflictError, ForbiddenError } from 'egg-errors';
import semver from 'semver';
import { AbstractService } from '../../common/AbstractService.js';
import {
calculateIntegrity,
getFullname,
} from '../../common/PackageUtil.js';
import { calculateIntegrity, getFullname } from '../../common/PackageUtil.js';
import { createTempDir, mimeLookup } from '../../common/FileUtil.js';
import {
PackageRepository,
} from '../../repository/PackageRepository.js';
import { PackageVersionFileRepository } from '../../repository/PackageVersionFileRepository.js';
import { PackageVersionRepository } from '../../repository/PackageVersionRepository.js';
import { DistRepository } from '../../repository/DistRepository.js';
import type { PackageRepository } from '../../repository/PackageRepository.js';
import type { PackageVersionFileRepository } from '../../repository/PackageVersionFileRepository.js';
import type { PackageVersionRepository } from '../../repository/PackageVersionRepository.js';
import type { DistRepository } from '../../repository/DistRepository.js';
import { isDuplicateKeyError } from '../../repository/util/ErrorUtil.js';
import { PackageVersionFile } from '../entity/PackageVersionFile.js';
import { PackageVersion } from '../entity/PackageVersion.js';
import { Package } from '../entity/Package.js';
import { PackageManagerService } from './PackageManagerService.js';
import { CacheAdapter } from '../../common/adapter/CacheAdapter.js';
import type { PackageVersion } from '../entity/PackageVersion.js';
import type { Package } from '../entity/Package.js';
import type { PackageManagerService } from './PackageManagerService.js';
import type { CacheAdapter } from '../../common/adapter/CacheAdapter.js';
const unpkgWhiteListUrl = 'https://github.com/cnpm/unpkg-white-list';
const CHECK_TIMEOUT = process.env.NODE_ENV === 'test' ? 1 : 60000;
@@ -51,34 +42,55 @@ export class PackageVersionFileService extends AbstractService {
#unpkgWhiteListCheckTime: number = 0;
#unpkgWhiteListCurrentVersion: string = '';
#unpkgWhiteListAllowPackages: Record<string, {
version: string;
}> = {};
#unpkgWhiteListAllowPackages: Record<
string,
{
version: string;
}
> = {};
#unpkgWhiteListAllowScopes: string[] = [];
async listPackageVersionFiles(pkgVersion: PackageVersion, directory: string) {
await this.#ensurePackageVersionFilesSync(pkgVersion);
return await this.packageVersionFileRepository.listPackageVersionFiles(pkgVersion.packageVersionId, directory);
return await this.packageVersionFileRepository.listPackageVersionFiles(
pkgVersion.packageVersionId,
directory
);
}
async showPackageVersionFile(pkgVersion: PackageVersion, path: string) {
await this.#ensurePackageVersionFilesSync(pkgVersion);
const { directory, name } = this.#getDirectoryAndName(path);
return await this.packageVersionFileRepository.findPackageVersionFile(
pkgVersion.packageVersionId, directory, name);
pkgVersion.packageVersionId,
directory,
name
);
}
async #ensurePackageVersionFilesSync(pkgVersion: PackageVersion) {
const hasFiles = await this.packageVersionFileRepository.hasPackageVersionFiles(pkgVersion.packageVersionId);
const hasFiles =
await this.packageVersionFileRepository.hasPackageVersionFiles(
pkgVersion.packageVersionId
);
if (!hasFiles) {
const lockName = `${pkgVersion.packageVersionId}:syncFiles`;
const lockRes = await this.cacheAdapter.usingLock(lockName, 60, async () => {
await this.syncPackageVersionFiles(pkgVersion);
});
const lockRes = await this.cacheAdapter.usingLock(
lockName,
60,
async () => {
await this.syncPackageVersionFiles(pkgVersion);
}
);
// lock fail
if (!lockRes) {
this.logger.warn('[package:version:syncPackageVersionFiles] check lock:%s fail', lockName);
throw new ConflictError('Package version file sync is currently in progress. Please try again later.');
this.logger.warn(
'[package:version:syncPackageVersionFiles] check lock:%s fail',
lockName
);
throw new ConflictError(
'Package version file sync is currently in progress. Please try again later.'
);
}
}
}
@@ -92,27 +104,42 @@ export class PackageVersionFileService extends AbstractService {
this.#unpkgWhiteListCheckTime = Date.now();
const whiteListScope = '';
const whiteListPackageName = 'unpkg-white-list';
const whiteListPackageVersion = await this.packageVersionRepository.findVersionByTag(
whiteListScope, whiteListPackageName, 'latest');
const whiteListPackageVersion =
await this.packageVersionRepository.findVersionByTag(
whiteListScope,
whiteListPackageName,
'latest'
);
if (!whiteListPackageVersion) return;
// same version, skip update for performance
if (this.#unpkgWhiteListCurrentVersion === whiteListPackageVersion) return;
// update the new version white list
const { manifest } = await this.packageManagerService.showPackageVersionManifest(
whiteListScope, whiteListPackageName, whiteListPackageVersion, false, true);
const { manifest } =
await this.packageManagerService.showPackageVersionManifest(
whiteListScope,
whiteListPackageName,
whiteListPackageVersion,
false,
true
);
if (!manifest) return;
this.#unpkgWhiteListCurrentVersion = manifest.version;
this.#unpkgWhiteListAllowPackages = manifest.allowPackages ?? {} as any;
this.#unpkgWhiteListAllowScopes = manifest.allowScopes ?? [] as any;
this.logger.info('[PackageVersionFileService.updateUnpkgWhiteList] version:%s, total %s packages, %s scopes',
this.#unpkgWhiteListAllowPackages = manifest.allowPackages ?? ({} as any);
this.#unpkgWhiteListAllowScopes = manifest.allowScopes ?? ([] as any);
this.logger.info(
'[PackageVersionFileService.updateUnpkgWhiteList] version:%s, total %s packages, %s scopes',
whiteListPackageVersion,
Object.keys(this.#unpkgWhiteListAllowPackages).length,
this.#unpkgWhiteListAllowScopes.length,
this.#unpkgWhiteListAllowScopes.length
);
}
async checkPackageVersionInUnpkgWhiteList(pkgScope: string, pkgName: string, pkgVersion: string) {
async checkPackageVersionInUnpkgWhiteList(
pkgScope: string,
pkgName: string,
pkgVersion: string
) {
if (!this.config.cnpmcore.enableSyncUnpkgFilesWhiteList) return;
await this.#updateUnpkgWhiteList();
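
For context, the white list consumed above is just a package manifest with allowPackages and allowScopes fields; the shape below mirrors the field names in the code, but every value is made up:

// Hypothetical unpkg-white-list manifest; field names from the code above, values invented.
const whiteListManifest = {
  version: '1.2.3',
  allowPackages: {
    lodash: { version: '*' },
    '@example/foo': { version: '^1.0.0' },
  } as Record<string, { version: string }>,
  allowScopes: ['@example'],
};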
@@ -123,14 +150,22 @@ export class PackageVersionFileService extends AbstractService {
const fullname = getFullname(pkgScope, pkgName);
const pkgConfig = this.#unpkgWhiteListAllowPackages[fullname];
if (!pkgConfig?.version) {
throw new ForbiddenError(`"${fullname}" is not allow to unpkg files, see ${unpkgWhiteListUrl}`);
throw new ForbiddenError(
`"${fullname}" is not allow to unpkg files, see ${unpkgWhiteListUrl}`
);
}
// satisfies does not include prerelease versions by default
// https://docs.npmjs.com/about-semantic-versioning#using-semantic-versioning-to-specify-update-types-your-package-can-accept
// [x, *] mean any version; handle all of them uniformly through semver here
if (!semver.satisfies(pkgVersion, pkgConfig.version, { includePrerelease: true })) {
throw new ForbiddenError(`"${fullname}@${pkgVersion}" not satisfies "${pkgConfig.version}" to unpkg files, see ${unpkgWhiteListUrl}`);
if (
!semver.satisfies(pkgVersion, pkgConfig.version, {
includePrerelease: true,
})
) {
throw new ForbiddenError(
`"${fullname}@${pkgVersion}" not satisfies "${pkgConfig.version}" to unpkg files, see ${unpkgWhiteListUrl}`
);
}
}
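
The includePrerelease option matters because semver.satisfies skips prerelease versions by default; a quick sketch with invented versions:

import semver from 'semver';

// Without the option, a prerelease never satisfies an ordinary range:
semver.satisfies('2.0.0-beta.1', '*'); // false
// With includePrerelease (as used above), '*' and 'x' really mean any version:
semver.satisfies('2.0.0-beta.1', '*', { includePrerelease: true }); // true
semver.satisfies('1.2.3', '^1.0.0', { includePrerelease: true }); // true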
@@ -141,10 +176,21 @@ export class PackageVersionFileService extends AbstractService {
const tarFile = `${tmpdir}.tgz`;
const readmeFilenames: string[] = [];
try {
this.logger.info('[PackageVersionFileService.syncPackageReadme:download-start] dist:%s(path:%s, size:%s) => tarFile:%s',
latestPkgVersion.tarDist.distId, latestPkgVersion.tarDist.path, latestPkgVersion.tarDist.size, tarFile);
await this.distRepository.downloadDistToFile(latestPkgVersion.tarDist, tarFile);
this.logger.info('[PackageVersionFileService.syncPackageReadme:extract-start] tmpdir:%s', tmpdir);
this.logger.info(
'[PackageVersionFileService.syncPackageReadme:download-start] dist:%s(path:%s, size:%s) => tarFile:%s',
latestPkgVersion.tarDist.distId,
latestPkgVersion.tarDist.path,
latestPkgVersion.tarDist.size,
tarFile
);
await this.distRepository.downloadDistToFile(
latestPkgVersion.tarDist,
tarFile
);
this.logger.info(
'[PackageVersionFileService.syncPackageReadme:extract-start] tmpdir:%s',
tmpdir
);
await tar.extract({
file: tarFile,
cwd: tmpdir,
@@ -163,8 +209,13 @@ export class PackageVersionFileService extends AbstractService {
await this.packageManagerService.savePackageReadme(pkg, readmeFile);
}
} catch (err) {
this.logger.warn('[PackageVersionFileService.syncPackageReadme:error] packageVersionId: %s, readmeFilenames: %j, tmpdir: %s, error: %s',
latestPkgVersion.packageVersionId, readmeFilenames, tmpdir, err);
this.logger.warn(
'[PackageVersionFileService.syncPackageReadme:error] packageVersionId: %s, readmeFilenames: %j, tmpdir: %s, error: %s',
latestPkgVersion.packageVersionId,
readmeFilenames,
tmpdir,
err
);
// ignore TAR_BAD_ARCHIVE error
if (err.code === 'TAR_BAD_ARCHIVE') return;
throw err;
@@ -173,8 +224,11 @@ export class PackageVersionFileService extends AbstractService {
await fs.rm(tarFile, { force: true });
await fs.rm(tmpdir, { recursive: true, force: true });
} catch (err) {
this.logger.warn('[PackageVersionFileService.syncPackageReadme:warn] remove tmpdir: %s, error: %s',
tmpdir, err);
this.logger.warn(
'[PackageVersionFileService.syncPackageReadme:warn] remove tmpdir: %s, error: %s',
tmpdir,
err
);
}
}
}
@@ -185,11 +239,17 @@ export class PackageVersionFileService extends AbstractService {
if (!this.config.cnpmcore.enableUnpkg) return files;
if (!this.config.cnpmcore.enableSyncUnpkgFiles) return files;
const pkg = await this.packageRepository.findPackageByPackageId(pkgVersion.packageId);
const pkg = await this.packageRepository.findPackageByPackageId(
pkgVersion.packageId
);
if (!pkg) return files;
// check unpkg white list
await this.checkPackageVersionInUnpkgWhiteList(pkg.scope, pkg.name, pkgVersion.version);
await this.checkPackageVersionInUnpkgWhiteList(
pkg.scope,
pkg.name,
pkgVersion.version
);
const dirname = `unpkg_${pkg.fullname.replace('/', '_')}@${pkgVersion.version}_${randomUUID()}`;
const tmpdir = await createTempDir(this.config.dataDir, dirname);
@@ -197,10 +257,18 @@ export class PackageVersionFileService extends AbstractService {
const paths: string[] = [];
const readmeFilenames: string[] = [];
try {
this.logger.info('[PackageVersionFileService.syncPackageVersionFiles:download-start] dist:%s(path:%s, size:%s) => tarFile:%s',
pkgVersion.tarDist.distId, pkgVersion.tarDist.path, pkgVersion.tarDist.size, tarFile);
this.logger.info(
'[PackageVersionFileService.syncPackageVersionFiles:download-start] dist:%s(path:%s, size:%s) => tarFile:%s',
pkgVersion.tarDist.distId,
pkgVersion.tarDist.path,
pkgVersion.tarDist.size,
tarFile
);
await this.distRepository.downloadDistToFile(pkgVersion.tarDist, tarFile);
this.logger.info('[PackageVersionFileService.syncPackageVersionFiles:extract-start] tmpdir:%s', tmpdir);
this.logger.info(
'[PackageVersionFileService.syncPackageVersionFiles:extract-start] tmpdir:%s',
tmpdir
);
await tar.extract({
file: tarFile,
cwd: tmpdir,
@@ -216,20 +284,38 @@ export class PackageVersionFileService extends AbstractService {
});
for (const path of paths) {
const localFile = join(tmpdir, path);
const file = await this.#savePackageVersionFile(pkg, pkgVersion, path, localFile);
const file = await this.#savePackageVersionFile(
pkg,
pkgVersion,
path,
localFile
);
files.push(file);
}
this.logger.info('[PackageVersionFileService.syncPackageVersionFiles:success] packageVersionId: %s, %d paths, %d files, tmpdir: %s',
pkgVersion.packageVersionId, paths.length, files.length, tmpdir);
this.logger.info(
'[PackageVersionFileService.syncPackageVersionFiles:success] packageVersionId: %s, %d paths, %d files, tmpdir: %s',
pkgVersion.packageVersionId,
paths.length,
files.length,
tmpdir
);
if (readmeFilenames.length > 0) {
const readmeFilename = this.#preferMarkdownReadme(readmeFilenames);
const readmeFile = join(tmpdir, readmeFilename);
await this.packageManagerService.savePackageVersionReadme(pkgVersion, readmeFile);
await this.packageManagerService.savePackageVersionReadme(
pkgVersion,
readmeFile
);
}
return files;
} catch (err) {
this.logger.warn('[PackageVersionFileService.syncPackageVersionFiles:error] packageVersionId: %s, %d paths, tmpdir: %s, error: %s',
pkgVersion.packageVersionId, paths.length, tmpdir, err);
this.logger.warn(
'[PackageVersionFileService.syncPackageVersionFiles:error] packageVersionId: %s, %d paths, tmpdir: %s, error: %s',
pkgVersion.packageVersionId,
paths.length,
tmpdir,
err
);
// ignore TAR_BAD_ARCHIVE error
if (err.code === 'TAR_BAD_ARCHIVE') return files;
throw err;
@@ -238,16 +324,27 @@ export class PackageVersionFileService extends AbstractService {
await fs.rm(tarFile, { force: true });
await fs.rm(tmpdir, { recursive: true, force: true });
} catch (err) {
this.logger.warn('[PackageVersionFileService.syncPackageVersionFiles:warn] remove tmpdir: %s, error: %s',
tmpdir, err);
this.logger.warn(
'[PackageVersionFileService.syncPackageVersionFiles:warn] remove tmpdir: %s, error: %s',
tmpdir,
err
);
}
}
}
async #savePackageVersionFile(pkg: Package, pkgVersion: PackageVersion, path: string, localFile: string) {
async #savePackageVersionFile(
pkg: Package,
pkgVersion: PackageVersion,
path: string,
localFile: string
) {
const { directory, name } = this.#getDirectoryAndName(path);
let file = await this.packageVersionFileRepository.findPackageVersionFile(
pkgVersion.packageVersionId, directory, name);
pkgVersion.packageVersionId,
directory,
name
);
if (file) return file;
const stat = await fs.stat(localFile);
const distIntegrity = await calculateIntegrity(localFile);
@@ -270,8 +367,12 @@ export class PackageVersionFileService extends AbstractService {
});
try {
await this.packageVersionFileRepository.createPackageVersionFile(file);
this.logger.info('[PackageVersionFileService.#savePackageVersionFile:success] fileId: %s, size: %s, path: %s',
file.packageVersionFileId, dist.size, file.path);
this.logger.info(
'[PackageVersionFileService.#savePackageVersionFile:success] fileId: %s, size: %s, path: %s',
file.packageVersionFileId,
dist.size,
file.path
);
} catch (err) {
// ignore Duplicate entry
if (isDuplicateKeyError(err)) {
@@ -302,7 +403,7 @@ export class PackageVersionFileService extends AbstractService {
#matchReadmeFilename(filename: string) {
// support README,README.*
// https://github.com/npm/read-package-json/blob/main/lib/read-json.js#L280
return (/^README(\.\w{1,20}|$)/i.test(filename));
return /^README(\.\w{1,20}|$)/i.test(filename);
}
// https://github.com/npm/read-package-json/blob/main/lib/read-json.js#L280
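
A few hypothetical filenames run through the regex above show what counts as a readme:

const isReadme = (filename: string) => /^README(\.\w{1,20}|$)/i.test(filename);

isReadme('README'); // true
isReadme('readme.md'); // true (case-insensitive)
isReadme('Readme.markdown'); // true
isReadme('README.'); // false (a dot must be followed by 1-20 word characters)
isReadme('NOT_README.md'); // false (must start with README)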

View File

@@ -1,14 +1,15 @@
import { AccessLevel, SingletonProto, Inject } from '@eggjs/tegg';
import semver, { Range } from 'semver';
import { Result, AliasResult } from 'npm-package-arg';
import { PackageVersionRepository } from '../../repository/PackageVersionRepository.js';
import type { Result, AliasResult } from 'npm-package-arg';
import type { PackageVersionRepository } from '../../repository/PackageVersionRepository.js';
import { getScopeAndName } from '../../common/PackageUtil.js';
import { SqlRange } from '../entity/SqlRange.js';
import { BugVersionService } from './BugVersionService.js';
import { type PackageJSONType, PackageRepository } from '../../repository/PackageRepository.js';
import { DistRepository } from '../../repository/DistRepository.js';
import { BugVersionAdvice } from '../entity/BugVersion.js';
import { PackageVersionBlockRepository } from '../../repository/PackageVersionBlockRepository.js';
import type { BugVersionService } from './BugVersionService.js';
import type { PackageRepository } from '../../repository/PackageRepository.js';
import { type PackageJSONType } from '../../repository/PackageRepository.js';
import type { DistRepository } from '../../repository/DistRepository.js';
import type { BugVersionAdvice } from '../entity/BugVersion.js';
import type { PackageVersionBlockRepository } from '../../repository/PackageVersionBlockRepository.js';
@SingletonProto({
accessLevel: AccessLevel.PUBLIC,
@@ -29,16 +30,23 @@ export class PackageVersionService {
@Inject()
private readonly distRepository: DistRepository;
async readManifest(pkgId: string, spec: Result, isFullManifests: boolean, withBugVersion = true): Promise<PackageJSONType | undefined> {
async readManifest(
pkgId: string,
spec: Result,
isFullManifests: boolean,
withBugVersion = true
): Promise<PackageJSONType | undefined> {
const realSpec = this.findRealSpec(spec);
let version = await this.getVersion(realSpec, false);
if (!version) {
return undefined;
}
let bugVersionAdvice: {
advice: BugVersionAdvice,
version: string,
} | undefined;
let bugVersionAdvice:
| {
advice: BugVersionAdvice;
version: string;
}
| undefined;
if (withBugVersion) {
const bugVersion = await this.bugVersionService.getBugVersion();
if (bugVersion) {
@@ -54,9 +62,15 @@ export class PackageVersionService {
}
let manifest;
if (isFullManifests) {
manifest = await this.distRepository.findPackageVersionManifest(pkgId, version);
manifest = await this.distRepository.findPackageVersionManifest(
pkgId,
version
);
} else {
manifest = await this.distRepository.findPackageAbbreviatedManifest(pkgId, version);
manifest = await this.distRepository.findPackageAbbreviatedManifest(
pkgId,
version
);
}
if (manifest && bugVersionAdvice) {
manifest.deprecated = `[WARNING] Use ${bugVersionAdvice.advice.version} instead of ${bugVersionAdvice.version}, reason: ${bugVersionAdvice.advice.reason}`;
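
With invented advice values, the deprecated notice produced above reads like this:

// Hypothetical bug-version advice, for illustration only:
const bugVersionAdvice = {
  version: '1.0.1', // the buggy version that was requested
  advice: { version: '1.0.2', reason: 'fix broken publish' },
};
// manifest.deprecated becomes:
// '[WARNING] Use 1.0.2 instead of 1.0.1, reason: fix broken publish'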
@@ -82,12 +96,19 @@ export class PackageVersionService {
return realSpec;
}
async getVersion(spec: Result, withBugVersion = true): Promise<string | undefined | null> {
async getVersion(
spec: Result,
withBugVersion = true
): Promise<string | undefined | null> {
let version: string | undefined | null;
const [ scope, name ] = getScopeAndName(spec.name!);
const [scope, name] = getScopeAndName(spec.name!);
// Resolve via the tag first
if (spec.type === 'tag') {
version = await this.packageVersionRepository.findVersionByTag(scope, name, spec.fetchSpec!);
version = await this.packageVersionRepository.findVersionByTag(
scope,
name,
spec.fetchSpec!
);
} else if (spec.type === 'version') {
// 1.0.0
// '=1.0.0' => '1.0.0'
@@ -97,17 +118,31 @@ export class PackageVersionService {
// For a@1.1, 1.1 is parsed as a range, which breaks when a matching distTag exists
// Keep compatibility for that case here
// Only query when the spec is not a version, to reduce the number of requests
const versionMatchTag = await this.packageVersionRepository.findVersionByTag(scope, name, spec.fetchSpec!);
const versionMatchTag =
await this.packageVersionRepository.findVersionByTag(
scope,
name,
spec.fetchSpec!
);
if (versionMatchTag) {
version = versionMatchTag;
} else {
const range = new Range(spec.fetchSpec!);
const paddingSemVer = new SqlRange(range);
if (paddingSemVer.containPreRelease) {
const versions = await this.packageVersionRepository.findSatisfyVersionsWithPrerelease(scope, name, paddingSemVer);
const versions =
await this.packageVersionRepository.findSatisfyVersionsWithPrerelease(
scope,
name,
paddingSemVer
);
version = semver.maxSatisfying(versions, range);
} else {
version = await this.packageVersionRepository.findMaxSatisfyVersion(scope, name, paddingSemVer);
version = await this.packageVersionRepository.findMaxSatisfyVersion(
scope,
name,
paddingSemVer
);
}
}
}
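
The branches above follow the spec type reported by npm-package-arg; a rough sketch of the mapping, assuming the usual npa behaviour and the made-up package name foo:

import npa from 'npm-package-arg';

npa('foo@latest').type; // 'tag'     -> resolved via findVersionByTag
npa('foo@1.0.0').type; // 'version' -> the exact version is used directly ('=1.0.0' is normalized too)
npa('foo@^1.0.0').type; // 'range'   -> try a matching dist-tag first (the a@1.1 case), then max-satisfying via SqlRange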
@@ -124,7 +159,7 @@ export class PackageVersionService {
}
async findBlockInfo(fullname: string) {
const [ scope, name ] = getScopeAndName(fullname);
const [scope, name] = getScopeAndName(fullname);
const packageId = await this.packageRepository.findPackageId(scope, name);
if (!packageId) {
return null;

View File

@@ -1,20 +1,32 @@
import { EggHttpClient, HttpClientRequestOptions, HttpClientResponse, Context } from 'egg';
import type {
EggHttpClient,
HttpClientRequestOptions,
HttpClientResponse,
Context,
} from 'egg';
import { ForbiddenError } from 'egg-errors';
import { SingletonProto, AccessLevel, Inject } from '@eggjs/tegg';
import { BackgroundTaskHelper } from '@eggjs/tegg-background-task';
import type { BackgroundTaskHelper } from '@eggjs/tegg-background-task';
import { valid as semverValid } from 'semver';
import { AbstractService } from '../../common/AbstractService.js';
import { TaskService } from './TaskService.js';
import { CacheService } from './CacheService.js';
import { RegistryManagerService } from './RegistryManagerService.js';
import { NPMRegistry } from '../../common/adapter/NPMRegistry.js';
import { NFSAdapter } from '../../common/adapter/NFSAdapter.js';
import type { TaskService } from './TaskService.js';
import type { CacheService } from './CacheService.js';
import type { RegistryManagerService } from './RegistryManagerService.js';
import type { NPMRegistry } from '../../common/adapter/NPMRegistry.js';
import type { NFSAdapter } from '../../common/adapter/NFSAdapter.js';
import { ProxyCache } from '../entity/ProxyCache.js';
import { Task, UpdateProxyCacheTaskOptions, CreateUpdateProxyCacheTask } from '../entity/Task.js';
import { ProxyCacheRepository } from '../../repository/ProxyCacheRepository.js';
import type {
UpdateProxyCacheTaskOptions,
CreateUpdateProxyCacheTask,
} from '../entity/Task.js';
import { Task } from '../entity/Task.js';
import type { ProxyCacheRepository } from '../../repository/ProxyCacheRepository.js';
import { TaskType, TaskState } from '../../common/enum/Task.js';
import { calculateIntegrity } from '../../common/PackageUtil.js';
import { ABBREVIATED_META_TYPE, PROXY_CACHE_DIR_NAME } from '../../common/constants.js';
import {
ABBREVIATED_META_TYPE,
PROXY_CACHE_DIR_NAME,
} from '../../common/constants.js';
import { DIST_NAMES, isPkgManifest } from '../entity/Package.js';
import type {
AbbreviatedPackageManifestType,
@@ -27,9 +39,13 @@ function isoNow() {
return new Date().toISOString();
}
type GetSourceManifestAndCacheReturnType<T> = T extends DIST_NAMES.ABBREVIATED | DIST_NAMES.MANIFEST ? AbbreviatedPackageJSONType | PackageJSONType :
T extends DIST_NAMES.FULL_MANIFESTS | DIST_NAMES.ABBREVIATED_MANIFESTS ? AbbreviatedPackageManifestType|PackageManifestType : never;
type GetSourceManifestAndCacheReturnType<T> = T extends
| DIST_NAMES.ABBREVIATED
| DIST_NAMES.MANIFEST
? AbbreviatedPackageJSONType | PackageJSONType
: T extends DIST_NAMES.FULL_MANIFESTS | DIST_NAMES.ABBREVIATED_MANIFESTS
? AbbreviatedPackageManifestType | PackageManifestType
: never;
@SingletonProto({
accessLevel: AccessLevel.PUBLIC,
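
Spelled out, the conditional type above resolves as follows; the aliases are illustrative and the referenced enum members and manifest types come from the surrounding imports:

// Per-version files map to the single-version manifest types:
type VersionLevel = GetSourceManifestAndCacheReturnType<DIST_NAMES.ABBREVIATED>;
//   = AbbreviatedPackageJSONType | PackageJSONType
// Whole-package files map to the package manifest types:
type PackageLevel = GetSourceManifestAndCacheReturnType<DIST_NAMES.FULL_MANIFESTS>;
//   = AbbreviatedPackageManifestType | PackageManifestType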
@@ -50,33 +66,54 @@ export class ProxyCacheService extends AbstractService {
@Inject()
private readonly cacheService: CacheService;
@Inject()
private readonly backgroundTaskHelper:BackgroundTaskHelper;
private readonly backgroundTaskHelper: BackgroundTaskHelper;
async getPackageVersionTarResponse(fullname: string, ctx: Context): Promise<HttpClientResponse> {
async getPackageVersionTarResponse(
fullname: string,
ctx: Context
): Promise<HttpClientResponse> {
if (this.config.cnpmcore.syncPackageBlockList.includes(fullname)) {
throw new ForbiddenError(`stop proxy by block list: ${JSON.stringify(this.config.cnpmcore.syncPackageBlockList)}`);
throw new ForbiddenError(
`stop proxy by block list: ${JSON.stringify(this.config.cnpmcore.syncPackageBlockList)}`
);
}
return await this.getProxyResponse(ctx);
}
async getPackageManifest(fullname: string, fileType: DIST_NAMES.FULL_MANIFESTS| DIST_NAMES.ABBREVIATED_MANIFESTS): Promise<AbbreviatedPackageManifestType|PackageManifestType> {
async getPackageManifest(
fullname: string,
fileType: DIST_NAMES.FULL_MANIFESTS | DIST_NAMES.ABBREVIATED_MANIFESTS
): Promise<AbbreviatedPackageManifestType | PackageManifestType> {
const isFullManifests = fileType === DIST_NAMES.FULL_MANIFESTS;
const cachedStoreKey = (await this.proxyCacheRepository.findProxyCache(fullname, fileType))?.filePath;
const cachedStoreKey = (
await this.proxyCacheRepository.findProxyCache(fullname, fileType)
)?.filePath;
if (cachedStoreKey) {
try {
const nfsBytes = await this.nfsAdapter.getBytes(cachedStoreKey);
if (!nfsBytes) throw new Error('not found proxy cache, try again later.');
if (!nfsBytes)
throw new Error('not found proxy cache, try again later.');
const nfsBuffer = Buffer.from(nfsBytes);
const { shasum: etag } = await calculateIntegrity(nfsBytes);
await this.cacheService.savePackageEtagAndManifests(fullname, isFullManifests, etag, nfsBuffer);
await this.cacheService.savePackageEtagAndManifests(
fullname,
isFullManifests,
etag,
nfsBuffer
);
const nfsString = nfsBuffer.toString();
const nfsPkgManifest = JSON.parse(nfsString);
return nfsPkgManifest as AbbreviatedPackageManifestType|PackageManifestType;
return nfsPkgManifest as
| AbbreviatedPackageManifestType
| PackageManifestType;
} catch (error) {
/* c8 ignore next 6 */
if (error.message.includes('not found proxy cache') || error.message.includes('Unexpected token : in JSON at')) {
if (
error.message.includes('not found proxy cache') ||
error.message.includes('Unexpected token : in JSON at')
) {
await this.nfsAdapter.remove(cachedStoreKey);
await this.proxyCacheRepository.removeProxyCache(fullname, fileType);
}
@@ -84,7 +121,10 @@ export class ProxyCacheService extends AbstractService {
}
}
const manifest = await this.getRewrittenManifest<typeof fileType>(fullname, fileType);
const manifest = await this.getRewrittenManifest<typeof fileType>(
fullname,
fileType
);
this.backgroundTaskHelper.run(async () => {
await this.storeRewrittenManifest(manifest, fullname, fileType);
const cachedFiles = ProxyCache.create({ fullname, fileType });
@@ -94,32 +134,55 @@ export class ProxyCacheService extends AbstractService {
}
// used by GET /:fullname/:versionOrTag
async getPackageVersionManifest(fullname: string, fileType: DIST_NAMES.ABBREVIATED | DIST_NAMES.MANIFEST, versionOrTag: string): Promise<AbbreviatedPackageJSONType|PackageJSONType> {
async getPackageVersionManifest(
fullname: string,
fileType: DIST_NAMES.ABBREVIATED | DIST_NAMES.MANIFEST,
versionOrTag: string
): Promise<AbbreviatedPackageJSONType | PackageJSONType> {
let version;
if (semverValid(versionOrTag)) {
version = versionOrTag;
} else {
const pkgManifest = await this.getPackageManifest(fullname, DIST_NAMES.ABBREVIATED_MANIFESTS);
const pkgManifest = await this.getPackageManifest(
fullname,
DIST_NAMES.ABBREVIATED_MANIFESTS
);
const distTags = pkgManifest['dist-tags'] || {};
version = distTags[versionOrTag] ? distTags[versionOrTag] : versionOrTag;
}
const cachedStoreKey = (await this.proxyCacheRepository.findProxyCache(fullname, fileType, version))?.filePath;
const cachedStoreKey = (
await this.proxyCacheRepository.findProxyCache(
fullname,
fileType,
version
)
)?.filePath;
if (cachedStoreKey) {
try {
const nfsBytes = await this.nfsAdapter.getBytes(cachedStoreKey);
if (!nfsBytes) throw new Error('not found proxy cache, try again later.');
if (!nfsBytes)
throw new Error('not found proxy cache, try again later.');
const nfsString = Buffer.from(nfsBytes!).toString();
return JSON.parse(nfsString) as PackageJSONType | AbbreviatedPackageJSONType;
return JSON.parse(nfsString) as
| PackageJSONType
| AbbreviatedPackageJSONType;
} catch (error) {
/* c8 ignore next 6 */
if (error.message.includes('not found proxy cache') || error.message.includes('Unexpected token : in JSON at')) {
if (
error.message.includes('not found proxy cache') ||
error.message.includes('Unexpected token : in JSON at')
) {
await this.nfsAdapter.remove(cachedStoreKey);
await this.proxyCacheRepository.removeProxyCache(fullname, fileType);
}
throw error;
}
}
const manifest = await this.getRewrittenManifest(fullname, fileType, versionOrTag);
const manifest = await this.getRewrittenManifest(
fullname,
fileType,
versionOrTag
);
this.backgroundTaskHelper.run(async () => {
await this.storeRewrittenManifest(manifest, fullname, fileType);
const cachedFiles = ProxyCache.create({ fullname, fileType, version });
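
A minimal sketch of the versionOrTag resolution above, with a hypothetical dist-tags map:

import { valid as semverValid } from 'semver';

function resolveVersion(versionOrTag: string, distTags: Record<string, string>): string {
  if (semverValid(versionOrTag)) return versionOrTag;
  return distTags[versionOrTag] ? distTags[versionOrTag] : versionOrTag;
}

resolveVersion('1.2.3', { latest: '2.0.0' }); // '1.2.3' (already a valid semver)
resolveVersion('latest', { latest: '2.0.0' }); // '2.0.0' (resolved through dist-tags)
resolveVersion('next', { latest: '2.0.0' }); // 'next' (unknown tag falls through as-is)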
@@ -128,28 +191,46 @@ export class ProxyCacheService extends AbstractService {
return manifest;
}
async removeProxyCache(fullname: string, fileType: DIST_NAMES, version?: string) {
async removeProxyCache(
fullname: string,
fileType: DIST_NAMES,
version?: string
) {
const storeKey = isPkgManifest(fileType)
? `/${PROXY_CACHE_DIR_NAME}/${fullname}/${fileType}`
: `/${PROXY_CACHE_DIR_NAME}/${fullname}/${version}/${fileType}`;
await this.nfsAdapter.remove(storeKey);
await this.proxyCacheRepository.removeProxyCache(fullname, fileType, version);
await this.proxyCacheRepository.removeProxyCache(
fullname,
fileType,
version
);
}
replaceTarballUrl<T extends DIST_NAMES>(manifest: GetSourceManifestAndCacheReturnType<T>, fileType: T) {
replaceTarballUrl<T extends DIST_NAMES>(
manifest: GetSourceManifestAndCacheReturnType<T>,
fileType: T
) {
const { sourceRegistry, registry } = this.config.cnpmcore;
if (isPkgManifest(fileType)) {
// pkg manifest
const versionMap = (manifest as AbbreviatedPackageManifestType|PackageManifestType)?.versions;
const versionMap = (
manifest as AbbreviatedPackageManifestType | PackageManifestType
)?.versions;
for (const key in versionMap) {
const versionItem = versionMap[key];
if (versionItem?.dist?.tarball) {
versionItem.dist.tarball = versionItem.dist.tarball.replace(sourceRegistry, registry);
versionItem.dist.tarball = versionItem.dist.tarball.replace(
sourceRegistry,
registry
);
}
}
} else {
// pkg version manifest
const distItem = (manifest as AbbreviatedPackageJSONType | PackageJSONType).dist;
const distItem = (
manifest as AbbreviatedPackageJSONType | PackageJSONType
).dist;
if (distItem?.tarball) {
distItem.tarball = distItem.tarball.replace(sourceRegistry, registry);
}
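
The rewrite itself is a plain string replacement of the upstream registry host; with assumed example values:

const sourceRegistry = 'https://registry.npmjs.org';
const registry = 'https://npm.example.com';
const tarball = 'https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz';
tarball.replace(sourceRegistry, registry);
// -> 'https://npm.example.com/lodash/-/lodash-4.17.21.tgz'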
@@ -157,8 +238,14 @@ export class ProxyCacheService extends AbstractService {
return manifest;
}
async createTask(targetName: string, options: UpdateProxyCacheTaskOptions): Promise<CreateUpdateProxyCacheTask> {
return await this.taskService.createTask(Task.createUpdateProxyCache(targetName, options), false) as CreateUpdateProxyCacheTask;
async createTask(
targetName: string,
options: UpdateProxyCacheTaskOptions
): Promise<CreateUpdateProxyCacheTask> {
return (await this.taskService.createTask(
Task.createUpdateProxyCache(targetName, options),
false
)) as CreateUpdateProxyCacheTask;
}
async findExecuteTask() {
@@ -170,58 +257,115 @@ export class ProxyCacheService extends AbstractService {
const fullname = (task as CreateUpdateProxyCacheTask).data.fullname;
const { fileType, version } = (task as CreateUpdateProxyCacheTask).data;
let cachedManifest;
logs.push(`[${isoNow()}] 🚧🚧🚧🚧🚧 Start update "${fullname}-${fileType}" 🚧🚧🚧🚧🚧`);
logs.push(
`[${isoNow()}] 🚧🚧🚧🚧🚧 Start update "${fullname}-${fileType}" 🚧🚧🚧🚧🚧`
);
try {
const cachedFiles = await this.proxyCacheRepository.findProxyCache(fullname, fileType);
if (!cachedFiles) throw new Error('task params error, can not found record in repo.');
cachedManifest = await this.getRewrittenManifest<typeof fileType>(fullname, fileType);
const cachedFiles = await this.proxyCacheRepository.findProxyCache(
fullname,
fileType
);
if (!cachedFiles)
throw new Error('task params error, can not found record in repo.');
cachedManifest = await this.getRewrittenManifest<typeof fileType>(
fullname,
fileType
);
await this.storeRewrittenManifest(cachedManifest, fullname, fileType);
ProxyCache.update(cachedFiles);
await this.proxyCacheRepository.saveProxyCache(cachedFiles);
} catch (error) {
task.error = error;
logs.push(`[${isoNow()}] ❌ ${task.error}`);
logs.push(`[${isoNow()}] ❌❌❌❌❌ ${fullname}-${fileType} ${version ?? ''} ❌❌❌❌❌`);
logs.push(
`[${isoNow()}] ❌❌❌❌❌ ${fullname}-${fileType} ${version ?? ''} ❌❌❌❌❌`
);
await this.taskService.finishTask(task, TaskState.Fail, logs.join('\n'));
this.logger.info('[ProxyCacheService.executeTask:fail] taskId: %s, targetName: %s, %s',
task.taskId, task.targetName, task.error);
this.logger.info(
'[ProxyCacheService.executeTask:fail] taskId: %s, targetName: %s, %s',
task.taskId,
task.targetName,
task.error
);
return;
}
logs.push(`[${isoNow()}] 🟢 Update Success.`);
const isFullManifests = fileType === DIST_NAMES.FULL_MANIFESTS;
const cachedKey = await this.cacheService.getPackageEtag(fullname, isFullManifests);
const cachedKey = await this.cacheService.getPackageEtag(
fullname,
isFullManifests
);
if (cachedKey) {
const cacheBytes = Buffer.from(JSON.stringify(cachedManifest));
const { shasum: etag } = await calculateIntegrity(cacheBytes);
await this.cacheService.savePackageEtagAndManifests(fullname, isFullManifests, etag, cacheBytes);
await this.cacheService.savePackageEtagAndManifests(
fullname,
isFullManifests,
etag,
cacheBytes
);
logs.push(`[${isoNow()}] 🟢 Update Cache Success.`);
}
await this.taskService.finishTask(task, TaskState.Success, logs.join('\n'));
}
// only used by schedule task
private async getRewrittenManifest<T extends DIST_NAMES>(fullname:string, fileType: T, versionOrTag?:string): Promise<GetSourceManifestAndCacheReturnType<T>> {
private async getRewrittenManifest<T extends DIST_NAMES>(
fullname: string,
fileType: T,
versionOrTag?: string
): Promise<GetSourceManifestAndCacheReturnType<T>> {
let responseResult;
const USER_AGENT = 'npm_service.cnpmjs.org/cnpmcore';
switch (fileType) {
case DIST_NAMES.FULL_MANIFESTS: {
const url = `/${encodeURIComponent(fullname)}?t=${Date.now()}&cache=0`;
responseResult = await this.getProxyResponse({ url, headers: { accept: 'application/json', 'user-agent': USER_AGENT } }, { dataType: 'json' });
responseResult = await this.getProxyResponse(
{
url,
headers: { accept: 'application/json', 'user-agent': USER_AGENT },
},
{ dataType: 'json' }
);
break;
}
case DIST_NAMES.ABBREVIATED_MANIFESTS: {
const url = `/${encodeURIComponent(fullname)}?t=${Date.now()}&cache=0`;
responseResult = await this.getProxyResponse({ url, headers: { accept: ABBREVIATED_META_TYPE, 'user-agent': USER_AGENT } }, { dataType: 'json' });
responseResult = await this.getProxyResponse(
{
url,
headers: {
accept: ABBREVIATED_META_TYPE,
'user-agent': USER_AGENT,
},
},
{ dataType: 'json' }
);
break;
}
case DIST_NAMES.MANIFEST: {
const url = `/${encodeURIComponent(fullname)}/${encodeURIComponent(versionOrTag!)}`;
responseResult = await this.getProxyResponse({ url, headers: { accept: 'application/json', 'user-agent': USER_AGENT } }, { dataType: 'json' });
responseResult = await this.getProxyResponse(
{
url,
headers: { accept: 'application/json', 'user-agent': USER_AGENT },
},
{ dataType: 'json' }
);
break;
}
case DIST_NAMES.ABBREVIATED: {
const url = `/${encodeURIComponent(fullname)}/${encodeURIComponent(versionOrTag!)}`;
responseResult = await this.getProxyResponse({ url, headers: { accept: ABBREVIATED_META_TYPE, 'user-agent': USER_AGENT } }, { dataType: 'json' });
responseResult = await this.getProxyResponse(
{
url,
headers: {
accept: ABBREVIATED_META_TYPE,
'user-agent': USER_AGENT,
},
},
{ dataType: 'json' }
);
break;
}
default:
@@ -233,7 +377,11 @@ export class ProxyCacheService extends AbstractService {
return manifest;
}
private async storeRewrittenManifest(manifest: any, fullname: string, fileType: DIST_NAMES) {
private async storeRewrittenManifest(
manifest: any,
fullname: string,
fileType: DIST_NAMES
) {
let storeKey: string;
if (isPkgManifest(fileType)) {
storeKey = `/${PROXY_CACHE_DIR_NAME}/${fullname}/${fileType}`;
@@ -245,14 +393,19 @@ export class ProxyCacheService extends AbstractService {
await this.nfsAdapter.uploadBytes(storeKey, nfsBytes);
}
async getProxyResponse(ctx: Partial<Context>, options?: HttpClientRequestOptions): Promise<HttpClientResponse> {
async getProxyResponse(
ctx: Partial<Context>,
options?: HttpClientRequestOptions
): Promise<HttpClientResponse> {
const registry = this.npmRegistry.registry;
const remoteAuthToken = await this.registryManagerService.getAuthTokenByRegistryHost(registry);
const authorization = this.npmRegistry.genAuthorizationHeader(remoteAuthToken);
const remoteAuthToken =
await this.registryManagerService.getAuthTokenByRegistryHost(registry);
const authorization =
this.npmRegistry.genAuthorizationHeader(remoteAuthToken);
const url = `${this.npmRegistry.registry}${ctx.url}`;
const res = await this.httpclient.request(url, {
const res = (await this.httpclient.request(url, {
timing: true,
followRedirect: true,
// once redirection is also count as a retry
@@ -268,8 +421,12 @@ export class ProxyCacheService extends AbstractService {
'x-forwarded-for': ctx?.ip,
via: `1.1, ${this.config.cnpmcore.registry}`,
},
}) as HttpClientResponse;
this.logger.info('[ProxyCacheService:getProxyStreamResponse] %s, status: %s', url, res.status);
})) as HttpClientResponse;
this.logger.info(
'[ProxyCacheService:getProxyStreamResponse] %s, status: %s',
url,
res.status
);
return res;
}
}

View File

@@ -1,23 +1,30 @@
import {
AccessLevel,
SingletonProto,
Inject,
} from '@eggjs/tegg';
import { AccessLevel, SingletonProto, Inject } from '@eggjs/tegg';
import { E400, NotFoundError } from 'egg-errors';
import { RegistryRepository } from '../../repository/RegistryRepository.js';
import type { RegistryRepository } from '../../repository/RegistryRepository.js';
import { AbstractService } from '../../common/AbstractService.js';
import { Registry } from '../entity/Registry.js';
import { PageOptions, PageResult } from '../util/EntityUtil.js';
import { ScopeManagerService } from './ScopeManagerService.js';
import { TaskService } from './TaskService.js';
import type { PageOptions, PageResult } from '../util/EntityUtil.js';
import type { ScopeManagerService } from './ScopeManagerService.js';
import type { TaskService } from './TaskService.js';
import { Task } from '../entity/Task.js';
import { ChangesStreamMode, PresetRegistryName } from '../../common/constants.js';
import {
ChangesStreamMode,
PresetRegistryName,
} from '../../common/constants.js';
import { RegistryType } from '../../common/enum/Registry.js';
export interface CreateRegistryCmd extends Pick<Registry, 'changeStream' | 'host' | 'userPrefix' | 'type' | 'name' | 'authToken' > {
export interface CreateRegistryCmd
extends Pick<
Registry,
'changeStream' | 'host' | 'userPrefix' | 'type' | 'name' | 'authToken'
> {
operatorId?: string;
}
export interface UpdateRegistryCmd extends Pick<Registry, 'changeStream' | 'host' | 'type' | 'name' | 'authToken' > {
export interface UpdateRegistryCmd
extends Pick<
Registry,
'changeStream' | 'host' | 'type' | 'name' | 'authToken'
> {
operatorId?: string;
}
export interface RemoveRegistryCmd extends Pick<Registry, 'registryId'> {
@@ -43,26 +50,50 @@ export class RegistryManagerService extends AbstractService {
async createSyncChangesStream(startSyncCmd: StartSyncCmd): Promise<void> {
const { registryId, operatorId = '-', since } = startSyncCmd;
this.logger.info('[RegistryManagerService.startSyncChangesStream:prepare] operatorId: %s, registryId: %s, since: %s', operatorId, registryId, since);
const registry = await this.registryRepository.findRegistryByRegistryId(registryId);
this.logger.info(
'[RegistryManagerService.startSyncChangesStream:prepare] operatorId: %s, registryId: %s, since: %s',
operatorId,
registryId,
since
);
const registry =
await this.registryRepository.findRegistryByRegistryId(registryId);
if (!registry) {
throw new NotFoundError(`registry ${registryId} not found`);
}
// Avoid conflicts with GLOBAL_WORKER: only one default global registry is allowed
const scopesCount = await this.scopeManagerService.countByRegistryId(registryId);
const scopesCount =
await this.scopeManagerService.countByRegistryId(registryId);
if (scopesCount === 0) {
throw new E400(`registry ${registryId} has no scopes, please create scopes first`);
throw new E400(
`registry ${registryId} has no scopes, please create scopes first`
);
}
// Start the changeStream
const targetName = `${registry.name.toUpperCase()}_WORKER`;
await this.taskService.createTask(Task.createChangesStream(targetName, registryId, since), false);
await this.taskService.createTask(
Task.createChangesStream(targetName, registryId, since),
false
);
}
async createRegistry(createCmd: CreateRegistryCmd): Promise<Registry> {
const { name, changeStream = '', host, userPrefix = '', type, operatorId = '-', authToken } = createCmd;
this.logger.info('[RegistryManagerService.createRegistry:prepare] operatorId: %s, createCmd: %j', operatorId, createCmd);
const {
name,
changeStream = '',
host,
userPrefix = '',
type,
operatorId = '-',
authToken,
} = createCmd;
this.logger.info(
'[RegistryManagerService.createRegistry:prepare] operatorId: %s, createCmd: %j',
operatorId,
createCmd
);
const registry = Registry.create({
name,
changeStream,
@@ -78,9 +109,21 @@ export class RegistryManagerService extends AbstractService {
// Update part of the registry info
// Changing the userPrefix field is not allowed
async updateRegistry(registryId: string, updateCmd: UpdateRegistryCmd) {
const { name, changeStream, host, type, operatorId = '-', authToken } = updateCmd;
this.logger.info('[RegistryManagerService.updateRegistry:prepare] operatorId: %s, updateCmd: %j', operatorId, updateCmd);
const registry = await this.registryRepository.findRegistryByRegistryId(registryId);
const {
name,
changeStream,
host,
type,
operatorId = '-',
authToken,
} = updateCmd;
this.logger.info(
'[RegistryManagerService.updateRegistry:prepare] operatorId: %s, updateCmd: %j',
operatorId,
updateCmd
);
const registry =
await this.registryRepository.findRegistryByRegistryId(registryId);
if (!registry) {
throw new NotFoundError(`registry ${registryId} not found`);
}
@@ -108,7 +151,9 @@ export class RegistryManagerService extends AbstractService {
}
async findByRegistryHost(host?: string): Promise<Registry | null> {
return host ? await this.registryRepository.findRegistryByRegistryHost(host) : null;
return host
? await this.registryRepository.findRegistryByRegistryHost(host)
: null;
}
// Remove a Registry
@@ -116,13 +161,22 @@ export class RegistryManagerService extends AbstractService {
// Also remove the corresponding scope data
async remove(removeCmd: RemoveRegistryCmd): Promise<void> {
const { registryId, operatorId = '-' } = removeCmd;
this.logger.info('[RegistryManagerService.remove:prepare] operatorId: %s, registryId: %s', operatorId, registryId);
this.logger.info(
'[RegistryManagerService.remove:prepare] operatorId: %s, registryId: %s',
operatorId,
registryId
);
await this.registryRepository.removeRegistry(registryId);
await this.scopeManagerService.removeByRegistryId({ registryId, operatorId });
await this.scopeManagerService.removeByRegistryId({
registryId,
operatorId,
});
}
async ensureSelfRegistry(): Promise<Registry> {
const existRegistry = await this.registryRepository.findRegistry(PresetRegistryName.self);
const existRegistry = await this.registryRepository.findRegistry(
PresetRegistryName.self
);
if (existRegistry) {
return existRegistry;
}
@@ -138,18 +192,26 @@ export class RegistryManagerService extends AbstractService {
});
return newRegistry;
}
async ensureDefaultRegistry(): Promise<Registry> {
const existRegistry = await this.registryRepository.findRegistry(PresetRegistryName.default);
const existRegistry = await this.registryRepository.findRegistry(
PresetRegistryName.default
);
if (existRegistry) {
return existRegistry;
}
// Generate the default registry from the config file
const { changesStreamRegistryMode, changesStreamRegistry: changesStreamHost, sourceRegistry: host } = this.config.cnpmcore;
const type = changesStreamRegistryMode === ChangesStreamMode.json ? RegistryType.Cnpmcore : RegistryType.Npm;
const {
changesStreamRegistryMode,
changesStreamRegistry: changesStreamHost,
sourceRegistry: host,
} = this.config.cnpmcore;
const type =
changesStreamRegistryMode === ChangesStreamMode.json
? RegistryType.Cnpmcore
: RegistryType.Npm;
const registry = await this.createRegistry({
name: PresetRegistryName.default,
type,
@@ -159,15 +221,13 @@ export class RegistryManagerService extends AbstractService {
});
return registry;
}
async getAuthTokenByRegistryHost(host: string): Promise<string|undefined> {
async getAuthTokenByRegistryHost(host: string): Promise<string | undefined> {
const registry = await this.findByRegistryHost(host);
if (!registry) {
return undefined;
}
return registry.authToken;
}
}

View File

@@ -1,17 +1,14 @@
import {
AccessLevel,
SingletonProto,
Inject,
} from '@eggjs/tegg';
import { ScopeRepository } from '../../repository/ScopeRepository.js';
import { AccessLevel, SingletonProto, Inject } from '@eggjs/tegg';
import type { ScopeRepository } from '../../repository/ScopeRepository.js';
import { AbstractService } from '../../common/AbstractService.js';
import { Scope } from '../entity/Scope.js';
import { PageOptions, PageResult } from '../util/EntityUtil.js';
import type { PageOptions, PageResult } from '../util/EntityUtil.js';
export interface CreateScopeCmd extends Pick<Scope, 'name' | 'registryId'> {
operatorId?: string;
}
export interface UpdateRegistryCmd extends Pick<Scope, 'name' | 'scopeId' | 'registryId'> {
export interface UpdateRegistryCmd
extends Pick<Scope, 'name' | 'scopeId' | 'registryId'> {
operatorId?: string;
}
@@ -43,7 +40,11 @@ export class ScopeManagerService extends AbstractService {
async createScope(createCmd: CreateScopeCmd): Promise<Scope> {
const { name, registryId, operatorId } = createCmd;
this.logger.info('[ScopeManagerService.CreateScope:prepare] operatorId: %s, createCmd: %s', operatorId, createCmd);
this.logger.info(
'[ScopeManagerService.CreateScope:prepare] operatorId: %s, createCmd: %s',
operatorId,
createCmd
);
const scope = Scope.create({
name,
registryId,
@@ -56,19 +57,32 @@ export class ScopeManagerService extends AbstractService {
return await this.scopeRepository.listScopes(page);
}
async listScopesByRegistryId(registryId: string, page: PageOptions): Promise<PageResult<Scope>> {
async listScopesByRegistryId(
registryId: string,
page: PageOptions
): Promise<PageResult<Scope>> {
return await this.scopeRepository.listScopesByRegistryId(registryId, page);
}
async removeByRegistryId(removeCmd: RemoveScopeByRegistryIdCmd): Promise<void> {
async removeByRegistryId(
removeCmd: RemoveScopeByRegistryIdCmd
): Promise<void> {
const { registryId, operatorId } = removeCmd;
this.logger.info('[ScopeManagerService.remove:prepare] operatorId: %s, registryId: %s', operatorId, registryId);
this.logger.info(
'[ScopeManagerService.remove:prepare] operatorId: %s, registryId: %s',
operatorId,
registryId
);
return await this.scopeRepository.removeScopeByRegistryId(registryId);
}
async remove(removeCmd: RemoveScopeCmd): Promise<void> {
const { scopeId, operatorId } = removeCmd;
this.logger.info('[ScopeManagerService.remove:prepare] operatorId: %s, scopeId: %s', operatorId, scopeId);
this.logger.info(
'[ScopeManagerService.remove:prepare] operatorId: %s, scopeId: %s',
operatorId,
scopeId
);
return await this.scopeRepository.removeScope(scopeId);
}
}

View File

@@ -1,14 +1,11 @@
import {
AccessLevel,
SingletonProto,
Inject,
} from '@eggjs/tegg';
import { NFSAdapter } from '../../common/adapter/NFSAdapter.js';
import { AccessLevel, SingletonProto, Inject } from '@eggjs/tegg';
import type { NFSAdapter } from '../../common/adapter/NFSAdapter.js';
import { TaskState, TaskType } from '../../common/enum/Task.js';
import { AbstractService } from '../../common/AbstractService.js';
import { TaskRepository } from '../../repository/TaskRepository.js';
import { Task, CreateSyncPackageTaskData } from '../entity/Task.js';
import { QueueAdapter } from '../../common/typing.js';
import type { TaskRepository } from '../../repository/TaskRepository.js';
import type { CreateSyncPackageTaskData } from '../entity/Task.js';
import { Task } from '../entity/Task.js';
import type { QueueAdapter } from '../../common/typing.js';
@SingletonProto({
accessLevel: AccessLevel.PUBLIC,
@@ -26,7 +23,10 @@ export class TaskService extends AbstractService {
}
public async createTask(task: Task, addTaskQueueOnExists: boolean) {
const existsTask = await this.taskRepository.findTaskByTargetName(task.targetName, task.type);
const existsTask = await this.taskRepository.findTaskByTargetName(
task.targetName,
task.type
);
// Only merge tasks for package sync; other scenarios rely on bizId for task idempotency
if (existsTask && Task.needMergeWhenWaiting(task.type)) {
@@ -35,15 +35,23 @@ export class TaskService extends AbstractService {
if (existsTask.state === TaskState.Waiting) {
if (task.type === TaskType.SyncPackage) {
// A task with specificVersions may be mergeable into the existing task
const specificVersions = (task as Task<CreateSyncPackageTaskData>).data?.specificVersions;
const existsTaskSpecificVersions = (existsTask as Task<CreateSyncPackageTaskData>).data?.specificVersions;
const specificVersions = (task as Task<CreateSyncPackageTaskData>)
.data?.specificVersions;
const existsTaskSpecificVersions = (
existsTask as Task<CreateSyncPackageTaskData>
).data?.specificVersions;
if (existsTaskSpecificVersions) {
if (specificVersions) {
// Both the existing task and the new task sync specific versions: merge both version lists into the existing task
await this.taskRepository.updateSpecificVersionsOfWaitingTask(existsTask, specificVersions);
await this.taskRepository.updateSpecificVersionsOfWaitingTask(
existsTask,
specificVersions
);
} else {
// The new task is a full sync: remove the specific versions from the existing task so it becomes a full sync task
await this.taskRepository.updateSpecificVersionsOfWaitingTask(existsTask);
await this.taskRepository.updateSpecificVersionsOfWaitingTask(
existsTask
);
}
}
// The existing task is already a full sync task: just raise its priority
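
The merge rules above can be summarised with a hypothetical helper (not part of the codebase), where undefined stands for "sync all versions":

function mergeSpecificVersions(
  existingVersions: string[] | undefined,
  newVersions: string[] | undefined
): string[] | undefined {
  // the existing task already syncs everything: keep it as-is, only its priority is raised
  if (!existingVersions) return undefined;
  // the new task is a full sync: drop the version list so the existing task syncs everything
  if (!newVersions) return undefined;
  // both list specific versions: union them onto the existing task
  return [...new Set([...existingVersions, ...newVersions])];
}

mergeSpecificVersions(['1.0.0'], ['2.0.0']); // ['1.0.0', '2.0.0']
mergeSpecificVersions(['1.0.0'], undefined); // undefined (widened to a full sync)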
@@ -54,8 +62,13 @@ export class TaskService extends AbstractService {
if (queueLength < this.config.cnpmcore.taskQueueHighWaterSize) {
// make sure waiting task in queue
await this.queueAdapter.push<string>(task.type, existsTask.taskId);
this.logger.info('[TaskService.createTask:exists-to-queue] taskType: %s, targetName: %s, taskId: %s, queue size: %s',
task.type, task.targetName, task.taskId, queueLength);
this.logger.info(
'[TaskService.createTask:exists-to-queue] taskType: %s, targetName: %s, taskId: %s, queue size: %s',
task.type,
task.targetName,
task.taskId,
queueLength
);
}
}
}
@@ -64,8 +77,13 @@ export class TaskService extends AbstractService {
await this.taskRepository.saveTask(task);
await this.queueAdapter.push<string>(task.type, task.taskId);
const queueLength = await this.getTaskQueueLength(task.type);
this.logger.info('[TaskService.createTask:new] taskType: %s, targetName: %s, taskId: %s, queue size: %s',
task.type, task.targetName, task.taskId, queueLength);
this.logger.info(
'[TaskService.createTask:new] taskType: %s, targetName: %s, taskId: %s, queue size: %s',
task.type,
task.targetName,
task.taskId,
queueLength
);
return task;
}
@@ -77,8 +95,13 @@ export class TaskService extends AbstractService {
await this.taskRepository.saveTask(task);
await this.queueAdapter.push<string>(task.type, task.taskId);
const queueLength = await this.getTaskQueueLength(task.type);
this.logger.info('[TaskService.retryTask:save] taskType: %s, targetName: %s, taskId: %s, queue size: %s',
task.type, task.targetName, task.taskId, queueLength);
this.logger.info(
'[TaskService.retryTask:save] taskType: %s, targetName: %s, taskId: %s, queue size: %s',
task.type,
task.targetName,
task.taskId,
queueLength
);
}
public async findTask(taskId: string) {
@@ -108,7 +131,10 @@ export class TaskService extends AbstractService {
}
const condition = task.start();
const saveSucceed = await this.taskRepository.idempotentSaveTask(task, condition);
const saveSucceed = await this.taskRepository.idempotentSaveTask(
task,
condition
);
if (!saveSucceed) {
taskId = await this.queueAdapter.pop<string>(taskType);
continue;
@@ -121,7 +147,10 @@ export class TaskService extends AbstractService {
public async retryExecuteTimeoutTasks() {
// try processing timeout tasks in 10 mins
const tasks = await this.taskRepository.findTimeoutTasks(TaskState.Processing, 60000 * 10);
const tasks = await this.taskRepository.findTimeoutTasks(
TaskState.Processing,
60000 * 10
);
for (const task of tasks) {
try {
// ignore ChangesStream task, it won't timeout
@@ -129,7 +158,11 @@ export class TaskService extends AbstractService {
await this.finishTask(task, TaskState.Timeout);
this.logger.warn(
'[TaskService.retryExecuteTimeoutTasks:timeout] taskType: %s, targetName: %s, taskId: %s, attempts %s set to fail',
task.type, task.targetName, task.taskId, task.attempts);
task.type,
task.targetName,
task.taskId,
task.attempts
);
continue;
}
if (task.attempts >= 1) {
@@ -139,26 +172,44 @@ export class TaskService extends AbstractService {
await this.retryTask(task);
this.logger.info(
'[TaskService.retryExecuteTimeoutTasks:retry] taskType: %s, targetName: %s, taskId: %s, attempts %s will retry again',
task.type, task.targetName, task.taskId, task.attempts);
task.type,
task.targetName,
task.taskId,
task.attempts
);
} catch (e) {
this.logger.error(
'[TaskService.retryExecuteTimeoutTasks:error] processing task, taskType: %s, targetName: %s, taskId: %s, attempts %s will retry again',
task.type, task.targetName, task.taskId, task.attempts);
task.type,
task.targetName,
task.taskId,
task.attempts
);
this.logger.error(e);
}
}
// try waiting timeout tasks in 30 mins
const waitingTasks = await this.taskRepository.findTimeoutTasks(TaskState.Waiting, 60000 * 30);
const waitingTasks = await this.taskRepository.findTimeoutTasks(
TaskState.Waiting,
60000 * 30
);
for (const task of waitingTasks) {
try {
await this.retryTask(task);
this.logger.warn(
'[TaskService.retryExecuteTimeoutTasks:retryWaiting] taskType: %s, targetName: %s, taskId: %s waiting too long',
task.type, task.targetName, task.taskId);
task.type,
task.targetName,
task.taskId
);
} catch (e) {
this.logger.error(
'[TaskService.retryExecuteTimeoutTasks:error] waiting task, taskType: %s, targetName: %s, taskId: %s, attempts %s will retry again',
task.type, task.targetName, task.taskId, task.attempts);
task.type,
task.targetName,
task.taskId,
task.attempts
);
this.logger.error(e);
}
}
@@ -173,7 +224,11 @@ export class TaskService extends AbstractService {
await this.taskRepository.saveTask(task);
}
public async finishTask(task: Task, taskState: TaskState, appendLog?: string) {
public async finishTask(
task: Task,
taskState: TaskState,
appendLog?: string
) {
if (appendLog) {
await this.appendLogToNFS(task, appendLog);
}
@@ -189,7 +244,7 @@ export class TaskService extends AbstractService {
task.logStorePosition,
{
'Content-Type': 'text/plain; charset=utf-8',
},
}
);
if (nextPosition) {
task.logStorePosition = nextPosition;
@@ -197,11 +252,14 @@ export class TaskService extends AbstractService {
} catch (err: any) {
// [PositionNotEqualToLengthError]: Position is not equal to file length, status: 409
// [ObjectNotAppendableError]: The object is not appendable
if (err.code === 'PositionNotEqualToLength' || err.code === 'ObjectNotAppendable') {
if (
err.code === 'PositionNotEqualToLength' ||
err.code === 'ObjectNotAppendable'
) {
// override exists log file
await this.nfsAdapter.uploadBytes(
task.logPath,
Buffer.from(appendLog + '\n'),
Buffer.from(appendLog + '\n')
);
return;
}
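
The hunk above appends incremental log chunks to an object store and, when the stored append position is stale or the object is not appendable, falls back to overwriting the whole log file. A minimal sketch of that pattern, assuming a hypothetical `AppendableStore` interface and error codes as shown in the diff (not the real NFSAdapter API):

```ts
// Hypothetical store interface; names are illustrative only.
interface AppendableStore {
  appendBytes(
    key: string,
    bytes: Uint8Array,
    position?: string
  ): Promise<{ nextAppendPosition?: string }>;
  uploadBytes(key: string, bytes: Uint8Array): Promise<void>;
}

async function appendTaskLog(
  store: AppendableStore,
  logPath: string,
  line: string,
  position?: string
): Promise<string | undefined> {
  const bytes = Buffer.from(line + '\n');
  try {
    const { nextAppendPosition } = await store.appendBytes(logPath, bytes, position);
    // caller persists this value as the next logStorePosition
    return nextAppendPosition;
  } catch (err: any) {
    // stale append position or non-appendable object: overwrite the log file instead
    if (err.code === 'PositionNotEqualToLength' || err.code === 'ObjectNotAppendable') {
      await store.uploadBytes(logPath, bytes);
      return undefined;
    }
    throw err;
  }
}
```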

View File

@@ -1,20 +1,17 @@
import dayjs from 'dayjs';
import {
AccessLevel,
SingletonProto,
Inject,
} from '@eggjs/tegg';
import { AccessLevel, SingletonProto, Inject } from '@eggjs/tegg';
import { isEmpty } from 'lodash-es';
import { ForbiddenError, UnauthorizedError } from 'egg-errors';
import { AbstractService } from '../../common/AbstractService.js';
import { Token, isGranularToken } from '../entity/Token.js';
import { TokenPackage as TokenPackageModel } from '../../../app/repository/model/TokenPackage.js';
import { Package as PackageModel } from '../../../app/repository/model/Package.js';
import type { Token } from '../entity/Token.js';
import { isGranularToken } from '../entity/Token.js';
import type { TokenPackage as TokenPackageModel } from '../../../app/repository/model/TokenPackage.js';
import type { Package as PackageModel } from '../../../app/repository/model/Package.js';
import { ModelConvertor } from '../../../app/repository/util/ModelConvertor.js';
import { Package as PackageEntity } from '../entity/Package.js';
import { getScopeAndName } from '../../../app/common/PackageUtil.js';
import { sha512 } from '../../../app/common/UserUtil.js';
import { UserRepository } from '../../../app/repository/UserRepository.js';
import type { UserRepository } from '../../../app/repository/UserRepository.js';
@SingletonProto({
accessLevel: AccessLevel.PUBLIC,
@@ -30,8 +27,12 @@ export class TokenService extends AbstractService {
public async listTokenPackages(token: Token) {
if (isGranularToken(token)) {
const models = await this.TokenPackage.find({ tokenId: token.tokenId });
const packages = await this.Package.find({ packageId: models.map(m => m.packageId) });
return packages.map(pkg => ModelConvertor.convertModelToEntity(pkg, PackageEntity));
const packages = await this.Package.find({
packageId: models.map(m => m.packageId),
});
return packages.map(pkg =>
ModelConvertor.convertModelToEntity(pkg, PackageEntity)
);
}
return null;
}
@@ -48,7 +49,7 @@ export class TokenService extends AbstractService {
public async checkGranularTokenAccess(token: Token, fullname: string) {
// check for scope whitelist
const [ scope, name ] = getScopeAndName(fullname);
const [scope, name] = getScopeAndName(fullname);
// check for packages whitelist
const allowedPackages = await this.listTokenPackages(token);
@@ -57,7 +58,9 @@ export class TokenService extends AbstractService {
return true;
}
const existPkgConfig = allowedPackages?.find(pkg => pkg.scope === scope && pkg.name === name);
const existPkgConfig = allowedPackages?.find(
pkg => pkg.scope === scope && pkg.name === name
);
if (existPkgConfig) {
return true;
}
@@ -68,7 +71,6 @@ export class TokenService extends AbstractService {
}
throw new ForbiddenError(`can't access package "${fullname}"`);
}
async getUserAndToken(authorization: string) {
@@ -77,8 +79,8 @@ export class TokenService extends AbstractService {
if (!matchs) return null;
const tokenValue = matchs[1];
const tokenKey = sha512(tokenValue);
const authorizedUserAndToken = await this.userRepository.findUserAndTokenByTokenKey(tokenKey);
const authorizedUserAndToken =
await this.userRepository.findUserAndTokenByTokenKey(tokenKey);
return authorizedUserAndToken;
}
}
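
The flow above never compares raw token values: the presented token is hashed and the digest is used as the lookup key. A hedged sketch of that idea; the Bearer-prefix regex, the hex digest encoding, and the injected repository function are assumptions for illustration:

```ts
import { createHash } from 'node:crypto';

// Illustrative digest helper; the project's sha512() may use a different encoding.
function sha512Hex(value: string): string {
  return createHash('sha512').update(value).digest('hex');
}

// Parse "Bearer <token>", hash the token value, and look it up by its digest.
async function getUserAndToken<T>(
  authorization: string,
  findUserAndTokenByTokenKey: (tokenKey: string) => Promise<T | null>
): Promise<T | null> {
  const matchs = /^Bearer (.+)$/.exec(authorization);
  if (!matchs) return null;
  const tokenKey = sha512Hex(matchs[1]);
  return await findUserAndTokenByTokenKey(tokenKey);
}
```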

View File

@@ -1,22 +1,24 @@
import crypto from 'node:crypto';
import {
AccessLevel,
SingletonProto,
Inject,
} from '@eggjs/tegg';
import { AccessLevel, SingletonProto, Inject } from '@eggjs/tegg';
import { NotFoundError, ForbiddenError } from 'egg-errors';
import { UserRepository } from '../../repository/UserRepository.js';
import type { UserRepository } from '../../repository/UserRepository.js';
import { User as UserEntity } from '../entity/User.js';
import { Token as TokenEntity, TokenType } from '../entity/Token.js';
import type { TokenType } from '../entity/Token.js';
import { Token as TokenEntity } from '../entity/Token.js';
import { WebauthnCredential as WebauthnCredentialEntity } from '../entity/WebauthnCredential.js';
import { LoginResultCode } from '../../common/enum/User.js';
import { integrity, checkIntegrity, randomToken, sha512 } from '../../common/UserUtil.js';
import {
integrity,
checkIntegrity,
randomToken,
sha512,
} from '../../common/UserUtil.js';
import { AbstractService } from '../../common/AbstractService.js';
import { RegistryManagerService } from './RegistryManagerService.js';
import type { RegistryManagerService } from './RegistryManagerService.js';
import { getPrefixedName } from '../../common/PackageUtil.js';
import { Registry } from '../entity/Registry.js';
import type { Registry } from '../entity/Registry.js';
type Optional<T, K extends keyof T> = Omit < T, K > & Partial<T> ;
type Optional<T, K extends keyof T> = Omit<T, K> & Partial<T>;
type CreateUser = {
name: string;
@@ -77,19 +79,29 @@ export class UserService extends AbstractService {
}
const selfRegistry = await this.registryManagerService.ensureSelfRegistry();
const selfUser = await this.findUserByName(getPrefixedName(selfRegistry.userPrefix, name));
const selfUser = await this.findUserByName(
getPrefixedName(selfRegistry.userPrefix, name)
);
if (selfUser) {
return selfUser;
}
const defaultRegistry = await this.registryManagerService.ensureDefaultRegistry();
const defaultUser = await this.findUserByName(getPrefixedName(defaultRegistry.userPrefix, name));
const defaultRegistry =
await this.registryManagerService.ensureDefaultRegistry();
const defaultUser = await this.findUserByName(
getPrefixedName(defaultRegistry.userPrefix, name)
);
return defaultUser;
}
async findInRegistry(registry:Registry, name: string): Promise<UserEntity | null> {
return await this.findUserByName(getPrefixedName(registry.userPrefix, name));
async findInRegistry(
registry: Registry,
name: string
): Promise<UserEntity | null> {
return await this.findUserByName(
getPrefixedName(registry.userPrefix, name)
);
}
async findUserByName(name: string): Promise<UserEntity | null> {
@@ -106,7 +118,12 @@ export class UserService extends AbstractService {
return { code: LoginResultCode.Success, user, token };
}
async findOrCreateUser({ name, email, ip, password = crypto.randomUUID() }: Optional<CreateUser, 'password'>) {
async findOrCreateUser({
name,
email,
ip,
password = crypto.randomUUID(),
}: Optional<CreateUser, 'password'>) {
let user = await this.userRepository.findUserByName(name);
if (!user) {
const createRes = await this.create({
@@ -144,7 +161,11 @@ export class UserService extends AbstractService {
return { user: userEntity, token };
}
async saveUser(userPrefix = 'npm:', name: string, email: string): Promise<{ changed: boolean, user: UserEntity }> {
async saveUser(
userPrefix = 'npm:',
name: string,
email: string
): Promise<{ changed: boolean; user: UserEntity }> {
const storeName = name.startsWith('name:') ? name : `${userPrefix}${name}`;
let user = await this.userRepository.findUserByName(storeName);
if (!user) {
@@ -189,26 +210,41 @@ export class UserService extends AbstractService {
}
async removeToken(userId: string, tokenKeyOrTokenValue: string) {
let token = await this.userRepository.findTokenByTokenKey(tokenKeyOrTokenValue);
let token =
await this.userRepository.findTokenByTokenKey(tokenKeyOrTokenValue);
if (!token) {
// tokenKeyOrTokenValue is token value, sha512 and find again
token = await this.userRepository.findTokenByTokenKey(sha512(tokenKeyOrTokenValue));
token = await this.userRepository.findTokenByTokenKey(
sha512(tokenKeyOrTokenValue)
);
}
if (!token) {
throw new NotFoundError(`Token "${tokenKeyOrTokenValue}" not exists`);
}
if (token.userId !== userId) {
throw new ForbiddenError(`Not authorized to remove token "${tokenKeyOrTokenValue}"`);
throw new ForbiddenError(
`Not authorized to remove token "${tokenKeyOrTokenValue}"`
);
}
await this.userRepository.removeToken(token.tokenId);
}
async findWebauthnCredential(userId: string, browserType: string | undefined | null) {
const credential = await this.userRepository.findCredentialByUserIdAndBrowserType(userId, browserType || null);
async findWebauthnCredential(
userId: string,
browserType: string | undefined | null
) {
const credential =
await this.userRepository.findCredentialByUserIdAndBrowserType(
userId,
browserType || null
);
return credential;
}
async createWebauthnCredential(userId: string | undefined, options: CreateWebauthnCredentialOptions) {
async createWebauthnCredential(
userId: string | undefined,
options: CreateWebauthnCredentialOptions
) {
const credentialEntity = WebauthnCredentialEntity.create({
userId: userId as string,
credentialId: options.credentialId,
@@ -220,10 +256,13 @@ export class UserService extends AbstractService {
}
async removeWebauthnCredential(userId?: string, browserType?: string) {
const credential = await this.userRepository.findCredentialByUserIdAndBrowserType(userId, browserType || null);
const credential =
await this.userRepository.findCredentialByUserIdAndBrowserType(
userId,
browserType || null
);
if (credential) {
await this.userRepository.removeCredential(credential.wancId);
}
}
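
The `Optional<T, K>` helper reformatted above keeps every property of `T` but lets the keys in `K` be omitted. A small self-contained illustration with a hypothetical shape:

```ts
type Optional<T, K extends keyof T> = Omit<T, K> & Partial<T>;

interface CreateUser {
  name: string;
  email: string;
  ip: string;
  password: string;
}

// `password` may now be omitted, while the remaining fields stay required.
export const input: Optional<CreateUser, 'password'> = {
  name: 'alice',
  email: 'alice@example.com',
  ip: '127.0.0.1',
};
```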
}

View File

@@ -1,11 +1,14 @@
import ObjectID from 'bson-objectid';
import { E400 } from 'egg-errors';
import { EntityData } from '../entity/Entity.js';
import type { EntityData } from '../entity/Entity.js';
type PartialBy<T, K extends keyof T> = Omit<T, K> & Partial<Pick<T, K>>;
export type EasyData<T extends EntityData, Id extends keyof T> = PartialBy<T, 'createdAt' | 'updatedAt' | Id>;
export type EasyData<T extends EntityData, Id extends keyof T> = PartialBy<
T,
'createdAt' | 'updatedAt' | Id
>;
const MAX_PAGE_SIZE = 100 as const;
export interface PageOptions {
@@ -14,7 +17,7 @@ export interface PageOptions {
}
export interface PageResult<T> {
count: number;
data: Array<T>
data: Array<T>;
}
export interface PageLimitOptions {
offset: number;
@@ -22,7 +25,10 @@ export interface PageLimitOptions {
}
export class EntityUtil {
static defaultData<T extends EntityData, Id extends keyof T>(data: EasyData<T, Id>, id: Id): T {
static defaultData<T extends EntityData, Id extends keyof T>(
data: EasyData<T, Id>,
id: Id
): T {
Reflect.set(data, id, EntityUtil.createId());
data.createdAt = data.createdAt || new Date();
data.updatedAt = data.updatedAt || new Date();

View File

@@ -1,13 +1,13 @@
import {
AccessLevel,
EggContext,
Inject,
SingletonProto,
} from '@eggjs/tegg';
import { Redis } from 'ioredis';
import type { EggContext } from '@eggjs/tegg';
import { AccessLevel, Inject, SingletonProto } from '@eggjs/tegg';
import type { Redis } from 'ioredis';
import { randomUUID } from 'node:crypto';
import { AuthClient, AuthUrlResult, userResult } from '../common/typing.js';
import type {
AuthClient,
AuthUrlResult,
userResult,
} from '../common/typing.js';
const ONE_DAY = 3600 * 24;
@@ -50,5 +50,4 @@ export class AuthAdapter implements AuthClient {
}
return null;
}
}

View File

@@ -1,14 +1,20 @@
import { Readable } from 'node:stream';
import type { Readable } from 'node:stream';
import {
AccessLevel,
LifecycleInit,
Inject,
SingletonProto,
} from '@eggjs/tegg';
import { EggAppConfig, EggLogger } from 'egg';
import type { EggAppConfig, EggLogger } from 'egg';
import FSClient from 'fs-cnpm';
import { AppendResult, NFSClient, UploadOptions, UploadResult, DownloadOptions } from '../common/typing.js';
import type {
AppendResult,
NFSClient,
UploadOptions,
UploadResult,
DownloadOptions,
} from '../common/typing.js';
@SingletonProto({
name: 'nfsClient',
@@ -36,11 +42,16 @@ export class NFSClientAdapter implements NFSClient {
this._client = this.config.nfs.client;
} else {
if (this.config.env === 'prod') {
throw new Error('[NFSAdapter] Can\'t use local fs NFS on production env');
throw new Error(
"[NFSAdapter] Can't use local fs NFS on production env"
);
}
// try to use fs-cnpm, don't use it on production env
this.logger.warn('[NFSAdapter] Don\'t use local fs NFS on production env, store on %s', this.config.nfs.dir);
this.logger.warn(
"[NFSAdapter] Don't use local fs NFS on production env, store on %s",
this.config.nfs.dir
);
this._client = new FSClient({ dir: this.config.nfs.dir });
}
@@ -49,7 +60,10 @@ export class NFSClientAdapter implements NFSClient {
}
}
async appendBytes(bytes: Uint8Array, options: UploadOptions): Promise<AppendResult> {
async appendBytes(
bytes: Uint8Array,
options: UploadOptions
): Promise<AppendResult> {
if (this._client.appendBytes) {
return await this._client.appendBytes(bytes, options);
}
@@ -68,14 +82,20 @@ export class NFSClientAdapter implements NFSClient {
return await this._client.remove(key);
}
async upload(filePath: string, options: UploadOptions): Promise<UploadResult> {
async upload(
filePath: string,
options: UploadOptions
): Promise<UploadResult> {
if (this.config.nfs.removeBeforeUpload) {
await this.remove(options.key);
}
return await this._client.upload(filePath, options);
}
async uploadBytes(bytes: Uint8Array, options: UploadOptions): Promise<UploadResult> {
async uploadBytes(
bytes: Uint8Array,
options: UploadOptions
): Promise<UploadResult> {
if (this.config.nfs.removeBeforeUpload) {
await this.remove(options.key);
}
@@ -85,7 +105,11 @@ export class NFSClientAdapter implements NFSClient {
return await this._client.uploadBuffer(bytes, options);
}
async download(key: string, filePath: string, options: DownloadOptions): Promise<void> {
async download(
key: string,
filePath: string,
options: DownloadOptions
): Promise<void> {
return await this._client.download(key, filePath, options);
}
}
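
A sketch of the storage-client selection above: prefer a configured NFS client, refuse the local-filesystem fallback in production, and otherwise warn and use fs-cnpm. The config and logger shapes are trimmed to just what the sketch needs:

```ts
import FSClient from 'fs-cnpm';

interface NfsSelectionConfig {
  env: string;
  nfs: { client?: unknown; dir: string };
}

function selectNfsClient(
  config: NfsSelectionConfig,
  logger: { warn(msg: string, ...args: unknown[]): void }
): unknown {
  // a real NFS client was configured, use it as-is
  if (config.nfs.client) return config.nfs.client;
  if (config.env === 'prod') {
    throw new Error("[NFSAdapter] Can't use local fs NFS on production env");
  }
  // development fallback: store files on the local disk via fs-cnpm
  logger.warn(
    "[NFSAdapter] Don't use local fs NFS on production env, store on %s",
    config.nfs.dir
  );
  return new FSClient({ dir: config.nfs.dir });
}
```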

View File

@@ -1,11 +1,7 @@
import {
AccessLevel,
Inject,
SingletonProto,
} from '@eggjs/tegg';
import { Redis } from 'ioredis';
import { AccessLevel, Inject, SingletonProto } from '@eggjs/tegg';
import type { Redis } from 'ioredis';
import { QueueAdapter } from '../common/typing.js';
import type { QueueAdapter } from '../common/typing.js';
/**
* Use a sorted set to keep the queue in order and ensure the same value is only inserted once
@@ -32,12 +28,16 @@ export class RedisQueueAdapter implements QueueAdapter {
*/
async push<T>(key: string, item: T): Promise<boolean> {
const score = await this.redis.incr(this.getQueueScoreName(key));
const res = await this.redis.zadd(this.getQueueName(key), score, JSON.stringify(item));
const res = await this.redis.zadd(
this.getQueueName(key),
score,
JSON.stringify(item)
);
return res !== 0;
}
async pop<T>(key: string) {
const [ json ] = await this.redis.zpopmin(this.getQueueName(key));
const [json] = await this.redis.zpopmin(this.getQueueName(key));
if (!json) return null;
return JSON.parse(json) as T;
}
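
The adapter above keeps the queue in a Redis sorted set: a monotonically increasing counter supplies the score, so members pop in insertion order, and `ZADD` de-duplicates identical payloads for free. A hedged standalone sketch with illustrative key names:

```ts
import type { Redis } from 'ioredis';

// Push returns false when the exact same payload is already queued.
async function push(redis: Redis, queue: string, item: unknown): Promise<boolean> {
  const score = await redis.incr(`${queue}:score`);
  const added = await redis.zadd(queue, score, JSON.stringify(item));
  return added !== 0;
}

// Pop the member with the lowest score, i.e. the oldest inserted item.
async function pop<T>(redis: Redis, queue: string): Promise<T | null> {
  const [json] = await redis.zpopmin(queue);
  if (!json) return null;
  return JSON.parse(json) as T;
}
```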

View File

@@ -1,12 +1,11 @@
import {
AccessLevel,
Inject,
SingletonProto,
} from '@eggjs/tegg';
import { EggAppConfig } from 'egg';
import { Client as ElasticsearchClient, estypes } from '@elastic/elasticsearch';
import { AccessLevel, Inject, SingletonProto } from '@eggjs/tegg';
import type { EggAppConfig } from 'egg';
import type {
Client as ElasticsearchClient,
estypes,
} from '@elastic/elasticsearch';
import { SearchAdapter } from '../common/typing.js';
import type { SearchAdapter } from '../common/typing.js';
/**
* Use Elasticsearch to search the huge set of npm packages.
@@ -23,7 +22,9 @@ export class ESSearchAdapter implements SearchAdapter {
private readonly elasticsearch: ElasticsearchClient; // injected by the elasticsearch plugin
async search<T>(query: any): Promise<estypes.SearchHitsMetadata<T>> {
const { cnpmcore: { elasticsearchIndex: index } } = this.config;
const {
cnpmcore: { elasticsearchIndex: index },
} = this.config;
const result = await this.elasticsearch.search<T>({
index,
...query,
@@ -32,7 +33,9 @@ export class ESSearchAdapter implements SearchAdapter {
}
async upsert<T>(id: string, document: T): Promise<string> {
const { cnpmcore: { elasticsearchIndex: index } } = this.config;
const {
cnpmcore: { elasticsearchIndex: index },
} = this.config;
const res = await this.elasticsearch.index({
id,
index,
@@ -42,7 +45,9 @@ export class ESSearchAdapter implements SearchAdapter {
}
async delete(id: string): Promise<string> {
const { cnpmcore: { elasticsearchIndex: index } } = this.config;
const {
cnpmcore: { elasticsearchIndex: index },
} = this.config;
const res = await this.elasticsearch.delete({
index,
id,

View File

@@ -1,19 +1,15 @@
import {
AccessLevel,
Inject,
EggContext,
ContextProto,
} from '@eggjs/tegg';
import { EggAppConfig, EggLogger } from 'egg';
import type { EggContext } from '@eggjs/tegg';
import { AccessLevel, Inject, ContextProto } from '@eggjs/tegg';
import type { EggAppConfig, EggLogger } from 'egg';
import { UnauthorizedError, ForbiddenError } from 'egg-errors';
import { PackageRepository } from '../repository/PackageRepository.js';
import { Package as PackageEntity } from '../core/entity/Package.js';
import { User as UserEntity } from '../core/entity/User.js';
import { Token as TokenEntity } from '../core/entity/Token.js';
import type { PackageRepository } from '../repository/PackageRepository.js';
import type { Package as PackageEntity } from '../core/entity/Package.js';
import type { User as UserEntity } from '../core/entity/User.js';
import type { Token as TokenEntity } from '../core/entity/Token.js';
import { getScopeAndName } from '../common/PackageUtil.js';
import { RegistryManagerService } from '../core/service/RegistryManagerService.js';
import { TokenService } from '../core/service/TokenService.js';
import type { RegistryManagerService } from '../core/service/RegistryManagerService.js';
import type { TokenService } from '../core/service/TokenService.js';
// https://docs.npmjs.com/creating-and-viewing-access-tokens#creating-tokens-on-the-website
export type TokenRole = 'read' | 'publish' | 'setting';
@@ -44,7 +40,6 @@ export class UserRoleManager {
// 3. pkg scope is allowed to publish
// use AbstractController#ensurePublishAccess to ensure the pkg exists;
public async checkPublishAccess(ctx: EggContext, fullname: string) {
const user = await this.requiredAuthorizedUser(ctx, 'publish');
// 1. admin has all access
@@ -59,7 +54,7 @@ export class UserRoleManager {
await this.tokenService.checkGranularTokenAccess(token, fullname);
// 3. has published in current registry
const [ scope, name ] = getScopeAndName(fullname);
const [scope, name] = getScopeAndName(fullname);
const pkg = await this.packageRepository.findPackage(scope, name);
const selfRegistry = await this.registryManagerService.ensureSelfRegistry();
const inSelfRegistry = pkg?.registryId === selfRegistry.registryId;
@@ -108,7 +103,8 @@ export class UserRoleManager {
this.handleAuthorized = true;
const authorization = ctx.get<string>('authorization');
if (!authorization) return null;
const authorizedUserAndToken = await this.tokenService.getUserAndToken(authorization);
const authorizedUserAndToken =
await this.tokenService.getUserAndToken(authorization);
if (!authorizedUserAndToken) {
return null;
}
@@ -130,9 +126,14 @@ export class UserRoleManager {
}
const { user, token } = authorizedUserAndToken;
// this branch is only entered when the npm client and version check setting is enabled
if (this.config.cnpmcore.enableNpmClientAndVersionCheck && role === 'publish') {
if (
this.config.cnpmcore.enableNpmClientAndVersionCheck &&
role === 'publish'
) {
if (token.isReadonly) {
throw new ForbiddenError(`Read-only Token "${token.tokenMark}" can't publish`);
throw new ForbiddenError(
`Read-only Token "${token.tokenMark}" can't publish`
);
}
// only npm >= 7.0.0 is allowed to perform the publish action
// user-agent: "npm/6.14.12 node/v10.24.1 darwin x64"
@@ -147,22 +148,29 @@ export class UserRoleManager {
}
if (role === 'setting') {
if (token.isReadonly) {
throw new ForbiddenError(`Read-only Token "${token.tokenMark}" can't setting`);
throw new ForbiddenError(
`Read-only Token "${token.tokenMark}" can't setting`
);
}
if (token.isAutomation) {
throw new ForbiddenError(`Automation Token "${token.tokenMark}" can't setting`);
throw new ForbiddenError(
`Automation Token "${token.tokenMark}" can't setting`
);
}
}
return user;
}
public async requiredPackageMaintainer(pkg: PackageEntity, user: UserEntity) {
const maintainers = await this.packageRepository.listPackageMaintainers(pkg.packageId);
const maintainers = await this.packageRepository.listPackageMaintainers(
pkg.packageId
);
const maintainer = maintainers.find(m => m.userId === user.userId);
if (!maintainer) {
const names = maintainers.map(m => m.name).join(', ');
throw new ForbiddenError(`"${user.name}" not authorized to modify ${pkg.fullname}, please contact maintainers: "${names}"`);
throw new ForbiddenError(
`"${user.name}" not authorized to modify ${pkg.fullname}, please contact maintainers: "${names}"`
);
}
}
@@ -173,10 +181,14 @@ export class UserRoleManager {
}
const allowScopes = user.scopes ?? cnpmcoreConfig.allowScopes;
if (!scope) {
throw new ForbiddenError(`Package scope required, legal scopes: "${allowScopes.join(', ')}"`);
throw new ForbiddenError(
`Package scope required, legal scopes: "${allowScopes.join(', ')}"`
);
}
if (!allowScopes.includes(scope)) {
throw new ForbiddenError(`Scope "${scope}" not match legal scopes: "${allowScopes.join(', ')}"`);
throw new ForbiddenError(
`Scope "${scope}" not match legal scopes: "${allowScopes.join(', ')}"`
);
}
}
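
The scope checks above boil down to: a scope is mandatory, and it must appear in the user's own list or the global allow list. A compact distillation using the same `ForbiddenError` from egg-errors; the standalone function shape is illustrative:

```ts
import { ForbiddenError } from 'egg-errors';

function assertScopeAllowed(scope: string | undefined, allowScopes: string[]): void {
  if (!scope) {
    throw new ForbiddenError(
      `Package scope required, legal scopes: "${allowScopes.join(', ')}"`
    );
  }
  if (!allowScopes.includes(scope)) {
    throw new ForbiddenError(
      `Scope "${scope}" not match legal scopes: "${allowScopes.join(', ')}"`
    );
  }
}
```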

View File

@@ -1,39 +1,43 @@
import { SyncDeleteMode, SyncMode, ChangesStreamMode } from '../common/constants.js';
import { DATABASE_TYPE } from '../../config/database.js';
import type {
SyncDeleteMode,
SyncMode,
ChangesStreamMode,
} from '../common/constants.js';
import type { DATABASE_TYPE } from '../../config/database.js';
export { cnpmcoreConfig } from '../../config/config.default.js';
export type CnpmcoreConfig = {
name: string,
name: string;
/**
* enable hook or not
*/
hookEnable: boolean,
hookEnable: boolean;
/**
* max custom hooks count
*/
hooksLimit: number,
hooksLimit: number;
/**
* upstream registry url
*/
sourceRegistry: string,
sourceRegistry: string;
/**
* upstream registry is base on `cnpmcore` or not
* if your upstream is official npm registry, please turn it off
*/
sourceRegistryIsCNpm: boolean,
sourceRegistryIsCNpm: boolean;
/**
* sync upstream first
*/
syncUpstreamFirst: boolean,
syncUpstreamFirst: boolean;
/**
* sync upstream timeout, default is 3mins
*/
sourceRegistrySyncTimeout: number,
sourceRegistrySyncTimeout: number;
/**
* sync task high water size, default is 100
*/
taskQueueHighWaterSize: number,
taskQueueHighWaterSize: number;
/**
* sync mode
* - none: don't sync npm package
@@ -41,111 +45,111 @@ export type CnpmcoreConfig = {
* - all: sync all npm packages
* - exist: only sync existing packages; takes effect when `enableCheckRecentlyUpdated` or `enableChangesStream` is enabled
*/
syncMode: SyncMode,
syncDeleteMode: SyncDeleteMode,
syncPackageWorkerMaxConcurrentTasks: number,
triggerHookWorkerMaxConcurrentTasks: number,
createTriggerHookWorkerMaxConcurrentTasks: number,
syncMode: SyncMode;
syncDeleteMode: SyncDeleteMode;
syncPackageWorkerMaxConcurrentTasks: number;
triggerHookWorkerMaxConcurrentTasks: number;
createTriggerHookWorkerMaxConcurrentTasks: number;
/**
* stop syncing these packages in future
*/
syncPackageBlockList: string[],
syncPackageBlockList: string[];
/**
* check recently updated packages from https://www.npmjs.com/browse/updated; if you set changesStreamRegistry to cnpmcore,
* you should probably disable it
*/
enableCheckRecentlyUpdated: boolean,
enableCheckRecentlyUpdated: boolean;
/**
* mirror binary, default is false
*/
enableSyncBinary: boolean,
enableSyncBinary: boolean;
/**
* sync binary source api, default is `${sourceRegistry}/-/binary`
*/
syncBinaryFromAPISource: string,
syncBinaryFromAPISource: string;
/**
* enable syncing download data from the source registry https://github.com/cnpm/cnpmcore/issues/108
* all three parameters must be configured at the same time to take effect
*/
enableSyncDownloadData: boolean,
syncDownloadDataSourceRegistry: string,
enableSyncDownloadData: boolean;
syncDownloadDataSourceRegistry: string;
/**
* should be YYYY-MM-DD format
*/
syncDownloadDataMaxDate: string,
syncDownloadDataMaxDate: string;
/**
* @see https://github.com/npm/registry-follower-tutorial
*/
enableChangesStream: boolean,
checkChangesStreamInterval: number,
changesStreamRegistry: string,
enableChangesStream: boolean;
checkChangesStreamInterval: number;
changesStreamRegistry: string;
/**
* handle _changes request mode, default is 'streaming'; please set it to 'json' when the upstream is a cnpmcore registry
*/
changesStreamRegistryMode: ChangesStreamMode,
changesStreamRegistryMode: ChangesStreamMode;
/**
* registry url
*/
registry: string,
registry: string;
/**
* https://docs.npmjs.com/cli/v6/using-npm/config#always-auth npm <= 6
* if `alwaysAuth=true`, all API requests require an access token
*/
alwaysAuth: boolean,
alwaysAuth: boolean;
/**
* scope whitelist
*/
allowScopes: string[],
allowScopes: string[];
/**
* allow publishing non-scoped packages, disabled by default
*/
allowPublishNonScopePackage: boolean,
allowPublishNonScopePackage: boolean;
/**
* allow public registration; otherwise only admins can log in
*/
allowPublicRegistration: boolean,
allowPublicRegistration: boolean;
/**
* default system admins
*/
admins: Record<string, string>,
admins: Record<string, string>;
/**
* use webauthn for login, https://webauthn.guide/
* only support platform authenticators, browser support: https://webauthn.me/browser-support
*/
enableWebAuthn: boolean,
enableWebAuthn: boolean;
/**
* http response cache control header
*/
enableCDN: boolean,
enableCDN: boolean;
/**
* if you are using a CDN, you can override it;
* it means caching for 300s on both the CDN server and the client side.
*/
cdnCacheControlHeader: string,
cdnCacheControlHeader: string;
/**
* if you are using a CDN, you can set it to 'Accept, Accept-Encoding'
*/
cdnVaryHeader: string,
cdnVaryHeader: string;
/**
* store full package version manifest data in the database table (package_version_manifests), default is false
*/
enableStoreFullPackageVersionManifestsToDatabase: boolean,
enableStoreFullPackageVersionManifestsToDatabase: boolean;
/**
* only allow the publish action from the npm client with npm >= 7.0.0
*/
enableNpmClientAndVersionCheck: boolean,
enableNpmClientAndVersionCheck: boolean;
/**
* sync when a package is not found; only takes effect when syncMode = all/exist
*/
syncNotFound: boolean,
syncNotFound: boolean;
/**
* redirect to source registry when package not found
*/
redirectNotFound: boolean,
redirectNotFound: boolean;
/**
* enable unpkg features, https://github.com/cnpm/cnpmcore/issues/452
*/
enableUnpkg: boolean,
enableUnpkg: boolean;
/**
* enable sync unpkg files
*/
@@ -158,29 +162,29 @@ export type CnpmcoreConfig = {
* enabling this prevents a sync-specific-version task from automatically appending the latest version; the local latest stable version will be marked as the latest tag.
* in most cases, you should keep this false to match the behavior of the source registry.
*/
strictSyncSpecivicVersion: boolean,
strictSyncSpecivicVersion: boolean;
/**
* enable elasticsearch
*/
enableElasticsearch: boolean,
* enable elasticsearch
*/
enableElasticsearch: boolean;
/**
* elasticsearch index. if enableElasticsearch is true, you must set an index to write ES docs.
*/
elasticsearchIndex: string,
* elasticsearch index. if enableElasticsearch is true, you must set an index to write ES docs.
*/
elasticsearchIndex: string;
/**
* strictly enforce/validate the manifest and tgz when publishing, https://github.com/cnpm/cnpmcore/issues/542
*/
strictValidateTarballPkg?: boolean,
strictValidateTarballPkg?: boolean;
/**
* strictly enforce/validate dependency versions when publishing or syncing
*/
strictValidatePackageDeps?: boolean,
strictValidatePackageDeps?: boolean;
/**
* database config
*/
database: {
type: DATABASE_TYPE | string,
},
type: DATABASE_TYPE | string;
};
};
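
For orientation, a purely illustrative override built only from fields documented in the type above; the import path and every value are placeholders, not project defaults:

```ts
import type { CnpmcoreConfig } from './app/port/config.js'; // hypothetical path

const overrides: Partial<CnpmcoreConfig> = {
  registry: 'https://npm.example.com',
  sourceRegistry: 'https://registry.npmjs.org',
  sourceRegistryIsCNpm: false,
  enableChangesStream: false,
  allowScopes: ['@example'],
  redirectNotFound: true,
};

export default overrides;
```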

View File

@@ -1,27 +1,17 @@
import {
NotFoundError,
UnavailableForLegalReasonsError,
} from 'egg-errors';
import {
Inject,
EggContext,
} from '@eggjs/tegg';
import {
EggLogger,
EggAppConfig,
} from 'egg';
import { NotFoundError, UnavailableForLegalReasonsError } from 'egg-errors';
import type { EggContext } from '@eggjs/tegg';
import { Inject } from '@eggjs/tegg';
import type { EggLogger, EggAppConfig } from 'egg';
import { MiddlewareController } from '../middleware/index.js';
import { UserRoleManager } from '../UserRoleManager.js';
import { PackageRepository } from '../../repository/PackageRepository.js';
import { UserRepository } from '../../repository/UserRepository.js';
import type { UserRoleManager } from '../UserRoleManager.js';
import type { PackageRepository } from '../../repository/PackageRepository.js';
import type { UserRepository } from '../../repository/UserRepository.js';
import { getFullname, getScopeAndName } from '../../common/PackageUtil.js';
import { Package as PackageEntity } from '../../core/entity/Package.js';
import { PackageVersion as PackageVersionEntity } from '../../core/entity/PackageVersion.js';
import { UserService } from '../../core/service/UserService.js';
import {
VersionRule,
} from '../typebox.js';
import type { Package as PackageEntity } from '../../core/entity/Package.js';
import type { PackageVersion as PackageVersionEntity } from '../../core/entity/PackageVersion.js';
import type { UserService } from '../../core/service/UserService.js';
import { VersionRule } from '../typebox.js';
import { SyncMode } from '../../common/constants.js';
class PackageNotFoundError extends NotFoundError {
@@ -62,11 +52,15 @@ export abstract class AbstractController extends MiddlewareController {
return scope && this.config.cnpmcore.allowScopes.includes(scope);
}
protected async ensurePublishAccess(ctx: EggContext, fullname: string, checkPkgExist = true) {
protected async ensurePublishAccess(
ctx: EggContext,
fullname: string,
checkPkgExist = true
) {
const user = await this.userRoleManager.checkPublishAccess(ctx, fullname);
let pkg: PackageEntity | null = null;
if (checkPkgExist) {
const [ scope, name ] = getScopeAndName(fullname);
const [scope, name] = getScopeAndName(fullname);
pkg = await this.packageRepository.findPackage(scope, name);
if (!pkg) {
throw this.createPackageNotFoundError(fullname, undefined);
@@ -109,19 +103,28 @@ export abstract class AbstractController extends MiddlewareController {
}
protected createPackageNotFoundError(fullname: string, version?: string) {
const message = version ? `${fullname}@${version} not found` : `${fullname} not found`;
const message = version
? `${fullname}@${version} not found`
: `${fullname} not found`;
const err = new PackageNotFoundError(message);
return err;
}
protected createPackageNotFoundErrorWithRedirect(fullname: string, version?: string, allowSync = false) {
protected createPackageNotFoundErrorWithRedirect(
fullname: string,
version?: string,
allowSync = false
) {
// const err = new PackageNotFoundError(message);
const err = this.createPackageNotFoundError(fullname, version);
const [ scope ] = getScopeAndName(fullname);
const [scope] = getScopeAndName(fullname);
// don't sync private scopes
if (!this.isPrivateScope(scope)) {
// syncMode = none/admin, redirect public package to source registry
if (!this.enableSync && this.config.cnpmcore.syncMode !== SyncMode.admin) {
if (
!this.enableSync &&
this.config.cnpmcore.syncMode !== SyncMode.admin
) {
if (this.redirectNotFound) {
err.redirectToSourceRegistry = this.sourceRegistry;
}
@@ -142,35 +145,67 @@ export abstract class AbstractController extends MiddlewareController {
return err;
}
protected createPackageBlockError(reason: string, fullname: string, version?: string) {
const message = version ? `${fullname}@${version} was blocked` : `${fullname} was blocked`;
protected createPackageBlockError(
reason: string,
fullname: string,
version?: string
) {
const message = version
? `${fullname}@${version} was blocked`
: `${fullname} was blocked`;
return new UnavailableForLegalReasonsError(`${message}, reason: ${reason}`);
}
protected async getPackageEntityByFullname(fullname: string, allowSync?: boolean): Promise<PackageEntity> {
const [ scope, name ] = getScopeAndName(fullname);
protected async getPackageEntityByFullname(
fullname: string,
allowSync?: boolean
): Promise<PackageEntity> {
const [scope, name] = getScopeAndName(fullname);
return await this.getPackageEntity(scope, name, allowSync);
}
// try to get package entity, throw NotFoundError when package not exists
protected async getPackageEntity(scope: string, name: string, allowSync?:boolean): Promise<PackageEntity> {
protected async getPackageEntity(
scope: string,
name: string,
allowSync?: boolean
): Promise<PackageEntity> {
const packageEntity = await this.packageRepository.findPackage(scope, name);
if (!packageEntity) {
const fullname = getFullname(scope, name);
throw this.createPackageNotFoundErrorWithRedirect(fullname, undefined, allowSync);
throw this.createPackageNotFoundErrorWithRedirect(
fullname,
undefined,
allowSync
);
}
return packageEntity;
}
protected async getPackageVersionEntity(pkg: PackageEntity, version: string, allowSync?: boolean): Promise<PackageVersionEntity> {
const packageVersion = await this.packageRepository.findPackageVersion(pkg.packageId, version);
protected async getPackageVersionEntity(
pkg: PackageEntity,
version: string,
allowSync?: boolean
): Promise<PackageVersionEntity> {
const packageVersion = await this.packageRepository.findPackageVersion(
pkg.packageId,
version
);
if (!packageVersion) {
throw this.createPackageNotFoundErrorWithRedirect(pkg.fullname, version, allowSync);
throw this.createPackageNotFoundErrorWithRedirect(
pkg.fullname,
version,
allowSync
);
}
return packageVersion;
}
protected getAndCheckVersionFromFilename(ctx: EggContext, fullname: string, filenameWithVersion: string) {
protected getAndCheckVersionFromFilename(
ctx: EggContext,
fullname: string,
filenameWithVersion: string
) {
const scopeAndName = getScopeAndName(fullname);
const name = scopeAndName[1];
// @foo/bar/-/bar-1.0.0 == filename: bar ==> 1.0.0
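
The comment above encodes the tarball-URL convention: for `@foo/bar/-/bar-1.0.0`, the filename starts with the unscoped package name followed by a dash, and the remainder is the version. A tiny sketch of that mapping (illustrative, not the controller's actual helper):

```ts
function versionFromFilename(fullname: string, filenameWithVersion: string): string {
  // drop the scope: "@foo/bar" -> "bar"
  const name = fullname.split('/').pop() as string;
  // skip "<name>-": "bar-1.0.0" -> "1.0.0"
  return filenameWithVersion.substring(name.length + 1);
}

// versionFromFilename('@foo/bar', 'bar-1.0.0') === '1.0.0'
```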

View File

@@ -1,19 +1,20 @@
import type { EggContext } from '@eggjs/tegg';
import {
HTTPController,
HTTPMethod,
HTTPMethodEnum,
HTTPParam,
Context,
EggContext,
Inject,
} from '@eggjs/tegg';
import path from 'node:path';
import { NotFoundError } from 'egg-errors';
import { AbstractController } from './AbstractController.js';
import { BinarySyncerService } from '../../core/service/BinarySyncerService.js';
import { Binary } from '../../core/entity/Binary.js';
import binaries, { BinaryName } from '../../../config/binaries.js';
import type { BinarySyncerService } from '../../core/service/BinarySyncerService.js';
import type { Binary } from '../../core/entity/Binary.js';
import type { BinaryName } from '../../../config/binaries.js';
import binaries from '../../../config/binaries.js';
import { BinaryNameRule, BinarySubpathRule } from '../typebox.js';
@HTTPController()
@@ -35,13 +36,15 @@ export class BinarySyncController extends AbstractController {
method: HTTPMethodEnum.GET,
})
async listBinaries() {
return Object.entries(binaries).map(([ binaryName, binaryConfig ]) => {
return Object.entries(binaries).map(([binaryName, binaryConfig]) => {
return {
name: `${binaryName}/`,
category: `${binaryConfig.category}/`,
description: binaryConfig.description,
distUrl: binaryConfig.distUrl,
repoUrl: /^https?:\/\//.test(binaryConfig.repo) ? binaryConfig.repo : `https://github.com/${binaryConfig.repo}`,
repoUrl: /^https?:\/\//.test(binaryConfig.repo)
? binaryConfig.repo
: `https://github.com/${binaryConfig.repo}`,
type: 'dir',
url: `${this.config.cnpmcore.registry}/-/binary/${binaryConfig.category}/`,
};
@@ -52,7 +55,11 @@ export class BinarySyncController extends AbstractController {
path: '/-/binary/:binaryName(@[^/]{1,220}/[^/]{1,220}|[^@/]{1,220})/:subpath(.*)',
method: HTTPMethodEnum.GET,
})
async showBinary(@Context() ctx: EggContext, @HTTPParam() binaryName: BinaryName, @HTTPParam() subpath: string) {
async showBinary(
@Context() ctx: EggContext,
@HTTPParam() binaryName: BinaryName,
@HTTPParam() subpath: string
) {
// check binaryName valid
try {
ctx.tValidate(BinaryNameRule, binaryName);
@@ -74,14 +81,22 @@ export class BinarySyncController extends AbstractController {
const parent = parsed.dir === '/' ? '/' : `${parsed.dir}/`;
const name = subpath.endsWith('/') ? `${parsed.base}/` : parsed.base;
// first, look up the case where binary === category
let binary = await this.binarySyncerService.findBinary(binaryName, parent, name);
let binary = await this.binarySyncerService.findBinary(
binaryName,
parent,
name
);
if (!binary) {
// if not found, fall back to looking up the mergeCategory case
const category = binaries?.[binaryName]?.category;
if (category) {
// canvas/v2.6.1/canvas-v2.6.1-node-v57-linux-glibc-x64.tar.gz
// -> node-canvas-prebuilt/v2.6.1/node-canvas-prebuilt-v2.6.1-node-v57-linux-glibc-x64.tar.gz
binary = await this.binarySyncerService.findBinary(category, parent, name.replace(new RegExp(`^${binaryName}-`), `${category}-`));
binary = await this.binarySyncerService.findBinary(
category,
parent,
name.replace(new RegExp(`^${binaryName}-`), `${category}-`)
);
}
}
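
The fallback above retries the lookup under the merged category by rewriting the requested file's prefix, as the canvas example in the comments shows. A standalone sketch of just the rename step:

```ts
// "canvas-v2.6.1-node-v57-linux-glibc-x64.tar.gz"
//   -> "node-canvas-prebuilt-v2.6.1-node-v57-linux-glibc-x64.tar.gz"
function remapToCategory(binaryName: string, category: string, filename: string): string {
  return filename.replace(new RegExp(`^${binaryName}-`), `${category}-`);
}
```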
@@ -110,7 +125,10 @@ export class BinarySyncController extends AbstractController {
path: '/-/binary/:binaryName(@[^/]{1,220}/[^/]{1,220}|[^@/]{1,220})',
method: HTTPMethodEnum.GET,
})
async showBinaryIndex(@Context() ctx: EggContext, @HTTPParam() binaryName: BinaryName) {
async showBinaryIndex(
@Context() ctx: EggContext,
@HTTPParam() binaryName: BinaryName
) {
// check binaryName valid
try {
ctx.tValidate(BinaryNameRule, binaryName);

View File

@@ -1,3 +1,4 @@
import type { EggContext } from '@eggjs/tegg';
import {
HTTPController,
HTTPMethod,
@@ -5,12 +6,11 @@ import {
HTTPQuery,
Inject,
Context,
EggContext,
} from '@eggjs/tegg';
import { Type } from 'egg-typebox-validate/typebox';
import { AbstractController } from './AbstractController.js';
import { ChangeRepository } from '../../repository/ChangeRepository.js';
import type { ChangeRepository } from '../../repository/ChangeRepository.js';
const ChangeRule = Type.Object({
since: Type.Integer({ minimum: 0 }),
@@ -37,10 +37,9 @@ export class ChangesStreamController extends AbstractController {
seq: change.id,
type: change.type,
id: change.targetName,
changes: [ change.data ],
changes: [change.data],
};
});
return { results };
}
}

View File

@@ -8,9 +8,12 @@ import {
import { UnprocessableEntityError, NotFoundError } from 'egg-errors';
import { AbstractController } from './AbstractController.js';
import { FULLNAME_REG_STRING, getScopeAndName } from '../../common/PackageUtil.js';
import {
FULLNAME_REG_STRING,
getScopeAndName,
} from '../../common/PackageUtil.js';
import dayjs from '../../common/dayjs.js';
import { PackageVersionDownloadRepository } from '../../repository/PackageVersionDownloadRepository.js';
import type { PackageVersionDownloadRepository } from '../../repository/PackageVersionDownloadRepository.js';
const DATE_FORMAT = 'YYYY-MM-DD';
@@ -23,17 +26,25 @@ export class DownloadController extends AbstractController {
path: `/downloads/range/:range/:fullname(${FULLNAME_REG_STRING})`,
method: HTTPMethodEnum.GET,
})
async showPackageDownloads(@HTTPParam() fullname: string, @HTTPParam() range: string) {
const [ startDate, endDate ] = this.checkAndGetRange(range);
const [ scope, name ] = getScopeAndName(fullname);
async showPackageDownloads(
@HTTPParam() fullname: string,
@HTTPParam() range: string
) {
const [startDate, endDate] = this.checkAndGetRange(range);
const [scope, name] = getScopeAndName(fullname);
const pkg = await this.packageRepository.findPackage(scope, name);
if (!pkg) throw new NotFoundError(`${fullname} not found`);
const entities = await this.packageVersionDownloadRepository.query(pkg.packageId, startDate.toDate(), endDate.toDate());
const entities = await this.packageVersionDownloadRepository.query(
pkg.packageId,
startDate.toDate(),
endDate.toDate()
);
const days: Record<string, number> = {};
const versions: Record<string, { day: string, downloads: number }[]> = {};
const versions: Record<string, { day: string; downloads: number }[]> = {};
for (const entity of entities) {
const yearMonth = String(entity.yearMonth);
const prefix = yearMonth.substring(0, 4) + '-' + yearMonth.substring(4, 6);
const prefix =
yearMonth.substring(0, 4) + '-' + yearMonth.substring(4, 6);
for (let i = 1; i <= 31; i++) {
const day = String(i).padStart(2, '0');
const field = `d${day}` as keyof typeof entity;
@@ -64,13 +75,21 @@ export class DownloadController extends AbstractController {
path: '/downloads/:scope/:range',
method: HTTPMethodEnum.GET,
})
async showTotalDownloads(@HTTPParam() scope: string, @HTTPParam() range: string) {
const [ startDate, endDate ] = this.checkAndGetRange(range);
const entities = await this.packageVersionDownloadRepository.query(scope, startDate.toDate(), endDate.toDate());
async showTotalDownloads(
@HTTPParam() scope: string,
@HTTPParam() range: string
) {
const [startDate, endDate] = this.checkAndGetRange(range);
const entities = await this.packageVersionDownloadRepository.query(
scope,
startDate.toDate(),
endDate.toDate()
);
const days: Record<string, number> = {};
for (const entity of entities) {
const yearMonth = String(entity.yearMonth);
const prefix = yearMonth.substring(0, 4) + '-' + yearMonth.substring(4, 6);
const prefix =
yearMonth.substring(0, 4) + '-' + yearMonth.substring(4, 6);
for (let i = 1; i <= 31; i++) {
const day = String(i).padStart(2, '0');
const field = `d${day}` as keyof typeof entity;
@@ -93,14 +112,18 @@ export class DownloadController extends AbstractController {
private checkAndGetRange(range: string) {
const matchs = /^(\d{4}-\d{2}-\d{2}):(\d{4}-\d{2}-\d{2})$/.exec(range);
if (!matchs) {
throw new UnprocessableEntityError(`range(${range}) format invalid, must be "${DATE_FORMAT}:${DATE_FORMAT}" style`);
throw new UnprocessableEntityError(
`range(${range}) format invalid, must be "${DATE_FORMAT}:${DATE_FORMAT}" style`
);
}
const start = matchs[1];
const end = matchs[2];
let startDate = dayjs(start, DATE_FORMAT, true);
let endDate = dayjs(end, DATE_FORMAT, true);
if (!startDate.isValid() || !endDate.isValid()) {
throw new UnprocessableEntityError(`range(${range}) format invalid, must be "${DATE_FORMAT}:${DATE_FORMAT}" style`);
throw new UnprocessableEntityError(
`range(${range}) format invalid, must be "${DATE_FORMAT}:${DATE_FORMAT}" style`
);
}
if (endDate.isBefore(startDate)) {
const tmp = startDate;
@@ -111,8 +134,9 @@ export class DownloadController extends AbstractController {
const maxDate = startDate.add(1, 'year');
if (endDate.isAfter(maxDate)) {
throw new UnprocessableEntityError(
`range(${range}) beyond the processable range, max up to "${maxDate.format(DATE_FORMAT)}"`);
`range(${range}) beyond the processable range, max up to "${maxDate.format(DATE_FORMAT)}"`
);
}
return [ startDate, endDate ];
return [startDate, endDate];
}
}
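
The loops above unfold the storage layout for download counts: one row per month, keyed by `yearMonth` with `d01`..`d31` counter columns, expanded into per-day totals. A hedged sketch of that expansion with an illustrative row shape:

```ts
type DownloadRow = { yearMonth: number } & Record<string, number | undefined>;

function expandRow(row: DownloadRow): Record<string, number> {
  const days: Record<string, number> = {};
  const ym = String(row.yearMonth); // e.g. 202401
  const prefix = ym.substring(0, 4) + '-' + ym.substring(4, 6); // "2024-01"
  for (let i = 1; i <= 31; i++) {
    const day = String(i).padStart(2, '0');
    const counter = row[`d${day}`];
    if (typeof counter === 'number' && counter > 0) {
      days[`${prefix}-${day}`] = counter; // e.g. "2024-01-05": 123
    }
  }
  return days;
}
```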

View File

@@ -1,17 +1,21 @@
import { performance } from 'node:perf_hooks';
import type { EggContext } from '@eggjs/tegg';
import {
HTTPController,
HTTPMethod,
HTTPMethodEnum,
Context,
EggContext,
Inject,
} from '@eggjs/tegg';
import pkg from 'egg/package.json' with { type: 'json' };
import { AbstractController } from './AbstractController.js';
import { CacheService, DownloadInfo, UpstreamRegistryInfo } from '../../core/service/CacheService.js';
import { HomeService } from '../../core/service/HomeService.js';
import type {
CacheService,
DownloadInfo,
UpstreamRegistryInfo,
} from '../../core/service/CacheService.js';
import type { HomeService } from '../../core/service/HomeService.js';
const EggVersion = pkg.version;
const startTime = new Date();
@@ -24,9 +28,9 @@ const startTime = new Date();
type SiteTotalData = LegacyInfo & SiteEnvInfo & TotalInfo;
type LegacyInfo = {
source_registry: string,
changes_stream_registry: string,
sync_changes_steam: any,
source_registry: string;
changes_stream_registry: string;
sync_changes_steam: any;
};
type SiteEnvInfo = {
@@ -50,7 +54,6 @@ type TotalInfo = {
upstream_registries?: UpstreamRegistryInfo[];
};
@HTTPController()
export class HomeController extends AbstractController {
@Inject()
@@ -124,5 +127,4 @@ export class HomeController extends AbstractController {
async miscGet(@Context() ctx: EggContext) {
await this.homeService.misc(ctx.path);
}
}

View File

@@ -1,6 +1,6 @@
import type { EggContext } from '@eggjs/tegg';
import {
Context,
EggContext,
HTTPBody,
HTTPController,
HTTPMethod,
@@ -9,11 +9,11 @@ import {
Inject,
} from '@eggjs/tegg';
import { HookManageService } from '../../core/service/HookManageService.js';
import { TaskService } from '../../core/service/TaskService.js';
import { UserRoleManager } from '../UserRoleManager.js';
import { HookType } from '../../common/enum/Hook.js';
import { TriggerHookTask } from '../../core/entity/Task.js';
import type { HookManageService } from '../../core/service/HookManageService.js';
import type { TaskService } from '../../core/service/TaskService.js';
import type { UserRoleManager } from '../UserRoleManager.js';
import type { HookType } from '../../common/enum/Hook.js';
import type { TriggerHookTask } from '../../core/entity/Task.js';
import { HookConvertor } from './convertor/HookConvertor.js';
import { CreateHookRequestRule, UpdateHookRequestRule } from '../typebox.js';
@@ -46,9 +46,15 @@ export class HookController {
path: '/v1/hooks/hook',
method: HTTPMethodEnum.POST,
})
async createHook(@Context() ctx: EggContext, @HTTPBody() req: CreateHookRequest) {
async createHook(
@Context() ctx: EggContext,
@HTTPBody() req: CreateHookRequest
) {
ctx.tValidate(CreateHookRequestRule, req);
const user = await this.userRoleManager.requiredAuthorizedUser(ctx, 'setting');
const user = await this.userRoleManager.requiredAuthorizedUser(
ctx,
'setting'
);
const hook = await this.hookManageService.createHook({
ownerId: user.userId,
type: req.type as HookType,
@@ -63,9 +69,16 @@ export class HookController {
path: '/v1/hooks/hook/:id',
method: HTTPMethodEnum.PUT,
})
async updateHook(@Context() ctx: EggContext, @HTTPParam() id: string, @HTTPBody() req: UpdateHookRequest) {
async updateHook(
@Context() ctx: EggContext,
@HTTPParam() id: string,
@HTTPBody() req: UpdateHookRequest
) {
ctx.tValidate(UpdateHookRequestRule, req);
const user = await this.userRoleManager.requiredAuthorizedUser(ctx, 'setting');
const user = await this.userRoleManager.requiredAuthorizedUser(
ctx,
'setting'
);
const hook = await this.hookManageService.updateHook({
operatorId: user.userId,
hookId: id,
@@ -74,7 +87,9 @@ export class HookController {
});
let task: TriggerHookTask | null = null;
if (hook.latestTaskId) {
task = await this.taskService.findTask(hook.latestTaskId) as TriggerHookTask;
task = (await this.taskService.findTask(
hook.latestTaskId
)) as TriggerHookTask;
}
return HookConvertor.convertToHookVo(hook, user, task);
}
@@ -84,14 +99,19 @@ export class HookController {
method: HTTPMethodEnum.DELETE,
})
async deleteHook(@Context() ctx: EggContext, @HTTPParam() id: string) {
const user = await this.userRoleManager.requiredAuthorizedUser(ctx, 'setting');
const user = await this.userRoleManager.requiredAuthorizedUser(
ctx,
'setting'
);
const hook = await this.hookManageService.deleteHook({
operatorId: user.userId,
hookId: id,
});
let task: TriggerHookTask | null = null;
if (hook.latestTaskId) {
task = await this.taskService.findTask(hook.latestTaskId) as TriggerHookTask;
task = (await this.taskService.findTask(
hook.latestTaskId
)) as TriggerHookTask;
}
return HookConvertor.convertToDeleteHookVo(hook, user, task);
}
@@ -103,9 +123,13 @@ export class HookController {
async listHooks(@Context() ctx: EggContext) {
const user = await this.userRoleManager.requiredAuthorizedUser(ctx, 'read');
const hooks = await this.hookManageService.listHooksByOwnerId(user.userId);
const tasks = await this.taskService.findTasks(hooks.map(t => t.latestTaskId).filter((t): t is string => !!t));
const tasks = await this.taskService.findTasks(
hooks.map(t => t.latestTaskId).filter((t): t is string => !!t)
);
const res = hooks.map(hook => {
const task = tasks.find(t => t.taskId === hook.latestTaskId) as TriggerHookTask;
const task = tasks.find(
t => t.taskId === hook.latestTaskId
) as TriggerHookTask;
return HookConvertor.convertToHookVo(hook, user, task);
});
return {
@@ -122,7 +146,9 @@ export class HookController {
const hook = await this.hookManageService.getHookByOwnerId(id, user.userId);
let task: TriggerHookTask | null = null;
if (hook.latestTaskId) {
task = await this.taskService.findTask(hook.latestTaskId) as TriggerHookTask;
task = (await this.taskService.findTask(
hook.latestTaskId
)) as TriggerHookTask;
}
return HookConvertor.convertToHookVo(hook, user, task);
}

View File

@@ -1,3 +1,4 @@
import type { EggContext } from '@eggjs/tegg';
import {
HTTPController,
HTTPMethod,
@@ -5,7 +6,6 @@ import {
HTTPParam,
HTTPBody,
Context,
EggContext,
Inject,
Middleware,
} from '@eggjs/tegg';
@@ -13,9 +13,10 @@ import { ForbiddenError } from 'egg-errors';
import { AbstractController } from './AbstractController.js';
import { FULLNAME_REG_STRING } from '../../common/PackageUtil.js';
import { PackageManagerService } from '../../core/service/PackageManagerService.js';
import { PackageVersionBlockRepository } from '../../repository/PackageVersionBlockRepository.js';
import { BlockPackageRule, BlockPackageType } from '../typebox.js';
import type { PackageManagerService } from '../../core/service/PackageManagerService.js';
import type { PackageVersionBlockRepository } from '../../repository/PackageVersionBlockRepository.js';
import type { BlockPackageType } from '../typebox.js';
import { BlockPackageRule } from '../typebox.js';
import { AdminAccess } from '../middleware/AdminAccess.js';
@HTTPController()
@@ -31,19 +32,34 @@ export class PackageBlockController extends AbstractController {
method: HTTPMethodEnum.PUT,
})
@Middleware(AdminAccess)
async blockPackage(@Context() ctx: EggContext, @HTTPParam() fullname: string, @HTTPBody() data: BlockPackageType) {
async blockPackage(
@Context() ctx: EggContext,
@HTTPParam() fullname: string,
@HTTPBody() data: BlockPackageType
) {
const params = { fullname, reason: data.reason };
ctx.tValidate(BlockPackageRule, params);
const packageEntity = await this.getPackageEntityByFullname(params.fullname);
const packageEntity = await this.getPackageEntityByFullname(
params.fullname
);
if (packageEntity.isPrivate) {
throw new ForbiddenError(`Can't block private package "${params.fullname}"`);
throw new ForbiddenError(
`Can't block private package "${params.fullname}"`
);
}
const authorized = await this.userRoleManager.getAuthorizedUserAndToken(ctx);
const block = await this.packageManagerService.blockPackage(packageEntity,
`${params.reason} (operator: ${authorized?.user.name}/${authorized?.user.userId})`);
ctx.logger.info('[PackageBlockController.blockPackage:success] fullname: %s, packageId: %s, packageVersionBlockId: %s',
fullname, packageEntity.packageId, block.packageVersionBlockId);
const authorized =
await this.userRoleManager.getAuthorizedUserAndToken(ctx);
const block = await this.packageManagerService.blockPackage(
packageEntity,
`${params.reason} (operator: ${authorized?.user.name}/${authorized?.user.userId})`
);
ctx.logger.info(
'[PackageBlockController.blockPackage:success] fullname: %s, packageId: %s, packageVersionBlockId: %s',
fullname,
packageEntity.packageId,
block.packageVersionBlockId
);
ctx.status = 201;
return {
ok: true,
@@ -58,15 +74,21 @@ export class PackageBlockController extends AbstractController {
method: HTTPMethodEnum.DELETE,
})
@Middleware(AdminAccess)
async unblockPackage(@Context() ctx: EggContext, @HTTPParam() fullname: string) {
async unblockPackage(
@Context() ctx: EggContext,
@HTTPParam() fullname: string
) {
const packageEntity = await this.getPackageEntityByFullname(fullname);
if (packageEntity.isPrivate) {
throw new ForbiddenError(`Can't unblock private package "${fullname}"`);
}
await this.packageManagerService.unblockPackage(packageEntity);
ctx.logger.info('[PackageBlockController.unblockPackage:success] fullname: %s, packageId: %s',
fullname, packageEntity.packageId);
ctx.logger.info(
'[PackageBlockController.unblockPackage:success] fullname: %s, packageId: %s',
fullname,
packageEntity.packageId
);
return {
ok: true,
};
@@ -79,7 +101,10 @@ export class PackageBlockController extends AbstractController {
})
async listPackageBlocks(@HTTPParam() fullname: string) {
const packageEntity = await this.getPackageEntityByFullname(fullname);
const blocks = await this.packageVersionBlockRepository.listPackageVersionBlocks(packageEntity.packageId);
const blocks =
await this.packageVersionBlockRepository.listPackageVersionBlocks(
packageEntity.packageId
);
return {
data: blocks.map(block => {
return {

View File

@@ -1,3 +1,4 @@
import type { EggContext, BackgroundTaskHelper } from '@eggjs/tegg';
import {
HTTPController,
HTTPMethod,
@@ -5,20 +6,22 @@ import {
HTTPParam,
HTTPBody,
Context,
EggContext,
Inject,
HTTPQuery,
BackgroundTaskHelper,
} from '@eggjs/tegg';
import { ForbiddenError, NotFoundError } from 'egg-errors';
import { AbstractController } from './AbstractController.js';
import { FULLNAME_REG_STRING, getScopeAndName } from '../../common/PackageUtil.js';
import { Task } from '../../core/entity/Task.js';
import { PackageSyncerService } from '../../core/service/PackageSyncerService.js';
import { RegistryManagerService } from '../../core/service/RegistryManagerService.js';
import {
FULLNAME_REG_STRING,
getScopeAndName,
} from '../../common/PackageUtil.js';
import type { Task } from '../../core/entity/Task.js';
import type { PackageSyncerService } from '../../core/service/PackageSyncerService.js';
import type { RegistryManagerService } from '../../core/service/RegistryManagerService.js';
import { TaskState } from '../../common/enum/Task.js';
import { SyncPackageTaskRule, SyncPackageTaskType } from '../typebox.js';
import type { SyncPackageTaskType } from '../typebox.js';
import { SyncPackageTaskRule } from '../typebox.js';
import { SyncMode } from '../../common/constants.js';
@HTTPController()
@@ -34,9 +37,15 @@ export class PackageSyncController extends AbstractController {
private async _executeTaskAsync(task: Task) {
const startTime = Date.now();
this.logger.info('[PackageSyncController:executeTask:start] taskId: %s, targetName: %s, attempts: %s, params: %j, updatedAt: %s, delay %sms',
task.taskId, task.targetName, task.attempts, task.data, task.updatedAt,
startTime - task.updatedAt.getTime());
this.logger.info(
'[PackageSyncController:executeTask:start] taskId: %s, targetName: %s, attempts: %s, params: %j, updatedAt: %s, delay %sms',
task.taskId,
task.targetName,
task.attempts,
task.data,
task.updatedAt,
startTime - task.updatedAt.getTime()
);
let result = 'success';
try {
await this.packageSyncerService.executeTask(task);
@@ -45,8 +54,13 @@ export class PackageSyncController extends AbstractController {
this.logger.error(err);
} finally {
const use = Date.now() - startTime;
this.logger.info('[PackageSyncController:executeTask:%s] taskId: %s, targetName: %s, use %sms',
result, task.taskId, task.targetName, use);
this.logger.info(
'[PackageSyncController:executeTask:%s] taskId: %s, targetName: %s, use %sms',
result,
task.taskId,
task.targetName,
use
);
}
}
@@ -55,11 +69,17 @@ export class PackageSyncController extends AbstractController {
path: `/-/package/:fullname(${FULLNAME_REG_STRING})/syncs`,
method: HTTPMethodEnum.PUT,
})
async createSyncTask(@Context() ctx: EggContext, @HTTPParam() fullname: string, @HTTPBody() data: SyncPackageTaskType) {
async createSyncTask(
@Context() ctx: EggContext,
@HTTPParam() fullname: string,
@HTTPBody() data: SyncPackageTaskType
) {
if (!this.enableSync) {
throw new ForbiddenError('Not allow to sync package');
}
const tips = data.tips || `Sync cause by "${ctx.href}", parent traceId: ${ctx.tracer.traceId}`;
const tips =
data.tips ||
`Sync cause by "${ctx.href}", parent traceId: ${ctx.tracer.traceId}`;
const isAdmin = await this.userRoleManager.isAdmin(ctx);
if (this.config.cnpmcore.syncMode === SyncMode.admin && !isAdmin) {
@@ -77,23 +97,39 @@ export class PackageSyncController extends AbstractController {
specificVersions: data.specificVersions,
};
ctx.tValidate(SyncPackageTaskRule, params);
const [ scope, name ] = getScopeAndName(params.fullname);
const [scope, name] = getScopeAndName(params.fullname);
const packageEntity = await this.packageRepository.findPackage(scope, name);
const registry = await this.registryManagerService.findByRegistryName(data?.registryName);
const registry = await this.registryManagerService.findByRegistryName(
data?.registryName
);
if (!registry && data.registryName) {
throw new ForbiddenError(`Can't find target registry "${data.registryName}"`);
throw new ForbiddenError(
`Can't find target registry "${data.registryName}"`
);
}
if (packageEntity?.isPrivate && !registry) {
throw new ForbiddenError(`Can't sync private package "${params.fullname}"`);
throw new ForbiddenError(
`Can't sync private package "${params.fullname}"`
);
}
if (params.syncDownloadData && !this.packageSyncerService.allowSyncDownloadData) {
if (
params.syncDownloadData &&
!this.packageSyncerService.allowSyncDownloadData
) {
throw new ForbiddenError('Not allow to sync package download data');
}
if (registry && packageEntity?.registryId && packageEntity.registryId !== registry.registryId) {
throw new ForbiddenError(`The package is synced from ${packageEntity.registryId}`);
if (
registry &&
packageEntity?.registryId &&
packageEntity.registryId !== registry.registryId
) {
throw new ForbiddenError(
`The package is synced from ${packageEntity.registryId}`
);
}
const authorized = await this.userRoleManager.getAuthorizedUserAndToken(ctx);
const authorized =
await this.userRoleManager.getAuthorizedUserAndToken(ctx);
const task = await this.packageSyncerService.createTask(params.fullname, {
authorIp: ctx.ip,
authorId: authorized?.user.userId,
@@ -102,17 +138,23 @@ export class PackageSyncController extends AbstractController {
syncDownloadData: params.syncDownloadData,
forceSyncHistory: params.forceSyncHistory,
registryId: registry?.registryId,
specificVersions: params.specificVersions && JSON.parse(params.specificVersions),
specificVersions:
params.specificVersions && JSON.parse(params.specificVersions),
});
ctx.logger.info('[PackageSyncController.createSyncTask:success] taskId: %s, fullname: %s',
task.taskId, fullname);
ctx.logger.info(
'[PackageSyncController.createSyncTask:success] taskId: %s, fullname: %s',
task.taskId,
fullname
);
if (data.force) {
if (isAdmin) {
// set background task timeout to 5min
this.backgroundTaskHelper.timeout = 1000 * 60 * 5;
this.backgroundTaskHelper.run(async () => {
ctx.logger.info('[PackageSyncController.createSyncTask:execute-immediately] taskId: %s',
task.taskId);
ctx.logger.info(
'[PackageSyncController.createSyncTask:execute-immediately] taskId: %s',
task.taskId
);
// execute task in background
await this._executeTaskAsync(task);
});
@@ -133,9 +175,15 @@ export class PackageSyncController extends AbstractController {
path: `/-/package/:fullname(${FULLNAME_REG_STRING})/syncs/:taskId`,
method: HTTPMethodEnum.GET,
})
async showSyncTask(@HTTPParam() fullname: string, @HTTPParam() taskId: string) {
async showSyncTask(
@HTTPParam() fullname: string,
@HTTPParam() taskId: string
) {
const task = await this.packageSyncerService.findTask(taskId);
if (!task) throw new NotFoundError(`Package "${fullname}" sync task "${taskId}" not found`);
if (!task)
throw new NotFoundError(
`Package "${fullname}" sync task "${taskId}" not found`
);
let logUrl: string | undefined;
if (task.state !== TaskState.Waiting) {
logUrl = `${this.config.cnpmcore.registry}/-/package/${fullname}/syncs/${taskId}/log`;
@@ -157,13 +205,26 @@ export class PackageSyncController extends AbstractController {
path: `/-/package/:fullname(${FULLNAME_REG_STRING})/syncs/:taskId/log`,
method: HTTPMethodEnum.GET,
})
async showSyncTaskLog(@Context() ctx: EggContext, @HTTPParam() fullname: string, @HTTPParam() taskId: string) {
async showSyncTaskLog(
@Context() ctx: EggContext,
@HTTPParam() fullname: string,
@HTTPParam() taskId: string
) {
const task = await this.packageSyncerService.findTask(taskId);
if (!task) throw new NotFoundError(`Package "${fullname}" sync task "${taskId}" not found`);
if (task.state === TaskState.Waiting) throw new NotFoundError(`Package "${fullname}" sync task "${taskId}" log not found`);
if (!task)
throw new NotFoundError(
`Package "${fullname}" sync task "${taskId}" not found`
);
if (task.state === TaskState.Waiting)
throw new NotFoundError(
`Package "${fullname}" sync task "${taskId}" log not found`
);
const logUrlOrStream = await this.packageSyncerService.findTaskLog(task);
if (!logUrlOrStream) throw new NotFoundError(`Package "${fullname}" sync task "${taskId}" log not found`);
if (!logUrlOrStream)
throw new NotFoundError(
`Package "${fullname}" sync task "${taskId}" log not found`
);
if (typeof logUrlOrStream === 'string') {
ctx.redirect(logUrlOrStream);
return;
@@ -179,7 +240,11 @@ export class PackageSyncController extends AbstractController {
path: `/:fullname(${FULLNAME_REG_STRING})/sync`,
method: HTTPMethodEnum.PUT,
})
async deprecatedCreateSyncTask(@Context() ctx: EggContext, @HTTPParam() fullname: string, @HTTPQuery() nodeps: string) {
async deprecatedCreateSyncTask(
@Context() ctx: EggContext,
@HTTPParam() fullname: string,
@HTTPQuery() nodeps: string
) {
const options: SyncPackageTaskType = {
fullname,
tips: `Sync cause by "${ctx.href}", parent traceId: ${ctx.tracer.traceId}`,
@@ -201,9 +266,13 @@ export class PackageSyncController extends AbstractController {
path: `/:fullname(${FULLNAME_REG_STRING})/sync/log/:taskId`,
method: HTTPMethodEnum.GET,
})
async deprecatedShowSyncTask(@HTTPParam() fullname: string, @HTTPParam() taskId: string) {
async deprecatedShowSyncTask(
@HTTPParam() fullname: string,
@HTTPParam() taskId: string
) {
const task = await this.showSyncTask(fullname, taskId);
const syncDone = task.state !== TaskState.Waiting && task.state !== TaskState.Processing;
const syncDone =
task.state !== TaskState.Waiting && task.state !== TaskState.Processing;
const stateMessage = syncDone ? '[done]' : '[processing]';
// https://github.com/cnpm/cnpm/blob/cadd3cd54c22b1a157810a43ab10febdb2410ca6/bin/cnpm-sync#L82
const log = `[${new Date().toISOString()}] ${stateMessage} Sync ${fullname} data: ${JSON.stringify(task)}`;


@@ -1,3 +1,4 @@
import type { EggContext } from '@eggjs/tegg';
import {
HTTPController,
HTTPMethod,
@@ -5,14 +6,13 @@ import {
HTTPParam,
HTTPBody,
Context,
EggContext,
Inject,
} from '@eggjs/tegg';
import { ForbiddenError } from 'egg-errors';
import { AbstractController } from './AbstractController.js';
import { FULLNAME_REG_STRING } from '../../common/PackageUtil.js';
import { PackageManagerService } from '../../core/service/PackageManagerService.js';
import type { PackageManagerService } from '../../core/service/PackageManagerService.js';
import { TagRule, TagWithVersionRule } from '../typebox.js';
@HTTPController()
@@ -29,7 +29,9 @@ export class PackageTagController extends AbstractController {
})
async showTags(@HTTPParam() fullname: string) {
const packageEntity = await this.getPackageEntityByFullname(fullname);
const tagEntities = await this.packageRepository.listPackageTags(packageEntity.packageId);
const tagEntities = await this.packageRepository.listPackageTags(
packageEntity.packageId
);
const tags: Record<string, string> = {};
for (const entity of tagEntities) {
tags[entity.tag] = entity.version;
@@ -44,13 +46,25 @@ export class PackageTagController extends AbstractController {
path: `/-/package/:fullname(${FULLNAME_REG_STRING})/dist-tags/:tag`,
method: HTTPMethodEnum.PUT,
})
async saveTag(@Context() ctx: EggContext, @HTTPParam() fullname: string, @HTTPParam() tag: string, @HTTPBody() version: string) {
async saveTag(
@Context() ctx: EggContext,
@HTTPParam() fullname: string,
@HTTPParam() tag: string,
@HTTPBody() version: string
) {
const data = { tag, version };
ctx.tValidate(TagWithVersionRule, data);
const ensureRes = await this.ensurePublishAccess(ctx, fullname, true);
const pkg = ensureRes.pkg!;
const packageVersion = await this.getPackageVersionEntity(pkg, data.version);
await this.packageManagerService.savePackageTag(pkg, data.tag, packageVersion.version);
const packageVersion = await this.getPackageVersionEntity(
pkg,
data.version
);
await this.packageManagerService.savePackageTag(
pkg,
data.tag,
packageVersion.version
);
return { ok: true };
}
@@ -60,7 +74,11 @@ export class PackageTagController extends AbstractController {
path: `/-/package/:fullname(${FULLNAME_REG_STRING})/dist-tags/:tag`,
method: HTTPMethodEnum.DELETE,
})
async removeTag(@Context() ctx: EggContext, @HTTPParam() fullname: string, @HTTPParam() tag: string) {
async removeTag(
@Context() ctx: EggContext,
@HTTPParam() fullname: string,
@HTTPParam() tag: string
) {
const data = { tag };
ctx.tValidate(TagRule, data);
if (tag === 'latest') {


@@ -1,4 +1,5 @@
import { join } from 'node:path';
import type { EggContext } from '@eggjs/tegg';
import {
HTTPController,
HTTPMethod,
@@ -7,34 +8,36 @@ import {
HTTPQuery,
Inject,
Context,
EggContext,
Middleware,
} from '@eggjs/tegg';
import { NotFoundError } from 'egg-errors';
import { AbstractController } from './AbstractController.js';
import { AdminAccess } from '../middleware/AdminAccess.js';
import { getScopeAndName, FULLNAME_REG_STRING } from '../../common/PackageUtil.js';
import { PackageVersionFileService } from '../../core/service/PackageVersionFileService.js';
import { PackageManagerService } from '../../core/service/PackageManagerService.js';
import { PackageVersionFile } from '../../core/entity/PackageVersionFile.js';
import { PackageVersion } from '../../core/entity/PackageVersion.js';
import { DistRepository } from '../../repository/DistRepository.js';
import {
getScopeAndName,
FULLNAME_REG_STRING,
} from '../../common/PackageUtil.js';
import type { PackageVersionFileService } from '../../core/service/PackageVersionFileService.js';
import type { PackageManagerService } from '../../core/service/PackageManagerService.js';
import type { PackageVersionFile } from '../../core/entity/PackageVersionFile.js';
import type { PackageVersion } from '../../core/entity/PackageVersion.js';
import type { DistRepository } from '../../repository/DistRepository.js';
import { Spec } from '../typebox.js';
type FileItem = {
path: string,
type: 'file',
contentType: string,
path: string;
type: 'file';
contentType: string;
integrity: string;
lastModified: Date,
size: number,
lastModified: Date;
size: number;
};
type DirectoryItem = {
path: string,
type: 'directory',
files: (DirectoryItem | FileItem)[],
path: string;
type: 'directory';
files: (DirectoryItem | FileItem)[];
};
function formatFileItem(file: PackageVersionFile): FileItem {
@@ -72,16 +75,27 @@ export class PackageVersionFileController extends AbstractController {
method: HTTPMethodEnum.PUT,
})
@Middleware(AdminAccess)
async sync(@Context() ctx: EggContext, @HTTPParam() fullname: string, @HTTPParam() versionSpec: string) {
async sync(
@Context() ctx: EggContext,
@HTTPParam() fullname: string,
@HTTPParam() versionSpec: string
) {
ctx.tValidate(Spec, `${fullname}@${versionSpec}`);
this.#requireUnpkgEnable();
const [ scope, name ] = getScopeAndName(fullname);
const { packageVersion } = await this.packageManagerService.showPackageVersionByVersionOrTag(
scope, name, versionSpec);
const [scope, name] = getScopeAndName(fullname);
const { packageVersion } =
await this.packageManagerService.showPackageVersionByVersionOrTag(
scope,
name,
versionSpec
);
if (!packageVersion) {
throw new NotFoundError(`${fullname}@${versionSpec} not found`);
}
const files = await this.packageVersionFileService.syncPackageVersionFiles(packageVersion);
const files =
await this.packageVersionFileService.syncPackageVersionFiles(
packageVersion
);
return files.map(file => formatFileItem(file));
}
@@ -92,15 +106,23 @@ export class PackageVersionFileController extends AbstractController {
path: `/:fullname(${FULLNAME_REG_STRING})/:versionSpec/files`,
method: HTTPMethodEnum.GET,
})
async listFiles(@Context() ctx: EggContext,
@HTTPParam() fullname: string,
@HTTPParam() versionSpec: string,
@HTTPQuery() meta: string) {
async listFiles(
@Context() ctx: EggContext,
@HTTPParam() fullname: string,
@HTTPParam() versionSpec: string,
@HTTPQuery() meta: string
) {
this.#requireUnpkgEnable();
ctx.tValidate(Spec, `${fullname}@${versionSpec}`);
ctx.vary(this.config.cnpmcore.cdnVaryHeader);
const [ scope, name ] = getScopeAndName(fullname);
const packageVersion = await this.#getPackageVersion(ctx, fullname, scope, name, versionSpec);
const [scope, name] = getScopeAndName(fullname);
const packageVersion = await this.#getPackageVersion(
ctx,
fullname,
scope,
name,
versionSpec
);
ctx.set('cache-control', META_CACHE_CONTROL);
const hasMeta = typeof meta === 'string' || ctx.path.endsWith('/files/');
// meta request
@@ -111,7 +133,14 @@ export class PackageVersionFileController extends AbstractController {
}
return files;
}
const { manifest } = await this.packageManagerService.showPackageVersionManifest(scope, name, versionSpec, false, true);
const { manifest } =
await this.packageManagerService.showPackageVersionManifest(
scope,
name,
versionSpec,
false,
true
);
// GET /foo/1.0.0/files => /foo/1.0.0/files/{main}
// ignore empty entry exp: @types/node@20.2.5/
const indexFile = manifest?.main || 'index.js';
@@ -124,40 +153,62 @@ export class PackageVersionFileController extends AbstractController {
path: `/:fullname(${FULLNAME_REG_STRING})/:versionSpec/files/:path(.+)`,
method: HTTPMethodEnum.GET,
})
async raw(@Context() ctx: EggContext,
@HTTPParam() fullname: string,
@HTTPParam() versionSpec: string,
@HTTPParam() path: string,
@HTTPQuery() meta: string) {
async raw(
@Context() ctx: EggContext,
@HTTPParam() fullname: string,
@HTTPParam() versionSpec: string,
@HTTPParam() path: string,
@HTTPQuery() meta: string
) {
this.#requireUnpkgEnable();
ctx.tValidate(Spec, `${fullname}@${versionSpec}`);
ctx.vary(this.config.cnpmcore.cdnVaryHeader);
const [ scope, name ] = getScopeAndName(fullname);
const [scope, name] = getScopeAndName(fullname);
path = `/${path}`;
const packageVersion = await this.#getPackageVersion(ctx, fullname, scope, name, versionSpec);
const packageVersion = await this.#getPackageVersion(
ctx,
fullname,
scope,
name,
versionSpec
);
if (path.endsWith('/')) {
const directory = path.substring(0, path.length - 1);
const files = await this.#listFilesByDirectory(packageVersion, directory);
if (!files) {
throw new NotFoundError(`${fullname}@${versionSpec}/files${directory} not found`);
throw new NotFoundError(
`${fullname}@${versionSpec}/files${directory} not found`
);
}
ctx.set('cache-control', META_CACHE_CONTROL);
return files;
}
await this.packageVersionFileService.checkPackageVersionInUnpkgWhiteList(scope, name, packageVersion.version);
const file = await this.packageVersionFileService.showPackageVersionFile(packageVersion, path);
await this.packageVersionFileService.checkPackageVersionInUnpkgWhiteList(
scope,
name,
packageVersion.version
);
const file = await this.packageVersionFileService.showPackageVersionFile(
packageVersion,
path
);
const hasMeta = typeof meta === 'string';
if (!file) {
const possibleFile = await this.#searchPossibleEntries(packageVersion, path);
const possibleFile = await this.#searchPossibleEntries(
packageVersion,
path
);
if (possibleFile) {
const route = `/${fullname}/${versionSpec}/files${possibleFile.path}${hasMeta ? '?meta' : ''}`;
ctx.redirect(route);
return;
}
throw new NotFoundError(`File ${fullname}@${versionSpec}${path} not found`);
throw new NotFoundError(
`File ${fullname}@${versionSpec}${path} not found`
);
}
if (hasMeta) {
@@ -178,13 +229,21 @@ export class PackageVersionFileController extends AbstractController {
* 2. if given path is directory and has `index.js` file, redirect to it. e.g. using `lib` alias to access `lib/index.js` or `lib/index.json`
* @param {PackageVersion} packageVersion packageVersion
* @param {String} path filepath
* @return {Promise<PackageVersionFile | undefined>} return packageVersionFile or null
* @returns {Promise<PackageVersionFile | undefined>} return packageVersionFile or null
*/
async #searchPossibleEntries(packageVersion: PackageVersion, path: string) {
const possiblePath = [ `${path}.js`, `${path}.json`, `${path}/index.js`, `${path}/index.json` ];
const possiblePath = [
`${path}.js`,
`${path}.json`,
`${path}/index.js`,
`${path}/index.json`,
];
for (const pathItem of possiblePath) {
const file = await this.packageVersionFileService.showPackageVersionFile(packageVersion, pathItem);
const file = await this.packageVersionFileService.showPackageVersionFile(
packageVersion,
pathItem
);
if (file) {
return file;
@@ -192,9 +251,19 @@ export class PackageVersionFileController extends AbstractController {
}
}
async #getPackageVersion(ctx: EggContext, fullname: string, scope: string, name: string, versionSpec: string) {
const { blockReason, packageVersion } = await this.packageManagerService.showPackageVersionByVersionOrTag(
scope, name, versionSpec);
async #getPackageVersion(
ctx: EggContext,
fullname: string,
scope: string,
name: string,
versionSpec: string
) {
const { blockReason, packageVersion } =
await this.packageManagerService.showPackageVersionByVersionOrTag(
scope,
name,
versionSpec
);
if (blockReason) {
this.setCDNHeaders(ctx);
throw this.createPackageBlockError(blockReason, fullname, versionSpec);
@@ -204,15 +273,28 @@ export class PackageVersionFileController extends AbstractController {
}
if (packageVersion.version !== versionSpec) {
ctx.set('cache-control', META_CACHE_CONTROL);
let location = ctx.url.replace(`/${fullname}/${versionSpec}/files`, `/${fullname}/${packageVersion.version}/files`);
location = location.replace(`/${fullname}/${encodeURIComponent(versionSpec)}/files`, `/${fullname}/${packageVersion.version}/files`);
let location = ctx.url.replace(
`/${fullname}/${versionSpec}/files`,
`/${fullname}/${packageVersion.version}/files`
);
location = location.replace(
`/${fullname}/${encodeURIComponent(versionSpec)}/files`,
`/${fullname}/${packageVersion.version}/files`
);
throw this.createControllerRedirectError(location);
}
return packageVersion;
}
async #listFilesByDirectory(packageVersion: PackageVersion, directory: string) {
const { files, directories } = await this.packageVersionFileService.listPackageVersionFiles(packageVersion, directory);
async #listFilesByDirectory(
packageVersion: PackageVersion,
directory: string
) {
const { files, directories } =
await this.packageVersionFileService.listPackageVersionFiles(
packageVersion,
directory
);
if (files.length === 0 && directories.length === 0) return null;
const info: DirectoryItem = {

Some files were not shown because too many files have changed in this diff.