Compare commits

15 Commits

| Author | SHA1 | Date |
|---|---|---|
| | c1eb0978ba | |
| | c6b8aecfd0 | |
| | 32e842e882 | |
| | 5738d569ea | |
| | 9dd2d4bbe4 | |
| | 0b35ead2a0 | |
| | a64ebd80f3 | |
| | be8387dfa4 | |
| | d6c4cf5029 | |
| | 0ada89b2fc | |
| | 7eb209de13 | |
| | 5965dbddbc | |
| | e40c5021bb | |
| | 65a3df891d | |
| | 43d77ee91e | |
History.md (45 changes)

@@ -1,4 +1,49 @@
+2.7.1 / 2022-11-25
+==================
+
+**fixes**
+  * [[`c6b8aec`](http://github.com/cnpm/cnpmcore/commit/c6b8aecfd0c2b0d454389e931747c431dac5742b)] - fix: request binary error (#360) (Ke Wu <<gemwuu@163.com>>)
+
+2.7.0 / 2022-11-25
+==================
+
+**others**
+  * [[`5738d56`](http://github.com/cnpm/cnpmcore/commit/5738d569ea691c05c3f3b0b74a454a33fefb8fc7)] - refactor: binary sync task use binaryName by default (#358) (Ke Wu <<gemwuu@163.com>>)
+
+2.6.1 / 2022-11-23
+==================
+
+**fixes**
+  * [[`0b35ead`](http://github.com/cnpm/cnpmcore/commit/0b35ead2a0cd73b89d2d961bafec13d7250fe805)] - 🐛 FIX: typo for canvas (fengmk2 <<fengmk2@gmail.com>>)
+
+2.6.0 / 2022-11-23
+==================
+
+**features**
+  * [[`be8387d`](http://github.com/cnpm/cnpmcore/commit/be8387dfa48b9487156542000a93081fa823694a)] - feat: Support canvas sync from different binary (#357) (Ke Wu <<gemwuu@163.com>>)
+
+**fixes**
+  * [[`d6c4cf5`](http://github.com/cnpm/cnpmcore/commit/d6c4cf5029ca6450064fc05696a8624b6c36f0b2)] - fix: duplicate binary task (#354) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
+
+2.5.2 / 2022-11-11
+==================
+
+**fixes**
+  * [[`7eb209d`](http://github.com/cnpm/cnpmcore/commit/7eb209de1332417db2070846891d78f5afa0cd10)] - fix: create task when waiting (#352) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
+
+2.5.1 / 2022-11-07
+==================
+
+**others**
+  * [[`e40c502`](http://github.com/cnpm/cnpmcore/commit/e40c5021bb2ba78f8879d19bc477883168560b85)] - 🐛 FIX: Mirror cypress arm64 binary (#351) (fengmk2 <<fengmk2@gmail.com>>)
+
+2.5.0 / 2022-11-04
+==================
+
+**features**
+  * [[`43d77ee`](http://github.com/cnpm/cnpmcore/commit/43d77ee91e52bd74594d9d569b839c1a4b7fbac6)] - feat: long description (#349) (elrrrrrrr <<elrrrrrrr@gmail.com>>)
+
 2.4.1 / 2022-10-28
 ==================
@@ -19,11 +19,13 @@ export abstract class AbstractBinary {
   protected httpclient: EggContextHttpClient;
   protected logger: EggLogger;
   protected binaryConfig: BinaryTaskConfig;
+  protected binaryName: string;
 
-  constructor(httpclient: EggContextHttpClient, logger: EggLogger, binaryConfig: BinaryTaskConfig) {
+  constructor(httpclient: EggContextHttpClient, logger: EggLogger, binaryConfig: BinaryTaskConfig, binaryName: string) {
     this.httpclient = httpclient;
     this.logger = logger;
     this.binaryConfig = binaryConfig;
+    this.binaryName = binaryName;
   }
 
   abstract fetch(dir: string, params?: any): Promise<FetchResult | undefined>;
@@ -4,13 +4,13 @@ import { BinaryTaskConfig } from '../../../../config/binaries';
 
 export class ApiBinary extends AbstractBinary {
   private apiUrl: string;
 
-  constructor(httpclient: EggContextHttpClient, logger: EggLogger, binaryConfig: BinaryTaskConfig, apiUrl: string) {
-    super(httpclient, logger, binaryConfig);
+  constructor(httpclient: EggContextHttpClient, logger: EggLogger, binaryConfig: BinaryTaskConfig, apiUrl: string, binaryName: string) {
+    super(httpclient, logger, binaryConfig, binaryName);
     this.apiUrl = apiUrl;
   }
 
   async fetch(dir: string): Promise<FetchResult | undefined> {
-    const url = `${this.apiUrl}/${this.binaryConfig.category}${dir}`;
+    const url = `${this.apiUrl}/${this.binaryName}${dir}`;
     const data = await this.requestJSON(url);
     if (!Array.isArray(data)) {
       this.logger.warn('[ApiBinary.fetch:response-data-not-array] data: %j', data);
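For orientation: after this refactor every binary adapter constructor takes the registry-facing binaryName as its final argument, while binaryConfig.category may point at a different upstream source. The updated adapter tests further down in this diff show the new call shape, for example:

    const binary = new ApiBinary(ctx.httpclient, ctx.logger, binaries.node, 'https://cnpmjs.org/mirrors/apis', 'node');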
@@ -31,10 +31,11 @@ export class CypressBinary extends AbstractBinary {
     // "https://cdn.cypress.io/desktop/4.0.0/darwin-x64/cypress.zip"
     // "https://cdn.cypress.io/desktop/4.0.0/linux-x64/cypress.zip"
     // "https://cdn.cypress.io/desktop/4.0.0/win32-x64/cypress.zip"
+    // "https://cdn.cypress.io/desktop/9.2.0/darwin-arm64/cypress.zip"
     // "https://cdn.cypress.io/desktop/9.2.0/darwin-x64/cypress.zip"
     // "https://cdn.cypress.io/desktop/9.2.0/linux-x64/cypress.zip"
     // "https://cdn.cypress.io/desktop/9.2.0/win32-x64/cypress.zip"
-    const platforms = [ 'darwin-x64', 'linux-x64', 'win32-x64' ];
+    const platforms = [ 'darwin-x64', 'darwin-arm64', 'linux-x64', 'win32-x64' ];
     for (const platform of platforms) {
       this.dirItems[subDir].push({
         name: `${platform}/`,
@@ -8,7 +8,7 @@ export class ImageminBinary extends AbstractBinary {
   async fetch(dir: string): Promise<FetchResult | undefined> {
     if (!this.dirItems) {
       this.dirItems = {};
-      const npmPackageName = this.binaryConfig.options?.npmPackageName ?? this.binaryConfig.category;
+      const npmPackageName = this.binaryConfig.options?.npmPackageName ?? this.binaryName;
       const pkgUrl = `https://registry.npmjs.com/${npmPackageName}`;
       const data = await this.requestJSON(pkgUrl);
       this.dirItems = {};
@@ -10,7 +10,7 @@ export class NodePreGypBinary extends AbstractBinary {
   async fetch(dir: string): Promise<FetchResult | undefined> {
     if (!this.dirItems) {
       this.dirItems = {};
-      const pkgUrl = `https://registry.npmjs.com/${this.binaryConfig.category}`;
+      const pkgUrl = `https://registry.npmjs.com/${this.binaryName}`;
       const data = await this.requestJSON(pkgUrl);
       this.dirItems = {};
       this.dirItems['/'] = [];
@@ -77,7 +77,7 @@ export class NodePreGypBinary extends AbstractBinary {
         date,
         size: '-',
         isDir: false,
-        url: `${this.binaryConfig.distUrl}/${this.binaryConfig.category}${versionPrefix}/${name}`,
+        url: `${this.binaryConfig.distUrl}/${this.binaryName}${versionPrefix}/${name}`,
         ignoreDownloadStatuses: [ 404 ],
       });
     }
@@ -99,7 +99,7 @@ export class NodePreGypBinary extends AbstractBinary {
         date,
         size: '-',
         isDir: false,
-        url: `${this.binaryConfig.distUrl}/${this.binaryConfig.category}${versionPrefix}/${name}`,
+        url: `${this.binaryConfig.distUrl}/${this.binaryName}${versionPrefix}/${name}`,
         ignoreDownloadStatuses: [ 404 ],
       });
     }
@@ -163,7 +163,7 @@ export class NodePreGypBinary extends AbstractBinary {
       const binaryFileName = binaryFile.replace('{platform}', platform)
         .replace('{arch}', arch);
       remotePath = remotePath.replace('{module_name}', moduleName)
-        .replace('{name}', this.binaryConfig.category)
+        .replace('{name}', this.binaryName)
         .replace('{version}', version)
         .replace('{configuration}', 'Release');
       const binaryFilePath = join('/', remotePath, binaryFileName);
@@ -216,6 +216,7 @@ export class Task<T extends TaskBaseData = TaskBaseData> extends Entity {
       targetName,
       authorId: `pid_${PID}`,
       authorIp: HOST_NAME,
+      bizId: `SyncBinary:${targetName}`,
       data: {
         // task execute worker
         taskWorker: '',
@@ -30,6 +30,7 @@ import { ElectronBinary } from '../../common/adapter/binary/ElectronBinary';
 import { NodePreGypBinary } from '../../common/adapter/binary/NodePreGypBinary';
 import { ImageminBinary } from '../../common/adapter/binary/ImageminBinary';
 import { PlaywrightBinary } from '../../common/adapter/binary/PlaywrightBinary';
+import { TaskRepository } from 'app/repository/TaskRepository';
 
 const BinaryClasses = {
   [SyncerClass.NodeBinary]: NodeBinary,
@@ -58,6 +59,8 @@ export class BinarySyncerService extends AbstractService {
   @Inject()
   private readonly taskService: TaskService;
+  @Inject()
+  private readonly taskRepository: TaskRepository;
   @Inject()
   private readonly httpclient: EggContextHttpClient;
   @Inject()
   private readonly nfsAdapter: NFSAdapter;
@@ -71,15 +74,51 @@ export class BinarySyncerService extends AbstractService {
   }
 
   public async listRootBinaries(binaryName: string) {
-    return await this.binaryRepository.listBinaries(binaryName, '/');
+    // usually binaryName and category are the same, but some special binaryNames
+    // map to more than one category, e.g. canvas,
+    // so when querying canvas we need to fetch both the binaryName and the category data
+    const {
+      category,
+    } = binaries[binaryName];
+    const reqs = [
+      this.binaryRepository.listBinaries(binaryName, '/'),
+    ];
+    if (category && category !== binaryName) {
+      reqs.push(this.binaryRepository.listBinaries(category, '/'));
+    }
+
+    const [
+      rootBinary,
+      categoryBinary,
+    ] = await Promise.all(reqs);
+
+    const versions = rootBinary.map(b => b.name);
+    categoryBinary?.forEach(b => {
+      const version = b.name;
+      // only add versions that are not already present
+      if (!versions.includes(version)) {
+        rootBinary.push(b);
+      }
+    });
+
+    return rootBinary;
   }
 
   public async downloadBinary(binary: Binary) {
     return await this.nfsAdapter.getDownloadUrlOrStream(binary.storePath);
   }
 
+  // SyncBinary is triggered periodically by a scheduled task on every single machine, so deduplicate manually
+  // add bizId in the DB to prevent duplicates and log duplicate-id errors
   public async createTask(binaryName: string, lastData?: any) {
-    return await this.taskService.createTask(Task.createSyncBinary(binaryName, lastData), false);
+    const existsTask = await this.taskRepository.findTaskByTargetName(binaryName, TaskType.SyncBinary);
+    if (existsTask) {
+      return existsTask;
+    }
+    try {
+      return await this.taskService.createTask(Task.createSyncBinary(binaryName, lastData), false);
+    } catch (e) {
+      this.logger.error('[BinarySyncerService.createTask] binaryName: %s, error: %s', binaryName, e);
+    }
   }
 
   public async findTask(taskId: string) {
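The practical effect of this merged root listing is asserted in the showBinary controller test near the end of this diff; for canvas (whose category is node-canvas-prebuilt) the merged root directories look roughly like the following, with versions already present under the binaryName not duplicated from the category listing:

    [
      { category: 'canvas', name: 'v2.7.0/', type: 'dir', url: 'http://localhost:7001/-/binary/canvas/v2.7.0/' },
      { category: 'node-canvas-prebuilt', name: 'v2.6.1/', type: 'dir', url: 'http://localhost:7001/-/binary/node-canvas-prebuilt/v2.6.1/' },
    ]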
@@ -250,15 +289,13 @@ export class BinarySyncerService extends AbstractService {
 
   private createBinaryInstance(binaryName: string): AbstractBinary | undefined {
     const config = this.config.cnpmcore;
+    const binaryConfig = binaries[binaryName];
 
     if (config.sourceRegistryIsCNpm) {
-      const binaryConfig = binaries[binaryName];
       const syncBinaryFromAPISource = config.syncBinaryFromAPISource || `${config.sourceRegistry}/-/binary`;
-      return new ApiBinary(this.httpclient, this.logger, binaryConfig, syncBinaryFromAPISource);
+      return new ApiBinary(this.httpclient, this.logger, binaryConfig, syncBinaryFromAPISource, binaryName);
     }
-    for (const binaryConfig of Object.values(binaries)) {
-      if (binaryConfig.category === binaryName) {
-        return new BinaryClasses[binaryConfig.syncer](this.httpclient, this.logger, binaryConfig);
-      }
-    }
+
+    return new BinaryClasses[binaryConfig.syncer](this.httpclient, this.logger, binaryConfig, binaryName);
   }
 }
@@ -65,6 +65,7 @@ export interface PublishPackageCmd {
 
 const TOTAL = '@@TOTAL@@';
 const SCOPE_TOTAL_PREFIX = '@@SCOPE@@:';
+const DESCRIPTION_LIMIT = 1024 * 10;
 
 @ContextProto({
   accessLevel: AccessLevel.PUBLIC,
@@ -109,6 +110,11 @@ export class PackageManagerService extends AbstractService {
         pkg.registryId = cmd.registryId;
       }
     }
+
+    // prevent the description from exceeding the db column limit
+    if (pkg.description?.length > DESCRIPTION_LIMIT) {
+      pkg.description = pkg.description.substring(0, DESCRIPTION_LIMIT);
+    }
     await this.packageRepository.savePackage(pkg);
     // create maintainer
     await this.packageRepository.savePackageMaintainer(pkg.packageId, publisher.userId);
@@ -28,16 +28,21 @@ export class TaskService extends AbstractService {
   public async createTask(task: Task, addTaskQueueOnExists: boolean) {
     const existsTask = await this.taskRepository.findTaskByTargetName(task.targetName, task.type);
     if (existsTask) {
-      if (addTaskQueueOnExists && existsTask.state === TaskState.Waiting) {
-        const queueLength = await this.getTaskQueueLength(task.type);
-        if (queueLength < this.config.cnpmcore.taskQueueHighWaterSize) {
-          // make sure waiting task in queue
-          await this.queueAdapter.push<string>(task.type, existsTask.taskId);
-          this.logger.info('[TaskService.createTask:exists-to-queue] taskType: %s, targetName: %s, taskId: %s, queue size: %s',
-            task.type, task.targetName, task.taskId, queueLength);
+      // if the task has not been triggered yet, do not create a duplicate one
+      // if the task is already executing, its state may have been updated, so keep creating a new task in that case
+      if (existsTask.state === TaskState.Waiting) {
+        // raise the priority of the existing task
+        if (addTaskQueueOnExists) {
+          const queueLength = await this.getTaskQueueLength(task.type);
+          if (queueLength < this.config.cnpmcore.taskQueueHighWaterSize) {
+            // make sure waiting task in queue
+            await this.queueAdapter.push<string>(task.type, existsTask.taskId);
+            this.logger.info('[TaskService.createTask:exists-to-queue] taskType: %s, targetName: %s, taskId: %s, queue size: %s',
+              task.type, task.targetName, task.taskId, queueLength);
+          }
         }
+        return existsTask;
       }
-      return existsTask;
     }
     await this.taskRepository.saveTask(task);
     await this.queueAdapter.push<string>(task.type, task.taskId);
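Callers choose the dedupe behaviour through addTaskQueueOnExists: with false, an existing waiting task is simply returned, while with true a waiting task is pushed back onto the queue when it is below taskQueueHighWaterSize. The binary syncer call site shown earlier in this diff uses the conservative form:

    await this.taskService.createTask(Task.createSyncBinary(binaryName, lastData), false);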
@@ -33,9 +33,10 @@ export class BinarySyncController extends AbstractController {
     method: HTTPMethodEnum.GET,
   })
   async listBinaries() {
-    return Object.values(binaries).map(binaryConfig => {
+    return Object.entries(binaries).map(([ binaryName, binaryConfig ]) => {
       return {
-        name: `${binaryConfig.category}/`,
+        name: `${binaryName}/`,
+        category: `${binaryConfig.category}/`,
         description: binaryConfig.description,
         distUrl: binaryConfig.distUrl,
         repoUrl: /^https?:\/\//.test(binaryConfig.repo) ? binaryConfig.repo : `https://github.com/${binaryConfig.repo}`,
@@ -59,7 +60,18 @@ export class BinarySyncController extends AbstractController {
     const parsed = path.parse(subpath);
     const parent = parsed.dir === '/' ? '/' : `${parsed.dir}/`;
     const name = subpath.endsWith('/') ? `${parsed.base}/` : parsed.base;
-    const binary = await this.binarySyncerService.findBinary(binaryName, parent, name);
+    // first look up the case where binaryName === category
+    let binary = await this.binarySyncerService.findBinary(binaryName, parent, name);
+    if (!binary) {
+      // if nothing is found, fall back to looking up the merged category
+      const category = binaries?.[binaryName]?.category;
+      if (category) {
+        // canvas/v2.6.1/canvas-v2.6.1-node-v57-linux-glibc-x64.tar.gz
+        // -> node-canvas-prebuilt/v2.6.1/node-canvas-prebuilt-v2.6.1-node-v57-linux-glibc-x64.tar.gz
+        binary = await this.binarySyncerService.findBinary(category, parent, name.replace(new RegExp(`^${binaryName}-`), `${category}-`));
+      }
+    }
+
     if (!binary) {
       throw new NotFoundError(`Binary "${binaryName}${subpath}" not found`);
     }
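In practice (as exercised by the 'should get binary file success' test near the end of this diff) a request for a canvas file that was mirrored under node-canvas-prebuilt is redirected to the remapped store path:

    GET /-/binary/canvas/v2.6.1/canvas-v2.6.1-node-v57-linux-glibc-x64.tar.gz
    302 -> https://cdn.mock.com/binaries/node-canvas-prebuilt/v2.6.1/node-canvas-prebuilt-v2.6.1-node-v57-linux-glibc-x64.tar.gz

(cdn.mock.com is the mocked NFS download host used by that test, not a real endpoint.)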
@@ -21,10 +21,14 @@ export class CreateSyncBinaryTask {
   async subscribe() {
     if (!this.config.cnpmcore.enableSyncBinary) return;
 
-    for (const binary of Object.values(binaries)) {
-      if (this.config.env === 'unittest' && binary.category !== 'node') continue;
+    for (const [ binaryName, binary ] of Object.entries(binaries)) {
+      if (this.config.env === 'unittest' && binaryName !== 'node') continue;
       if (binary.disable) continue;
-      await this.binarySyncerService.createTask(binary.category);
+
+      // by default only the binaries of binaryName are synced; even when the category differs,
+      // they are synced by the task with the same binaryName
+      // e.g. canvas only syncs binaries whose binaryName is canvas, not those whose category is node-canvas-prebuilt
+      // the node-canvas-prebuilt binaries are synced by the node-canvas-prebuilt task
+      await this.binarySyncerService.createTask(binaryName);
     }
   }
 }
@@ -13,7 +13,7 @@ export enum SyncerClass {
 }
 
 export type BinaryTaskConfig = {
-  category: string;
+  category: string; // category defaults to binaryName, but some binaries have a different category, e.g. canvas covers both canvas and node-canvas-prebuilt
   description: string;
   syncer: SyncerClass;
   repo: string;
@@ -637,13 +637,6 @@ const binaries: {
     repo: 'eugeneware/ffmpeg-static',
     distUrl: 'https://github.com/eugeneware/ffmpeg-static/releases',
   },
-  canvas: {
-    category: 'canvas',
-    description: 'Node canvas is a Cairo backed Canvas implementation for NodeJS.',
-    syncer: SyncerClass.GithubBinary,
-    repo: 'Automattic/node-canvas',
-    distUrl: 'https://github.com/Automattic/node-canvas/releases',
-  },
   nodejieba: {
     category: 'nodejieba',
     description: '"结巴"中文分词的Node.js版本',
@@ -862,6 +855,42 @@ const binaries: {
     repo: 'dragonflyoss/image-service',
     distUrl: 'https://github.com/dragonflyoss/image-service/releases',
   },
+  canvas: {
+    // canvas@<=2.6.1 binaries need to be downloaded from node-canvas-prebuilt
+    category: 'node-canvas-prebuilt',
+    description: 'Node canvas is a Cairo backed Canvas implementation for NodeJS.',
+    syncer: SyncerClass.GithubBinary,
+    repo: 'Automattic/node-canvas',
+    distUrl: 'https://github.com/Automattic/node-canvas/releases',
+  },
+  'canvas-prebuilt': {
+    category: 'canvas-prebuilt',
+    distUrl: 'https://github.com/node-gfx/node-canvas-prebuilt/releases',
+    repo: 'chearon/node-canvas-prebuilt',
+    description: 'Prebuilt versions of node-canvas as a drop-in replacement',
+    syncer: SyncerClass.GithubBinary,
+    options: {
+      nodeArchs: {
+        linux: [ 'x64' ],
+        darwin: [ 'x64' ],
+        win32: [ 'x64' ],
+      },
+    },
+  },
+  'node-canvas-prebuilt': {
+    category: 'node-canvas-prebuilt',
+    distUrl: 'https://github.com/node-gfx/node-canvas-prebuilt/releases',
+    repo: 'node-gfx/node-canvas-prebuilt',
+    description: 'Repo used to build binaries for node-canvas on CI',
+    syncer: SyncerClass.GithubBinary,
+    options: {
+      nodeArchs: {
+        linux: [ 'x64' ],
+        darwin: [ 'x64' ],
+        win32: [ 'x64' ],
+      },
+    },
+  },
 };
 
 export default binaries;
@@ -1,6 +1,6 @@
 {
   "name": "cnpmcore",
-  "version": "2.4.1",
+  "version": "2.7.1",
   "description": "npm core",
   "files": [
     "dist/**/*"
@@ -1,6 +1,6 @@
 -- fix https://github.com/cnpm/cnpmcore/issues/343
 UPDATE
-  `cnpmcore`.`registries`
+  `registries`
 SET
   `host` = 'https://registry.npmjs.org'
 WHERE
@@ -323,4 +323,17 @@ export class TestUtil {
     }
     return Buffer.concat(chunks).toString();
   }
+
+  static pickKeys(obj, keys) {
+    const d: Record<string, any> = [];
+    obj.forEach(item => {
+      const newItem = {};
+      for (const key of keys) {
+        newItem[key] = item[key];
+      }
+
+      d.push(newItem);
+    });
+    return d;
+  }
 }
@@ -22,7 +22,7 @@ describe('test/common/adapter/binary/ApiBinary.test.ts', () => {
       data: await TestUtil.readFixturesFile('cnpmjs.org/mirrors/apis/node.json'),
       persist: false,
     });
-    const binary = new ApiBinary(ctx.httpclient, ctx.logger, binaries.node, 'https://cnpmjs.org/mirrors/apis');
+    const binary = new ApiBinary(ctx.httpclient, ctx.logger, binaries.node, 'https://cnpmjs.org/mirrors/apis', 'node');
     const result = await binary.fetch('/');
     assert(result);
     assert(result.items.length > 0);
@@ -56,7 +56,7 @@ describe('test/common/adapter/binary/ApiBinary.test.ts', () => {
      data: await TestUtil.readFixturesFile('r.cnpmjs.org/-/binary/node/v16.13.1.json'),
       persist: false,
     });
-    const binary = new ApiBinary(ctx.httpclient, ctx.logger, binaries.node, 'https://r.cnpmjs.org/-/binary');
+    const binary = new ApiBinary(ctx.httpclient, ctx.logger, binaries.node, 'https://r.cnpmjs.org/-/binary', 'node');
     const result = await binary.fetch('/v16.13.1/');
     assert(result);
     assert(result.items.length > 0);
@@ -22,7 +22,7 @@ describe('test/common/adapter/binary/BucketBinary.test.ts', () => {
       data: await TestUtil.readFixturesFile('chromedriver.storage.googleapis.com/index.xml'),
       persist: false,
     });
-    const binary = new BucketBinary(ctx.httpclient, ctx.logger, binaries.chromedriver);
+    const binary = new BucketBinary(ctx.httpclient, ctx.logger, binaries.chromedriver, 'chromedriver');
     const result = await binary.fetch('/');
     assert(result);
     assert(result.items.length > 0);
@@ -51,7 +51,7 @@ describe('test/common/adapter/binary/BucketBinary.test.ts', () => {
       data: await TestUtil.readFixturesFile('chromedriver.storage.googleapis.com/97.0.4692.71.xml'),
       persist: false,
     });
-    const binary = new BucketBinary(ctx.httpclient, ctx.logger, binaries.chromedriver);
+    const binary = new BucketBinary(ctx.httpclient, ctx.logger, binaries.chromedriver, 'chromedriver');
     const result = await binary.fetch('/97.0.4692.71/');
     assert(result);
     assert(result.items.length > 0);
@@ -77,7 +77,7 @@ describe('test/common/adapter/binary/BucketBinary.test.ts', () => {
       persist: false,
     });
     // https://selenium-release.storage.googleapis.com/?delimiter=/&prefix=2.43/
-    const binary = new BucketBinary(ctx.httpclient, ctx.logger, binaries.selenium);
+    const binary = new BucketBinary(ctx.httpclient, ctx.logger, binaries.selenium, 'selenium');
     const result = await binary.fetch('/2.43/');
     assert(result);
     assert(result.items.length > 0);
@@ -91,7 +91,7 @@ describe('test/common/adapter/binary/BucketBinary.test.ts', () => {
       data: await TestUtil.readFixturesFile('node-inspector.s3.amazonaws.com/index.xml'),
       persist: false,
     });
-    const binary = new BucketBinary(ctx.httpclient, ctx.logger, binaries['node-inspector']);
+    const binary = new BucketBinary(ctx.httpclient, ctx.logger, binaries['node-inspector'], 'node-inspector');
     const result = await binary.fetch('/');
     assert(result);
     assert(result.items.length > 0);
@@ -105,7 +105,7 @@ describe('test/common/adapter/binary/BucketBinary.test.ts', () => {
       data: await TestUtil.readFixturesFile('prisma-builds.s3-eu-west-1.amazonaws.com/index.xml'),
       persist: false,
     });
-    const binary = new BucketBinary(ctx.httpclient, ctx.logger, binaries.prisma);
+    const binary = new BucketBinary(ctx.httpclient, ctx.logger, binaries.prisma, 'prisma');
     const result = await binary.fetch('/');
     assert(result);
     assert(result.items.length > 0);
@@ -22,7 +22,7 @@ describe('test/common/adapter/binary/CypressBinary.test.ts', () => {
       data: await TestUtil.readFixturesFile('registry.npmjs.com/cypress.json'),
       persist: false,
     });
-    const binary = new CypressBinary(ctx.httpclient, ctx.logger, binaries.cypress);
+    const binary = new CypressBinary(ctx.httpclient, ctx.logger, binaries.cypress, 'cypress');
     const result = await binary.fetch('/');
     assert(result);
     assert(result.items.length > 0);
@@ -51,13 +51,14 @@ describe('test/common/adapter/binary/CypressBinary.test.ts', () => {
       data: await TestUtil.readFixturesFile('registry.npmjs.com/cypress.json'),
       persist: false,
     });
-    const binary = new CypressBinary(ctx.httpclient, ctx.logger, binaries.cypress);
+    const binary = new CypressBinary(ctx.httpclient, ctx.logger, binaries.cypress, 'cypress');
     let result = await binary.fetch('/4.0.0/');
     assert(result);
-    assert(result.items.length === 3);
+    assert(result.items.length === 4);
     assert(result.items[0].name === 'darwin-x64/');
-    assert(result.items[1].name === 'linux-x64/');
-    assert(result.items[2].name === 'win32-x64/');
+    assert(result.items[1].name === 'darwin-arm64/');
+    assert(result.items[2].name === 'linux-x64/');
+    assert(result.items[3].name === 'win32-x64/');
     assert(result.items[0].isDir);
 
     result = await binary.fetch('/4.0.0/darwin-x64/');
@@ -67,6 +68,13 @@ describe('test/common/adapter/binary/CypressBinary.test.ts', () => {
     assert(result.items[0].url === 'https://cdn.cypress.io/desktop/4.0.0/darwin-x64/cypress.zip');
     assert(!result.items[0].isDir);
 
+    result = await binary.fetch('/4.0.0/darwin-arm64/');
+    assert(result);
+    assert(result.items.length === 1);
+    assert(result.items[0].name === 'cypress.zip');
+    assert(result.items[0].url === 'https://cdn.cypress.io/desktop/4.0.0/darwin-arm64/cypress.zip');
+    assert(!result.items[0].isDir);
+
     result = await binary.fetch('/4.0.0/linux-x64/');
     assert(result);
     assert(result.items.length === 1);
@@ -23,7 +23,7 @@ describe('test/common/adapter/binary/ElectronBinary.test.ts', () => {
       data: response,
       status: 200,
     });
-    const binary = new ElectronBinary(ctx.httpclient, ctx.logger, binaries.electron);
+    const binary = new ElectronBinary(ctx.httpclient, ctx.logger, binaries.electron, 'electron');
     let result = await binary.fetch('/');
     assert(result);
     assert(result.items.length > 0);
@@ -23,7 +23,7 @@ describe('test/common/adapter/binary/GithubBinary.test.ts', () => {
       data: response,
       status: 200,
     });
-    const binary = new GithubBinary(ctx.httpclient, ctx.logger, binaries.electron);
+    const binary = new GithubBinary(ctx.httpclient, ctx.logger, binaries.electron, 'electron');
     let result = await binary.fetch('/');
     assert(result);
     assert(result.items.length > 0);
@@ -20,7 +20,7 @@ describe('test/common/adapter/binary/ImageminBinary.test.ts', () => {
     app.mockHttpclient('https://registry.npmjs.com/jpegtran-bin', 'GET', {
       data: await TestUtil.readFixturesFile('registry.npmjs.com/jpegtran-bin.json'),
     });
-    const binary = new ImageminBinary(ctx.httpclient, ctx.logger, binaries['jpegtran-bin']);
+    const binary = new ImageminBinary(ctx.httpclient, ctx.logger, binaries['jpegtran-bin'], 'jpegtran-bin');
     let result = await binary.fetch('/');
     assert(result);
     assert(result.items.length > 0);
@@ -88,7 +88,7 @@ describe('test/common/adapter/binary/ImageminBinary.test.ts', () => {
     app.mockHttpclient('https://registry.npmjs.com/advpng-bin', 'GET', {
       data: await TestUtil.readFixturesFile('registry.npmjs.com/advpng-bin.json'),
     });
-    const binary = new ImageminBinary(ctx.httpclient, ctx.logger, binaries['advpng-bin']);
+    const binary = new ImageminBinary(ctx.httpclient, ctx.logger, binaries['advpng-bin'], 'advpng-bin');
     let result = await binary.fetch('/');
     assert(result);
     assert(result.items.length > 0);
@@ -141,7 +141,7 @@ describe('test/common/adapter/binary/ImageminBinary.test.ts', () => {
     app.mockHttpclient('https://registry.npmjs.com/mozjpeg', 'GET', {
       data: await TestUtil.readFixturesFile('registry.npmjs.com/mozjpeg.json'),
     });
-    const binary = new ImageminBinary(ctx.httpclient, ctx.logger, binaries['mozjpeg-bin']);
+    const binary = new ImageminBinary(ctx.httpclient, ctx.logger, binaries['mozjpeg-bin'], 'mozjpeg-bin');
     let result = await binary.fetch('/');
     assert(result);
     // console.log(result.items);
@@ -205,7 +205,7 @@ describe('test/common/adapter/binary/ImageminBinary.test.ts', () => {
     app.mockHttpclient('https://registry.npmjs.com/gifsicle', 'GET', {
       data: await TestUtil.readFixturesFile('registry.npmjs.com/gifsicle.json'),
     });
-    const binary = new ImageminBinary(ctx.httpclient, ctx.logger, binaries['gifsicle-bin']);
+    const binary = new ImageminBinary(ctx.httpclient, ctx.logger, binaries['gifsicle-bin'], 'gifsicle-bin');
     const result = await binary.fetch('/');
     assert(result);
     // console.log(result.items);
@@ -234,7 +234,7 @@ describe('test/common/adapter/binary/ImageminBinary.test.ts', () => {
     app.mockHttpclient('https://registry.npmjs.com/optipng-bin', 'GET', {
       data: await TestUtil.readFixturesFile('registry.npmjs.com/optipng-bin.json'),
     });
-    const binary = new ImageminBinary(ctx.httpclient, ctx.logger, binaries['optipng-bin']);
+    const binary = new ImageminBinary(ctx.httpclient, ctx.logger, binaries['optipng-bin'], 'optipng-bin');
     const result = await binary.fetch('/');
     assert(result);
     // console.log(result.items);
@@ -263,7 +263,7 @@ describe('test/common/adapter/binary/ImageminBinary.test.ts', () => {
     app.mockHttpclient('https://registry.npmjs.com/zopflipng-bin', 'GET', {
       data: await TestUtil.readFixturesFile('registry.npmjs.com/zopflipng-bin.json'),
     });
-    const binary = new ImageminBinary(ctx.httpclient, ctx.logger, binaries['zopflipng-bin']);
+    const binary = new ImageminBinary(ctx.httpclient, ctx.logger, binaries['zopflipng-bin'], 'zopflipng-bin');
     const result = await binary.fetch('/');
     assert(result);
     // console.log(result.items);
@@ -292,7 +292,7 @@ describe('test/common/adapter/binary/ImageminBinary.test.ts', () => {
     app.mockHttpclient('https://registry.npmjs.com/jpegoptim-bin', 'GET', {
       data: await TestUtil.readFixturesFile('registry.npmjs.com/jpegoptim-bin.json'),
     });
-    const binary = new ImageminBinary(ctx.httpclient, ctx.logger, binaries['jpegoptim-bin']);
+    const binary = new ImageminBinary(ctx.httpclient, ctx.logger, binaries['jpegoptim-bin'], 'jpegoptim-bin');
     const result = await binary.fetch('/');
     assert(result);
     // console.log(result.items);
@@ -321,7 +321,7 @@ describe('test/common/adapter/binary/ImageminBinary.test.ts', () => {
     app.mockHttpclient('https://registry.npmjs.com/guetzli', 'GET', {
      data: await TestUtil.readFixturesFile('registry.npmjs.com/guetzli.json'),
     });
-    const binary = new ImageminBinary(ctx.httpclient, ctx.logger, binaries['guetzli-bin']);
+    const binary = new ImageminBinary(ctx.httpclient, ctx.logger, binaries['guetzli-bin'], 'guetzli-bin');
     const result = await binary.fetch('/');
     assert(result);
     // console.log(result.items);
@@ -21,7 +21,7 @@ describe('test/common/adapter/binary/NodeBinary.test.ts', () => {
     app.mockHttpclient('https://nodejs.org/dist/', 'GET', {
       data: await TestUtil.readFixturesFile('nodejs.org/site/index.html'),
     });
-    const binary = new NodeBinary(ctx.httpclient, ctx.logger, binaries.node);
+    const binary = new NodeBinary(ctx.httpclient, ctx.logger, binaries.node, 'node');
     const result = await binary.fetch('/');
     assert(result);
     assert(result.items.length > 0);
@@ -54,7 +54,7 @@ describe('test/common/adapter/binary/NodeBinary.test.ts', () => {
     app.mockHttpclient('https://nodejs.org/dist/v16.13.1/', 'GET', {
       data: await TestUtil.readFixturesFile('nodejs.org/site/v16.13.1/index.html'),
     });
-    const binary = new NodeBinary(ctx.httpclient, ctx.logger, binaries.node);
+    const binary = new NodeBinary(ctx.httpclient, ctx.logger, binaries.node, 'node');
     const result = await binary.fetch('/v16.13.1/');
     assert(result);
     assert(result.items.length > 0);
@@ -87,7 +87,7 @@ describe('test/common/adapter/binary/NodeBinary.test.ts', () => {
     app.mockHttpclient('https://nodejs.org/download/nightly/v14.0.0-nightly20200119b318926634/', 'GET', {
       data: await TestUtil.readFixturesFile('nodejs.org/download/nightly/v14.0.0-nightly20200119b318926634/index.html'),
     });
-    const binary = new NodeBinary(ctx.httpclient, ctx.logger, binaries['node-nightly']);
+    const binary = new NodeBinary(ctx.httpclient, ctx.logger, binaries['node-nightly'], 'node-nightly');
     const result = await binary.fetch('/v14.0.0-nightly20200119b318926634/');
     assert(result);
     assert(result.items.length > 0);
@@ -129,7 +129,7 @@ describe('test/common/adapter/binary/NodeBinary.test.ts', () => {
     app.mockHttpclient('https://nodejs.org/download/nightly/v14.0.0-nightly20200204ee9e689df2/', 'GET', {
       data: await TestUtil.readFixturesFile('nodejs.org/download/nightly/v14.0.0-nightly20200204ee9e689df2/index.html'),
     });
-    const binary = new NodeBinary(ctx.httpclient, ctx.logger, binaries['node-nightly']);
+    const binary = new NodeBinary(ctx.httpclient, ctx.logger, binaries['node-nightly'], 'node-nightly');
     const result = await binary.fetch('/v14.0.0-nightly20200204ee9e689df2/');
     assert(result);
     assert(result.items.length > 0);
@@ -184,7 +184,7 @@ describe('test/common/adapter/binary/NodeBinary.test.ts', () => {
       persist: false,
     });
 
-    const binary = new NodeBinary(ctx.httpclient, ctx.logger, binaries.python);
+    const binary = new NodeBinary(ctx.httpclient, ctx.logger, binaries.python, 'python');
     let result = await binary.fetch('/');
     assert(result);
     assert(result.items.length > 0);
@@ -24,7 +24,7 @@ describe('test/common/adapter/binary/NodePreGypBinary.test.ts', () => {
     app.mockHttpclient('https://nodejs.org/dist/index.json', 'GET', {
       data: await TestUtil.readFixturesFile('nodejs.org/site/index.json'),
     });
-    const binary = new NodePreGypBinary(ctx.httpclient, ctx.logger, binaries.grpc);
+    const binary = new NodePreGypBinary(ctx.httpclient, ctx.logger, binaries.grpc, 'grpc');
     let result = await binary.fetch('/');
     assert(result);
     assert(result.items.length > 0);
@@ -67,7 +67,7 @@ describe('test/common/adapter/binary/NodePreGypBinary.test.ts', () => {
     app.mockHttpclient('https://nodejs.org/dist/index.json', 'GET', {
       data: await TestUtil.readFixturesFile('nodejs.org/site/index.json'),
     });
-    const binary = new NodePreGypBinary(ctx.httpclient, ctx.logger, binaries['grpc-tools']);
+    const binary = new NodePreGypBinary(ctx.httpclient, ctx.logger, binaries['grpc-tools'], 'grpc-tools');
     let result = await binary.fetch('/');
     assert(result);
     assert(result.items.length > 0);
@@ -111,7 +111,7 @@ describe('test/common/adapter/binary/NodePreGypBinary.test.ts', () => {
     app.mockHttpclient('https://nodejs.org/dist/index.json', 'GET', {
       data: await TestUtil.readFixturesFile('nodejs.org/site/index.json'),
     });
-    const binary = new NodePreGypBinary(ctx.httpclient, ctx.logger, binaries.nodegit);
+    const binary = new NodePreGypBinary(ctx.httpclient, ctx.logger, binaries.nodegit, 'nodegit');
     const result = await binary.fetch('/');
     assert(result);
     assert(result.items.length > 0);
@@ -155,7 +155,7 @@ describe('test/common/adapter/binary/NodePreGypBinary.test.ts', () => {
     app.mockHttpclient('https://nodejs.org/dist/index.json', 'GET', {
       data: await TestUtil.readFixturesFile('nodejs.org/site/index.json'),
     });
-    const binary = new NodePreGypBinary(ctx.httpclient, ctx.logger, binaries['skia-canvas']);
+    const binary = new NodePreGypBinary(ctx.httpclient, ctx.logger, binaries['skia-canvas'], 'skia-canvas');
     let result = await binary.fetch('/');
     assert(result);
     assert(result.items.length > 0);
@@ -210,7 +210,7 @@ describe('test/common/adapter/binary/NodePreGypBinary.test.ts', () => {
     app.mockHttpclient('https://nodejs.org/dist/index.json', 'GET', {
       data: await TestUtil.readFixturesFile('nodejs.org/site/index.json'),
     });
-    const binary = new NodePreGypBinary(ctx.httpclient, ctx.logger, binaries.wrtc);
+    const binary = new NodePreGypBinary(ctx.httpclient, ctx.logger, binaries.wrtc, 'wrtc');
     let result = await binary.fetch('/');
     assert(result);
     assert(result.items.length > 0);
@@ -22,7 +22,7 @@ describe('test/common/adapter/binary/NwjsBinary.test.ts', () => {
       data: await TestUtil.readFixturesFile('dl.nwjs.io/index.html'),
       persist: false,
     });
-    const binary = new NwjsBinary(ctx.httpclient, ctx.logger, binaries.nwjs);
+    const binary = new NwjsBinary(ctx.httpclient, ctx.logger, binaries.nwjs, 'nwjs');
     const result = await binary.fetch('/');
     assert(result);
     assert(result.items.length > 0);
@@ -44,7 +44,7 @@ describe('test/common/adapter/binary/NwjsBinary.test.ts', () => {
       data: await TestUtil.readFixturesFile('nwjs2.s3.amazonaws.com/v0.59.0.xml'),
       persist: false,
     });
-    const binary = new NwjsBinary(ctx.httpclient, ctx.logger, binaries.nwjs);
+    const binary = new NwjsBinary(ctx.httpclient, ctx.logger, binaries.nwjs, 'nwjs');
     let result = await binary.fetch('/v0.59.0/');
     assert(result);
     assert(result.items.length > 0);
@@ -29,7 +29,7 @@ describe('test/common/adapter/binary/PlaywrightBinary.test.ts', () => {
       })
       .reply(200, await TestUtil.readFixturesFile('unpkg.com/playwright-core-browsers.json'))
       .persist();
-    const binary = new PlaywrightBinary(ctx.httpclient, ctx.logger, binaries.playwright);
+    const binary = new PlaywrightBinary(ctx.httpclient, ctx.logger, binaries.playwright, 'playwright');
     const result = await binary.fetch('/');
     assert(result);
     assert(result.items.length > 0);
@@ -57,7 +57,7 @@ describe('test/common/adapter/binary/PlaywrightBinary.test.ts', () => {
       })
       .reply(200, await TestUtil.readFixturesFile('unpkg.com/playwright-core-browsers.json'))
       .persist();
-    const binary = new PlaywrightBinary(ctx.httpclient, ctx.logger, binaries.playwright);
+    const binary = new PlaywrightBinary(ctx.httpclient, ctx.logger, binaries.playwright, 'playwright');
     let result = await binary.fetch('/builds/');
     assert(result);
     // console.log(result.items);
@@ -30,7 +30,7 @@ describe('test/common/adapter/binary/PuppeteerBinary.test.ts', () => {
       data: '1055816',
       persist: false,
     });
-    const binary = new PuppeteerBinary(ctx.httpclient, ctx.logger, binaries['chromium-browser-snapshots']);
+    const binary = new PuppeteerBinary(ctx.httpclient, ctx.logger, binaries['chromium-browser-snapshots'], 'chromium-browser-snapshots');
     let result = await binary.fetch('/');
     assert(result);
     assert(result.items.length === 5);
@@ -22,7 +22,7 @@ describe('test/common/adapter/binary/SqlcipherBinary.test.ts', () => {
       data: await TestUtil.readFixturesFile('registry.npmjs.com/@journeyapps/sqlcipher.json'),
       persist: false,
     });
-    const binary = new SqlcipherBinary(ctx.httpclient, ctx.logger, binaries['@journeyapps/sqlcipher']);
+    const binary = new SqlcipherBinary(ctx.httpclient, ctx.logger, binaries['@journeyapps/sqlcipher'], '@journeyapps/sqlcipher');
     const result = await binary.fetch('/');
     assert(result);
     assert(result.items.length > 0);
@@ -52,7 +52,7 @@ describe('test/common/adapter/binary/SqlcipherBinary.test.ts', () => {
       data: await TestUtil.readFixturesFile('registry.npmjs.com/@journeyapps/sqlcipher.json'),
       persist: false,
     });
-    const binary = new SqlcipherBinary(ctx.httpclient, ctx.logger, binaries['@journeyapps/sqlcipher']);
+    const binary = new SqlcipherBinary(ctx.httpclient, ctx.logger, binaries['@journeyapps/sqlcipher'], '@journeyapps/sqlcipher');
     const result = await binary.fetch('/v5.3.1/');
     assert(result);
     assert(result.items.length > 0);
test/core/service/BinarySyncerService/createTask.test.ts (new file, 27 lines)

@@ -0,0 +1,27 @@
+import assert = require('assert');
+import { app } from 'egg-mock/bootstrap';
+import { Context } from 'egg';
+import { BinarySyncerService } from 'app/core/service/BinarySyncerService';
+
+describe('test/core/service/BinarySyncerService/createTask.test.ts', () => {
+  let ctx: Context;
+  let binarySyncerService: BinarySyncerService;
+
+  beforeEach(async () => {
+    ctx = await app.mockModuleContext();
+    binarySyncerService = await ctx.getEggObject(BinarySyncerService);
+  });
+
+  afterEach(async () => {
+    await app.destroyModuleContext(ctx);
+  });
+
+  describe('createTask()', () => {
+    it('should ignore duplicate binary task', async () => {
+      const task = await binarySyncerService.createTask('banana', {});
+      const newTask = await binarySyncerService.createTask('banana', {});
+      assert(task?.taskId === newTask?.taskId);
+      assert(task?.bizId === 'SyncBinary:banana');
+    });
+  });
+});
@@ -75,6 +75,28 @@ describe('test/core/service/PackageManagerService/publish.test.ts', () => {
     app.expectLog(/\[\d+\.\d+\] \[NFSAdapter:uploadBytes|T\]/);
   });
 
+  it('should work slice long description', async () => {
+    app.mockLog();
+    const { packageId } = await packageManagerService.publish({
+      dist: {
+        content: Buffer.alloc(0),
+      },
+      tag: '',
+      scope: '',
+      name: 'foo',
+      description: '~'.repeat(1100 * 100),
+      packageJson: {},
+      readme: '',
+      version: '1.0.0',
+      isPrivate: true,
+    }, publisher);
+    const pkgVersion = await packageRepository.findPackageVersion(packageId, '1.0.0');
+    assert(pkgVersion);
+    assert.equal(pkgVersion.version, '1.0.0');
+    const pkg = await packageRepository.findPackage('', 'foo');
+    assert(pkg?.description === '~'.repeat(1024 * 10));
+  });
+
   it('should work with dist.localFile', async () => {
     const { packageId } = await packageManagerService.publish({
       dist: {
@@ -1,18 +1,27 @@
 import assert = require('assert');
 import { app, mock } from 'egg-mock/bootstrap';
 import { Context } from 'egg';
+import { setTimeout } from 'timers/promises';
 import { PackageSyncerService } from '../../../../app/core/service/PackageSyncerService';
 import { TestUtil } from '../../../TestUtil';
+import { Task } from 'app/core/entity/Task';
+import { TaskState } from '../../../../app/common/enum/Task';
+import { TaskRepository } from '../../../../app/repository/TaskRepository';
+import { TaskService } from '../../../../app/core/service/TaskService';
 
 describe('test/core/service/PackageSyncerService/createTask.test.ts', () => {
   let ctx: Context;
   const pkgName = '@cnpmcore/foo';
   const username = 'mock_username';
   let packageSyncerService: PackageSyncerService;
+  let taskRepository: TaskRepository;
+  let taskService: TaskService;
 
   beforeEach(async () => {
     ctx = await app.mockModuleContext();
     packageSyncerService = await ctx.getEggObject(PackageSyncerService);
+    taskRepository = await ctx.getEggObject(TaskRepository);
+    taskService = await ctx.getEggObject(TaskService);
 
     await TestUtil.createPackage({
       name: pkgName,
@@ -56,4 +65,25 @@ describe('test/core/service/PackageSyncerService/createTask.test.ts', () => {
     });
     assert(task);
   });
+
+  it('should create task when processing', async () => {
+    mock(PackageSyncerService.prototype, 'executeTask', async (task: Task) => {
+      task.state = TaskState.Processing;
+      await taskRepository.saveTask(task);
+      await setTimeout(2);
+      await taskService.finishTask(task, TaskState.Success);
+    });
+    const task = await packageSyncerService.createTask(pkgName);
+    const res = await Promise.all([ packageSyncerService.executeTask(task), (async () => {
+      await setTimeout(1);
+      return await packageSyncerService.createTask(pkgName);
+    })() ]);
+    assert(res[1].taskId !== task.taskId);
+  });
+
+  it('should not duplicate task when waiting', async () => {
+    const task = await packageSyncerService.createTask(pkgName);
+    const newTask = await packageSyncerService.createTask(pkgName);
+    assert(newTask.taskId === task.taskId);
+  });
 });
@@ -4,15 +4,22 @@ import { app, mock } from 'egg-mock/bootstrap';
 import { BinarySyncerService } from 'app/core/service/BinarySyncerService';
 import { NodeBinary } from 'app/common/adapter/binary/NodeBinary';
 import { SqlcipherBinary } from 'app/common/adapter/binary/SqlcipherBinary';
+import { BinaryRepository } from 'app/repository/BinaryRepository';
+import { Binary } from 'app/core/entity/Binary';
+import { NFSClientAdapter } from 'app/infra/NFSClientAdapter';
+import { TestUtil } from 'test/TestUtil';
 
 describe('test/port/controller/BinarySyncController/showBinary.test.ts', () => {
   let ctx: Context;
   let binarySyncerService: BinarySyncerService;
+  let binaryRepository: BinaryRepository;
+  let nfsClientAdapter: NFSClientAdapter;
 
   beforeEach(async () => {
     ctx = await app.mockModuleContext();
     binarySyncerService = await ctx.getEggObject(BinarySyncerService);
+    binaryRepository = await ctx.getEggObject(BinaryRepository);
+    nfsClientAdapter = await app.getEggObject(NFSClientAdapter);
   });
 
   afterEach(async () => {
@@ -47,6 +54,99 @@ describe('test/port/controller/BinarySyncController/showBinary.test.ts', () => {
     }
   });
 
+  it('should show valid root dirs', async () => {
+    await binaryRepository.saveBinary(Binary.create({
+      category: 'node-canvas-prebuilt',
+      parent: '/',
+      name: 'v2.6.1/',
+      isDir: true,
+      size: 0,
+      date: '2021-12-14T13:12:31.587Z',
+    }));
+    const res = await app.httpRequest()
+      .get('/-/binary/');
+    assert(res.status === 200);
+    assert(res.headers['content-type'] === 'application/json; charset=utf-8');
+    const items = res.body;
+    assert(items.length > 0);
+    for (const item of items) {
+      assert(item.type === 'dir');
+      assert(item.name);
+      assert(item.url);
+      assert(item.repoUrl);
+      assert(item.distUrl);
+      assert(item.description);
+    }
+
+    const item = items.filter((item: any) => item.name === 'nwjs/');
+    assert.deepStrictEqual(item, [{
+      name: 'nwjs/',
+      category: 'nwjs/',
+      description: 'NW.js (previously known as node-webkit) lets you call all Node.js modules directly from DOM and enables a new way of writing applications with all Web technologies.',
+      distUrl: 'https://dl.nwjs.io/',
+      repoUrl: 'https://github.com/nwjs/nw.js',
+      type: 'dir',
+      url: 'http://localhost:7001/-/binary/nwjs/',
+    }]);
+  });
+
+  it('should show valid sub dirs', async () => {
+    await binaryRepository.saveBinary(Binary.create({
+      category: 'node-canvas-prebuilt',
+      parent: '/',
+      name: 'v2.6.1/',
+      isDir: true,
+      size: 0,
+      date: '2021-12-14T13:12:31.587Z',
+    }));
+    const res = await app.httpRequest()
+      .get('/-/binary/node-canvas-prebuilt/');
+    assert(res.status === 200);
+    assert(res.headers['content-type'] === 'application/json; charset=utf-8');
+    const items = TestUtil.pickKeys(res.body, [ 'category', 'name', 'date', 'type', 'url' ]);
+    assert.deepStrictEqual(items, [{
+      category: 'node-canvas-prebuilt',
+      name: 'v2.6.1/',
+      date: '2021-12-14T13:12:31.587Z',
+      type: 'dir',
+      url: 'http://localhost:7001/-/binary/node-canvas-prebuilt/v2.6.1/',
+    }]);
+
+  });
+
+  it('should show valid files', async () => {
+    await binaryRepository.saveBinary(Binary.create({
+      category: 'node-canvas-prebuilt',
+      parent: '/',
+      name: 'v2.6.1/',
+      isDir: true,
+      size: 0,
+      date: '2021-12-14T13:12:31.587Z',
+    }));
+    await binaryRepository.saveBinary(Binary.create({
+      category: 'node-canvas-prebuilt',
+      parent: '/v2.6.1/',
+      name: 'node-canvas-prebuilt-v2.6.1-node-v57-linux-glibc-x64.tar.gz',
+      isDir: false,
+      size: 10,
+      date: '2021-12-14T13:12:31.587Z',
+    }));
+    const res = await app.httpRequest()
+      .get('/-/binary/node-canvas-prebuilt/v2.6.1/');
+    assert(res.status === 200);
+    assert(res.headers['content-type'] === 'application/json; charset=utf-8');
+    const items = TestUtil.pickKeys(res.body, [ 'category', 'name', 'date', 'type', 'url' ]);
+    assert(items.length > 0);
+
+    assert.deepStrictEqual(items, [
+      {
+        category: 'node-canvas-prebuilt',
+        name: 'node-canvas-prebuilt-v2.6.1-node-v57-linux-glibc-x64.tar.gz',
+        date: '2021-12-14T13:12:31.587Z',
+        type: 'file',
+        url: 'http://localhost:7001/-/binary/node-canvas-prebuilt/v2.6.1/node-canvas-prebuilt-v2.6.1-node-v57-linux-glibc-x64.tar.gz',
+      },
+    ]);
+  });
+
   it('should show node binaries', async () => {
     app.mockHttpclient('https://nodejs.org/dist/index.json', 'GET', {
       data: await TestUtil.readFixturesFile('nodejs.org/site/index.json'),
@@ -247,5 +347,210 @@ describe('test/port/controller/BinarySyncController/showBinary.test.ts', () => {
     }
     app.mockAgent().assertNoPendingInterceptors();
   });
 
+    it('should merge category binaries when binaryName and category not equal', async () => {
+      await binaryRepository.saveBinary(Binary.create({
+        category: 'node-canvas-prebuilt',
+        parent: '/',
+        name: 'v2.6.1/',
+        isDir: true,
+        size: 0,
+        date: '2021-12-14T13:12:31.587Z',
+      }));
+      await binaryRepository.saveBinary(Binary.create({
+        category: 'node-canvas-prebuilt',
+        parent: '/',
+        name: 'v2.7.0/',
+        isDir: true,
+        size: 0,
+        date: '2021-12-14T13:12:31.587Z',
+      }));
+      await binaryRepository.saveBinary(Binary.create({
+        category: 'node-canvas-prebuilt',
+        parent: '/v2.6.1/',
+        name: 'node-canvas-prebuilt-v2.6.1-node-v57-linux-glibc-x64.tar.gz',
+        isDir: false,
+        size: 10,
+        date: '2021-12-14T13:12:31.587Z',
+      }));
+
+      await binaryRepository.saveBinary(Binary.create({
+        category: 'canvas',
+        parent: '/v2.7.0/',
+        name: 'canvas-v2.7.0-node-v57-linux-glibc-x64.tar.gz',
+        isDir: false,
+        size: 10,
+        date: '2021-12-14T13:12:31.587Z',
+      }));
+
+      await binaryRepository.saveBinary(Binary.create({
+        category: 'canvas',
+        parent: '/',
+        name: 'v2.7.0/',
+        isDir: true,
+        size: 0,
+        date: '2021-12-14T13:12:31.587Z',
+      }));
+
+      let res = await app.httpRequest()
+        .get('/-/binary/canvas');
+
+      assert.strictEqual(res.status, 200);
+      assert(res.body);
+      let stableData = TestUtil.pickKeys(res.body, [ 'category', 'name', 'date', 'type', 'url' ]);
+      assert.deepStrictEqual(stableData, [
+        {
+          category: 'canvas',
+          name: 'v2.7.0/',
+          date: '2021-12-14T13:12:31.587Z',
+          type: 'dir',
+          url: 'http://localhost:7001/-/binary/canvas/v2.7.0/',
+        },
+        {
+          category: 'node-canvas-prebuilt',
+          name: 'v2.6.1/',
+          date: '2021-12-14T13:12:31.587Z',
+          type: 'dir',
+          url: 'http://localhost:7001/-/binary/node-canvas-prebuilt/v2.6.1/',
+        },
+      ]);
+
+      res = await app.httpRequest()
+        .get('/-/binary/node-canvas-prebuilt');
+
+      assert.strictEqual(res.status, 200);
+      assert(res.body);
+      stableData = TestUtil.pickKeys(res.body, [ 'category', 'name', 'date', 'type', 'url' ]);
+      assert.deepStrictEqual(stableData, [
+        {
+          category: 'node-canvas-prebuilt',
+          name: 'v2.6.1/',
+          date: '2021-12-14T13:12:31.587Z',
+          type: 'dir',
+          url: 'http://localhost:7001/-/binary/node-canvas-prebuilt/v2.6.1/',
+        },
+        {
+          category: 'node-canvas-prebuilt',
+          name: 'v2.7.0/',
+          date: '2021-12-14T13:12:31.587Z',
+          type: 'dir',
+          url: 'http://localhost:7001/-/binary/node-canvas-prebuilt/v2.7.0/',
+        },
+      ]);
+
+      res = await app.httpRequest()
+        .get('/-/binary/canvas/v2.7.0/');
+
+      assert.strictEqual(res.status, 200);
+      assert(res.body);
+      stableData = TestUtil.pickKeys(res.body, [ 'category', 'name', 'date', 'type', 'url' ]);
+
+      assert.deepStrictEqual(stableData, [
+        {
+          name: 'canvas-v2.7.0-node-v57-linux-glibc-x64.tar.gz',
+          type: 'file',
+          category: 'canvas',
+          date: '2021-12-14T13:12:31.587Z',
+          url: 'http://localhost:7001/-/binary/canvas/v2.7.0/canvas-v2.7.0-node-v57-linux-glibc-x64.tar.gz',
+        },
+      ]);
+
+      res = await app.httpRequest()
+        .get('/-/binary/canvas/v2.6.1/');
+
+      assert.strictEqual(res.status, 200);
+      assert(res.body);
+      stableData = TestUtil.pickKeys(res.body, [ 'category', 'name', 'date', 'type', 'url' ]);
+
+      assert.deepStrictEqual(stableData, [
+        {
+          name: 'node-canvas-prebuilt-v2.6.1-node-v57-linux-glibc-x64.tar.gz',
+          type: 'file',
+          category: 'node-canvas-prebuilt',
+          date: '2021-12-14T13:12:31.587Z',
+          url: 'http://localhost:7001/-/binary/node-canvas-prebuilt/v2.6.1/node-canvas-prebuilt-v2.6.1-node-v57-linux-glibc-x64.tar.gz',
+        },
+      ]);
+
+      res = await app.httpRequest()
+        .get('/-/binary/node-canvas-prebuilt/v2.6.1/');
+
+      assert.strictEqual(res.status, 200);
+      assert(res.body);
+      stableData = TestUtil.pickKeys(res.body, [ 'category', 'name', 'date', 'type', 'url' ]);
+
+      assert.deepStrictEqual(stableData, [
+        {
+          name: 'node-canvas-prebuilt-v2.6.1-node-v57-linux-glibc-x64.tar.gz',
+          type: 'file',
+          category: 'node-canvas-prebuilt',
+          date: '2021-12-14T13:12:31.587Z',
+          url: 'http://localhost:7001/-/binary/node-canvas-prebuilt/v2.6.1/node-canvas-prebuilt-v2.6.1-node-v57-linux-glibc-x64.tar.gz',
+        },
+      ]);
+
+      res = await app.httpRequest()
+        .get('/-/binary/canvas/v2.7.1/');
+      assert.strictEqual(res.status, 404);
+
+      res = await app.httpRequest()
+        .get('/-/binary/node-canvas-prebuilt/v2.7.1/');
+
+      assert.strictEqual(res.status, 404);
+    });
+
+    it('should get binary file success', async () => {
+      await binaryRepository.saveBinary(Binary.create({
+        category: 'node-canvas-prebuilt',
+        parent: '/',
+        name: 'v2.6.1/',
+        isDir: true,
+        size: 0,
+        date: '2021-12-14T13:12:31.587Z',
+      }));
+      await binaryRepository.saveBinary(Binary.create({
+        category: 'node-canvas-prebuilt',
+        parent: '/',
+        name: 'v2.7.0/',
+        isDir: true,
+        size: 0,
+        date: '2021-12-14T13:12:31.587Z',
+      }));
+      await binaryRepository.saveBinary(Binary.create({
+        category: 'node-canvas-prebuilt',
+        parent: '/v2.6.1/',
+        name: 'node-canvas-prebuilt-v2.6.1-node-v57-linux-glibc-x64.tar.gz',
+        isDir: false,
+        size: 10,
+        date: '2021-12-14T13:12:31.587Z',
+      }));
+
+      await binaryRepository.saveBinary(Binary.create({
+        category: 'canvas',
+        parent: '/v2.7.0/',
+        name: 'canvas-v2.7.0-node-v57-linux-glibc-x64.tar.gz',
+        isDir: false,
+        size: 10,
+        date: '2021-12-14T13:12:31.587Z',
+      }));
+
+      await binaryRepository.saveBinary(Binary.create({
+        category: 'canvas',
+        parent: '/',
+        name: 'v2.7.0/',
+        isDir: true,
+        size: 0,
+        date: '2021-12-14T13:12:31.587Z',
+      }));
+
+      mock(nfsClientAdapter, 'url', (storeKey: string) => {
+        return `https://cdn.mock.com${storeKey}`;
+      });
+      const res = await app.httpRequest()
+        .get('/-/binary/canvas/v2.6.1/canvas-v2.6.1-node-v57-linux-glibc-x64.tar.gz');
+
+      assert.strictEqual(res.status, 302);
+      assert.strictEqual(res.headers.location, 'https://cdn.mock.com/binaries/node-canvas-prebuilt/v2.6.1/node-canvas-prebuilt-v2.6.1-node-v57-linux-glibc-x64.tar.gz');
+    });
   });
 });
@@ -5,7 +5,6 @@ import { app, mock } from 'egg-mock/bootstrap';
 import { TestUtil } from 'test/TestUtil';
 import { Task as TaskModel } from 'app/repository/model/Task';
 import { PackageSyncerService } from 'app/core/service/PackageSyncerService';
-import { TaskState } from 'app/common/enum/Task';
 
 describe('test/port/controller/PackageSyncController/createSyncTask.test.ts', () => {
   let publisher: any;
@@ -286,7 +285,7 @@ describe('test/port/controller/PackageSyncController/createSyncTask.test.ts', () => {
     assert(res.body.id === firstTaskId);
   });
 
-  it('should dont create exists processing task update less than 1 min', async () => {
+  it('should dont create exists waiting task', async () => {
     let res = await app.httpRequest()
       .put('/-/package/koa/syncs')
       .expect(201);
@@ -295,13 +294,12 @@ describe('test/port/controller/PackageSyncController/createSyncTask.test.ts', () => {
     assert(res.body.id);
     const firstTaskId = res.body.id;
 
-    await TaskModel.update({ taskId: firstTaskId }, { state: TaskState.Processing });
     // again dont create
     res = await app.httpRequest()
       .put('/-/package/koa/syncs')
       .expect(201);
     assert(res.body.ok === true);
-    assert(res.body.state === 'processing');
+    assert(res.body.state === 'waiting');
     assert(res.body.id === firstTaskId);
 
     // update bigger than 1 min, same task return
@@ -310,7 +308,7 @@ describe('test/port/controller/PackageSyncController/createSyncTask.test.ts', () => {
       .put('/-/package/koa/syncs')
       .expect(201);
     assert(res.body.ok === true);
-    assert(res.body.state === 'processing');
+    assert(res.body.state === 'waiting');
     assert(res.body.id === firstTaskId);
   });
 });