Compare commits
857 Commits
.dockerignore (new file, 29 lines added)
@@ -0,0 +1,29 @@
# we try to avoid adding files to the docker images that change often
# or that are not needed for running the docker image
# this greatly reduces the number of times we need to rerun `npm install` when building the image locally
# https://codefresh.io/blog/not-ignore-dockerignore/
# https://docs.docker.com/engine/reference/builder/#dockerignore-file

# consider them hidden
.*
# you can add exceptions like in .gitignore to maintain a whitelist:
# e.g.
#!.babelrc

# not going to run tests inside the docker container
test/

# do not copy over node_modules, we will run `npm install` anyway
node_modules

# output from test runs and similar things
*.log
coverage/

# IDE config files
jsconfig.json
*.iml

# let's not get too recursive ;)
Dockerfile*
docker-compose*.yaml
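
Since the ignore rules above exist to shrink the Docker build context, a quick way to check their effect is to run a local build and watch the context size the daemon reports. A minimal sketch, assuming a `Dockerfile` sits at the repository root; the `verdaccio` tag is only for illustration:

```sh
# Build locally; the "Sending build context to Docker daemon" line shrinks
# noticeably once node_modules, coverage/ and test/ are excluded.
docker build -t verdaccio .
```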

.editorconfig (new file, 12 lines added)
@@ -0,0 +1,12 @@
# top-most EditorConfig file
root = true

# Unix-style newlines with a newline ending every file
[*]
end_of_line = lf
insert_final_newline = true

# 2 space indentation
[{.,}*.{js,yml,yaml}]
indent_style = space
indent_size = 2

.eslintignore (new file, 5 lines added)
@@ -0,0 +1,5 @@
node_modules
lib/static
coverage/
lib/GUI/
wiki/

.eslintrc.yml (new file, 85 lines added)
@@ -0,0 +1,85 @@
# vim: syntax=yaml

#
# List of very light restrictions designed to prevent obvious errors,
# not impose our own code style upon other contributors.
#
# This is supposed to be used with `eslint --reset`
#
# Created to work with eslint@0.18.0
#

extends: ["eslint:recommended", "google"]

env:
  node: true
  browser: true
  es6: true

rules:
  # useful to have in node.js,
  # if you're sure you don't need to handle error, rename it to "_err"
  handle-callback-err: 2

  # just to make sure we don't forget to remove them when releasing
  no-debugger: 2

  # add "falls through" for those
  no-fallthrough: 2

  # enforce use of curly braces always
  # curly: 1

  # just warnings about whitespace weirdness here
  eol-last: 1
  no-irregular-whitespace: 1
  no-mixed-spaces-and-tabs: [1, smart-tabs]
  no-trailing-spaces: 1

  # probably always an error, tell me if it's not
  no-new-require: 2

  # single most important rule here, without it linting won't even
  # make any sense
  no-undef: 2

  # in practice, those are always errors
  no-unreachable: 2

  # useful for code clean-up
  no-unused-vars: [2, {"vars": "all", "args": "none"}]

  max-len: [1, 160]

  # camelcase is standard, but this should be 1 and then 2 soon
  camelcase: 0

  # jsdoc is mandatory
  require-jsdoc: 2
  valid-jsdoc: 2

  # this feature is cool but not supported by Node 4, disable via comments
  prefer-spread: 1
  prefer-rest-params: 1

  # encourage use of es6
  no-var: 2

  # configuration that should be upgraded progressively
  no-constant-condition: 2
  no-empty: 2

  # loop over objects http://eslint.org/docs/rules/guard-for-in
  guard-for-in: 2

  # `this` must only be used within classes
  no-invalid-this: 2

  # all constructor names must be capitalized
  new-cap: 2

  # readability is important, no multiple inline declarations
  one-var: 2

  # console not allowed unless for testing
  no-console: [2, {"allow": ["log", "warn"]}]
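
The config above extends `eslint:recommended` and `google`, so both `eslint` and the `eslint-config-google` shareable config need to be installed for linting to run. A minimal sketch of invoking it locally; treat the exact package wiring as an assumption rather than the project's documented workflow:

```sh
# Install the linter and the shared config as dev dependencies.
npm install --save-dev eslint eslint-config-google

# Lint the project; .eslintrc.yml and .eslintignore in the root are picked up automatically.
./node_modules/.bin/eslint .
```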

.github/issue_template.md (new file, 41 lines added)
@@ -0,0 +1,41 @@
#### My reason:

<!--
a brief explanation of the issue, suggestion, or feature
-->

#### Steps to reproduce:

<!--
(if it applies)
what do I need to do in order to reproduce it? In which environment?
-->
#### App Version:
<!--
State the version in which the issue happens and whether the behaviour was correct in a previous version
-->

#### Config file:
<!--
Providing your config file might be really helpful. Please be sure to hide sensitive data before posting.
-->

#### Additional information:

<!--
providing the following information would be helpful
-->

- `$ set DEBUG=express:* verdaccio` enables extreme verdaccio debug mode
- `$ npm --verbose` prints:
- `$ npm config get registry` prints:
- Verdaccio terminal output
- Windows, OS X/macOS, or Linux?:
- Verdaccio configuration file, eg: `cat ~/.config/verdaccio/config.yaml`
<!-- Remove those that do not apply to you -->
- Container:
- I use local environment
- I develop / deploy using Docker.
- I deploy to a PaaS.

#### Additional verbose log:
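
On Unix-like shells, the `set DEBUG=...` form above (Windows `cmd` syntax) becomes an inline environment variable. A small sketch of collecting the information the template asks for:

```sh
# Run verdaccio with verbose express debug output enabled (Unix shells).
DEBUG=express:* verdaccio

# Gather the registry and configuration details requested above.
npm config get registry
cat ~/.config/verdaccio/config.yaml
```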

.github/pull_request_template.md (new file, 16 lines added)
@@ -0,0 +1,16 @@
**Type:** bug / feature

The following has been addressed in the PR:

<!-- Remove the sections that do not apply to your PR -->
* There is a related issue
* Unit or Functional tests are included in the PR

<!--
Our bots should ensure:
* The PR passes CI testing
-->

**Description:**

Resolves #???

.gitignore (modified)
@@ -1,9 +1,22 @@
node_modules
package.json
npm-debug.log
sinopia-*.tgz
verdaccio-*.tgz
.DS_Store

###
bin/storage*
bin/*.yaml
test/test-storage*
!bin/verdaccio
test-storage*
.verdaccio_test_env
node_modules

# Istanbul
coverage/
.nyc*

# Visual Studio Code
.vscode/*
.jscsrc
.jshintrc
jsconfig.json

.idea/

.npmignore (new file, 7 lines added)
@@ -0,0 +1,7 @@
node_modules
npm-debug.log
coverage/
verdaccio-*.tgz
test-storage*
/.*
wiki/
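
Because `.npmignore` controls what goes into the published tarball, a quick way to verify the rules above is to build the archive locally and inspect it. A minimal sketch using standard npm and tar commands:

```sh
# Create the tarball npm would publish, then list its contents.
npm pack
tar -tzf verdaccio-*.tgz
```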

.travis.yml (new file, 9 lines added)
@@ -0,0 +1,9 @@
language: node_js
node_js:
- '4'
- '6'
- '7'
- '8'
sudo: false
script: npm install . && npm run test-travis
after_success: npm run coverage:codecov
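
The CI job can be reproduced locally by running the same npm scripts Travis invokes; a sketch, assuming a Node version from the 4 to 8 range listed above:

```sh
# Same commands Travis runs, taken from the script/after_success entries above.
npm install . && npm run test-travis
npm run coverage:codecov
```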

AUTHORS (new file, 74 lines added)
@@ -0,0 +1,74 @@
030 <chocolatey030@gmail.com>
Alex Kocharin <alex@kocharin.ru>
Alex Kocharin <rlidwka@kocharin.ru>
Alex Vernacchia <avernacchia@exacttarget.com>
Alexander Makarenko <estliberitas@gmail.com>
Alexandre-io <Alexandre-io@users.noreply.github.com>
Aram Drevekenin <grimsniffer@gmail.com>
Bart Dubois <dubcio@o2.pl>
Barthélemy Vessemont <bvessemont@gmail.com>
Brandon Nicholls <brandon.nicholls@gmail.com>
Bren Norris <bnorris@enterrasolutions.com>
Brett Trotter <brett.trotter@webfilings.com>
Brian Peacock <bpeacock@fastfig.com>
Cedric Darne <cdarne@hibernum.com>
Chad Killingsworth <chad.killingsworth@banno.com>
Chris Breneman <crispy@cluenet.org>
Cody Droz <cody-geest@uiowa.edu>
Daniel Rodríguez Rivero <rdanielo@gmail.com>
Denis Babineau <denis.babineau@gmail.com>
Emmanuel Narh <narhe@advisory.com>
Fabio Poloni <fabio@APP-roved.com>
Facundo Chambó <fchambo@despegar.com>
Guilherme Bernal <dev@lbguilherme.com>
Jakub Jirutka <jakub@jirutka.cz>
James Newell <j.newell@nib.com.au>
Jan Vansteenkiste <jan@vstone.eu>
Jannis Achstetter <jannis.achstetter@schneider-electric.com>
Jeremy Moritz <jeremy@jeremymoritz.com>
John Gozde <johng@pandell.com>
Jon de la Motte <jondlm@gmail.com>
Joseph Gentle <me@josephg.com>
José De Paz <josedepaz@users.noreply.github.com>
Juan Carlos Picado <juan@encuestame.org>
Juan Carlos Picado <juanpicado19@gmail.com>
Juan Picado <juanpicado19@gmail.com>
Juan Picado @jotadeveloper <juanpicado19@gmail.com>
Kalman Speier <kalman.speier@gmail.com>
Keyvan Fatehi <keyvanfatehi@gmail.com>
Kody J. Peterson <kodypeterson@users.noreply.github.com>
Madison Grubb <madison.grubb@itential.com>
Manuel de Brito Fontes <aledbf@gmail.com>
Mark Doeswijk <mark.doeswijk@marviq.com>
Meeeeow <i@aka.mn>
Meeeeow <me@async.sh>
Michael Arnel <michael.arnel@gmail.com>
Michael Crowe <michael@developrise.com>
Miguel Mejias <miguelangelmejias@dorna.com>
Miroslav Bajtoš <miroslav@strongloop.com>
Nate Ziarek <natez@OSX12-L-NATEZ.local>
Nick <nick.edelenbos@trimm.nl>
Piotr Synowiec <psynowiec@gmail.com>
Rafael Cesar <rafa.cesar@gmail.com>
Robert Ewald <r3wald@gmail.com>
Robert Groh <robert.groh@medesso.de>
Robin Persson <rprssn@gmail.com>
Romain Lai-King <romain.laiking@opentrust.com>
Ryan Graham <r.m.graham@gmail.com>
Ryan Graham <ryan@codingintrigue.co.uk>
Sam Day <sday@atlassian.com>
Tarun Garg <tarun1793@users.noreply.github.com>
Thomas Cort <thomasc@ssimicro.com>
Tom Vincent <git@tlvince.com>
Trent Earl <trent@trentearl.com>
Yannick Croissant <yannick.croissant@gmail.com>
Yannick Galatol <ygalatol@teads.tv>
cklein <trancesilken@gmail.com>
danielo515 <rdanielo@gmail.com>
jmwilkinson <j.wilkinson@f5.com>
jotadeveloper <juanpicado19@gmail.com>
jotadeveloper <juanpicado@users.noreply.github.com>
maxlaverse <max@laverse.net>
saheba <saheba@users.noreply.github.com>
steve-p-com <github@steve-p.com>
trent.earl <trent.earl@malauzai.com>

CHANGELOG.md (new file, 406 lines added)
@@ -0,0 +1,406 @@
## 2.2.5 (July 4, 2017)

- Fixed adding the verdaccio user into the group - [#241](https://github.com/verdaccio/verdaccio/pull/241)

## 2.2.3 (July 4, 2017)

- Updated Dockerfile & added proper signal handling - [#239](https://github.com/verdaccio/verdaccio/pull/239)

## 2.2.2 (July 2, 2017)

- Improve Docker Build - [#181](https://github.com/verdaccio/verdaccio/pull/181)
- Bugfix #73 `npm-latest` support - [#228](https://github.com/verdaccio/verdaccio/pull/228)
- Add [documentation](https://github.com/verdaccio/verdaccio/tree/master/wiki) - [#229](https://github.com/verdaccio/verdaccio/pull/229)

## 2.2.1 (June 17, 2017)

- config section moved up, some keywords added - [#211](https://github.com/verdaccio/verdaccio/pull/211)
- docs: update docs with behind reverse proxy - [#214](https://github.com/verdaccio/verdaccio/pull/214)
- Add remote ip to request log - [#216](https://github.com/verdaccio/verdaccio/pull/216)

## 2.2.0 (June 8, 2017)
- Allow url_prefix to be only the path - ([@BartDubois](https://github.com/BartDubois)) in [#197](https://github.com/verdaccio/verdaccio/pull/197)
- Apache reverse proxy configuration - ([@mysiar](https://github.com/mysiar)) in [#198](https://github.com/verdaccio/verdaccio/pull/198)
- don't blindly clobber local dist-tags - ([@rmg](https://github.com/rmg)) in [#206](https://github.com/verdaccio/verdaccio/pull/206)
- Adds cache option to uplinks - ([@silkentrance](https://github.com/silkentrance)) in [#132](https://github.com/verdaccio/verdaccio/pull/132)

## 2.1.7 (May 14, 2017)
- Fixed publish fail in YARN - ([@W1U02](https://github.com/W1U02)) in [#183](https://github.com/verdaccio/verdaccio/pull/183)

## 2.1.6 (May 12, 2017)
- Fix https certificates safety check - ([@juanpicado](https://github.com/juanpicado)) in [#189](https://github.com/verdaccio/verdaccio/pull/189)
- Fix upstream search not working with gzip - ([@Meeeeow](https://github.com/Meeeeow) in [#170](https://github.com/verdaccio/verdaccio/pull/170))
- Add additional requirement to output message - ([@marnel](https://github.com/marnel) in [#184](https://github.com/verdaccio/verdaccio/pull/184))
- Implement npm ping endpoint - ([@juanpicado](https://github.com/juanpicado)) in [#179](https://github.com/verdaccio/verdaccio/pull/179)
- Add support for multiple notification endpoints to existing webhook - ([@ryan-codingintrigue](https://github.com/ryan-codingintrigue)) in [#108](https://github.com/verdaccio/verdaccio/pull/108)

## 2.1.5 (April 22, 2017)
- fix upstream search - ([@Meeeeow](https://github.com/Meeeeow) in [#166](https://github.com/verdaccio/verdaccio/pull/166))
- Fix search feature - ([@Meeeeow](https://github.com/Meeeeow) in [#163](https://github.com/verdaccio/verdaccio/pull/163))
- add docs about run behind proxy - ([@Meeeeow](https://github.com/Meeeeow) in [#160](https://github.com/verdaccio/verdaccio/pull/160))

## 2.1.4 (April 13, 2017)
- Added Nexus Repository OSS as similar existing software - ([@nedelenbos030](https://github.com/nedelenbos) in [#147](https://github.com/verdaccio/verdaccio/pull/147))
- Increase verbose on notify request - ([@juanpicado](https://github.com/juanpicado) in [#153](https://github.com/verdaccio/verdaccio/pull/153))
- Add fallback support to previous config files - ([@juanpicado](https://github.com/juanpicado) in [#155](https://github.com/verdaccio/verdaccio/pull/155))
- Allows retrieval of all local package contents via http://server/-/search/* - ([@Verikon](https://github.com/Verikon) in [#152](https://github.com/verdaccio/verdaccio/pull/155))

## 2.1.3 (March 29, 2017)
- [GH-83] create systemd service - ([@030](https://github.com/030) in [#89](https://github.com/verdaccio/verdaccio/pull/89))
- optional scope in the readme package name. - ([@psychocode](https://github.com/psychocode) in [#136](https://github.com/verdaccio/verdaccio/pull/136))
- Added docker image for rpi - ([@danielo515](https://github.com/danielo515) in [#137](https://github.com/verdaccio/verdaccio/pull/137))
- Allow configuring a tagline that is displayed on the webpage. ([@jachstet-sea](https://github.com/jachstet-sea) in [#143](https://github.com/verdaccio/verdaccio/pull/143))

## 2.1.2 (March 9, 2017)
- Contribute guidelines - ([@juanpicado](https://github.com/juanpicado) in [#133](https://github.com/verdaccio/verdaccio/pull/133))
- fix(plugin-loader): plugins verdaccio-* overwrite by sinopia- ([@Alexandre-io](https://github.com/Alexandre-io) in [#129](https://github.com/verdaccio/verdaccio/pull/129))

## 2.1.1 (February 7, 2017)

- [GH-86] updated readme to point to new chef cookbook ([@kgrubb](https://github.com/kgrubb) in [#117](https://github.com/verdaccio/verdaccio/pull/117))
- [GH-88] rename to Verdaccio instead of Sinopia ([@kgrubb](https://github.com/kgrubb) in [#93](https://github.com/verdaccio/verdaccio/pull/93))
- Unit testing coverage ([@juanpicado](https://github.com/juanpicado) in [#116](https://github.com/verdaccio/verdaccio/issues/116))
- Allow htpasswd-created users to log in ([@imsnif](https://github.com/imsnif) in [#112](https://github.com/verdaccio/verdaccio/issues/112))
- remove travis io.js support ([@juanpicado](https://github.com/juanpicado) in [#115](https://github.com/verdaccio/verdaccio/issues/115))
- rename clean up ([@juanpicado](https://github.com/juanpicado) in [#114](https://github.com/verdaccio/verdaccio/issues/114))
- _npmUser / author not showing up ([@juanpicado](https://github.com/juanpicado) in [#65](https://github.com/verdaccio/verdaccio/issues/65))
- Docs: correct config attribute `proxy_access` ([@robertgroh](https://github.com/robertgroh) in [#96](https://github.com/verdaccio/verdaccio/pull/96))
- Problem with docker.yaml ([@josedepaz](https://github.com/josedepaz) in [#72](https://github.com/verdaccio/verdaccio/pull/72))
- Prevent logging of user and password ([@tlvince](https://github.com/tlvince) in [#94](https://github.com/verdaccio/verdaccio/pull/94))
- Updated README.md to reflect the availability of the docker image ([@jmwilkinson](https://github.com/jmwilkinson) in [#71](https://github.com/verdaccio/verdaccio/pull/71))

## 2.1.0 (October 11, 2016)

- Use __dirname to resolve local plugins ([@aledbf](https://github.com/aledbf) in [#25](https://github.com/verdaccio/verdaccio/pull/25))
- Fix npm cli logout ([@plitex](https://github.com/plitex) in [#47](https://github.com/verdaccio/verdaccio/pull/47))
- Add log format: pretty-timestamped ([@jachstet-sea](https://github.com/jachstet-sea) in [#68](https://github.com/verdaccio/verdaccio/pull/68))
- Allow adding/overriding HTTP headers of uplinks via config ([@jachstet-sea](https://github.com/jachstet-sea) in [#67](https://github.com/verdaccio/verdaccio/pull/67))
- Update Dockerfile to fix failed start ([@denisbabineau](https://github.com/denisbabineau) in [#62](https://github.com/verdaccio/verdaccio/pull/62))
- Update the configs to fully support proxying scoped packages ([@ChadKillingsworth](https://github.com/ChadKillingsworth) in [#60](https://github.com/verdaccio/verdaccio/pull/60))
- Prevent the server from crashing if a repo is accessed that the user does not have access to ([@crowebird](https://github.com/crowebird) in [#58](https://github.com/verdaccio/verdaccio/pull/58))
- Hook system, for integration into things like slack
- Register entry partial even if custom template is provided ([@plitex](https://github.com/plitex) in [#46](https://github.com/verdaccio/verdaccio/pull/46))
- Rename process to verdaccio ([@juanpicado](https://github.com/juanpicado) in [#57](https://github.com/verdaccio/verdaccio/pull/57))

## 7 Jun 2015, version 1.4.0

- avoid sending X-Forwarded-For through proxies (issues [#19](https://github.com/rlidwka/sinopia/issues/19), [#254](https://github.com/rlidwka/sinopia/issues/254))
- fix multiple issues in search (issues [#239](https://github.com/rlidwka/sinopia/issues/239), [#253](https://github.com/rlidwka/sinopia/pull/253))
- fix "maximum stack trace exceeded" errors in auth (issue [#258](https://github.com/rlidwka/sinopia/issues/258))

## 10 May 2015, version 1.3.0

- add dist-tags endpoints (issue [#211](https://github.com/rlidwka/sinopia/issues/211))

## 22 Apr 2015, version 1.2.2

- fix access control regression in `1.2.1` (issue [#238](https://github.com/rlidwka/sinopia/issues/238))
- add a possibility to bind on unix sockets (issue [#237](https://github.com/rlidwka/sinopia/issues/237))

## 11 Apr 2015, version 1.2.1

- added more precise authorization control to auth plugins (issue [#207](https://github.com/rlidwka/sinopia/pull/207))

## 29 Mar 2015, version 1.1.0

- add a possibility to listen on multiple ports (issue [#172](https://github.com/rlidwka/sinopia/issues/172))
- added https support (issues [#71](https://github.com/rlidwka/sinopia/issues/71), [#166](https://github.com/rlidwka/sinopia/issues/166))
- added an option to use a custom template for web UI (issue [#208](https://github.com/rlidwka/sinopia/pull/208))
- remove "from" and "resolved" fields from shrinkwrap (issue [#204](https://github.com/rlidwka/sinopia/issues/204))
- fix hanging when rendering readme (issue [#206](https://github.com/rlidwka/sinopia/issues/206))
- fix logger-related crash when using sinopia as a library
- all requests to uplinks should now have proper headers

## 12 Feb 2015, version 1.0.1

- fixed issue with `max_users` option (issue [#184](https://github.com/rlidwka/sinopia/issues/184))
- fixed issue with not being able to disable the web interface (issue [#195](https://github.com/rlidwka/sinopia/pull/195))
- fixed 500 error while logging in with npm (issue [#200](https://github.com/rlidwka/sinopia/pull/200))

## 26 Jan 2015, version 1.0.0

- switch markdown parser from `remarkable` to `markdown-it`
- update `npm-shrinkwrap.json`
- now downloading tarballs from upstream using the same protocol as for metadata (issue [#166](https://github.com/rlidwka/sinopia/issues/166))

## 22 Dec 2014, version 1.0.0-beta.2

- fix windows behavior when `$HOME` isn't set (issue [#177](https://github.com/rlidwka/sinopia/issues/177))
- fix sanitization for highlighted code blocks in readme (issue [render-readme/#1](https://github.com/rlidwka/render-readme/issues/1))

## 15 Dec 2014, version 1.0.0-beta

- Markdown rendering is now a lot safer (switched to remarkable+sanitizer).
- Header in web interface is now static instead of fixed.
- `GET /-/all?local` now returns list of all local packages (issue [#179](https://github.com/rlidwka/sinopia/pull/179))

## 5 Dec 2014, version 1.0.0-alpha.3

- Fixed an issue with scoped packages in tarballs

## 25 Nov 2014, version 1.0.0-alpha

- Config file is now created in `$XDG_CONFIG_HOME` instead of current directory.

  It is printed to stdout each time sinopia starts, so you hopefully won't have any trouble locating it.

  The change is made so sinopia will pick up the same config no matter which directory it is started from.

- Default config file is now a lot shorter, and it is very permissive by default. You could use sinopia without modifying it on your own computer, but definitely should change it on production.

- Added auth tokens. For now, auth token is just a username+password encrypted for security reasons, so it isn't much different from basic auth, but allows to avoid "always-auth" npm setting.

- Added scoped packages.

  Please note that default `*` mask won't apply to them. You have to use masks like `@scope/*` to match scoped packages, or `**` to match everything.

- Enabled web interface by default. Wow, it looks almost ready now!

- All dependencies are bundled now, so incompatible changes in 3rd party stuff in the future won't ruin the day.

## 1 Nov 2014, version 0.13.2

- fix `EPERM`-related crashes on windows (issue [#67](https://github.com/rlidwka/sinopia/issues/67))

## 22 Oct 2014, version 0.13.0

- web interface:
  - web page layout improved (issue [#141](https://github.com/rlidwka/sinopia/pull/141))
  - latest version is now displayed correctly (issues [#120](https://github.com/rlidwka/sinopia/issues/120), [#123](https://github.com/rlidwka/sinopia/issues/123), [#143](https://github.com/rlidwka/sinopia/pull/143))
  - fixed web interface working behind reverse proxy (issues [#145](https://github.com/rlidwka/sinopia/issues/145), [#147](https://github.com/rlidwka/sinopia/issues/147))

## 2 Oct 2014, version 0.12.1

- web interface:
  - update markdown CSS (issue [#137](https://github.com/rlidwka/sinopia/pull/137))
  - jquery is now served locally (issue [#133](https://github.com/rlidwka/sinopia/pull/133))

- bugfixes:
  - fix "offset out of bounds" issues (issue [sinopia-htpasswd/#2](https://github.com/rlidwka/sinopia-htpasswd/issues/2))
  - "max_users" in htpasswd plugin now works correctly (issue [sinopia-htpasswd/#3](https://github.com/rlidwka/sinopia-htpasswd/issues/3))
  - fix `ENOTDIR, open '.sinopia-db.json'` error in npm search (issue [#122](https://github.com/rlidwka/sinopia/issues/122))

## 25 Sep 2014, version 0.12.0

- set process title to `sinopia`

- web interface bugfixes:
  - save README data for each package (issue [#100](https://github.com/rlidwka/sinopia/issues/100))
  - fix crashes related to READMEs (issue [#128](https://github.com/rlidwka/sinopia/issues/128))

## 18 Sep 2014, version 0.11.3

- fix 500 error in adduser function in sinopia-htpasswd (issue [#121](https://github.com/rlidwka/sinopia/issues/121))
- fix fd leak in authenticate function in sinopia-htpasswd (issue [#116](https://github.com/rlidwka/sinopia/issues/116))

## 15 Sep 2014, version 0.11.1

- mark crypt3 as optional (issue [#119](https://github.com/rlidwka/sinopia/issues/119))

## 15 Sep 2014, version 0.11.0

- Added auth plugins (issue [#99](https://github.com/rlidwka/sinopia/pull/99))

  Now you can create your own auth plugin based on [sinopia-htpasswd](https://github.com/rlidwka/sinopia-htpasswd) package.

- WIP: web interface (issue [#73](https://github.com/rlidwka/sinopia/pull/73))

  It is disabled by default, and not ready for production yet. Use at your own risk. We will enable it in the next major release.

- Some modules are now bundled by default, so users won't have to install stuff from git. We'll see what issues it causes, maybe all modules will be bundled in the future like in npm.

## 14 Sep 2014, version 0.10.x

*A bunch of development releases that are broken in various ways. Please use 0.11.x instead.*

## 7 Sep 2014, version 0.9.3

- fix several bugs that could cause "can't set headers" exception

## 3 Sep 2014, version 0.9.2

- allow "pretty" format for logging into files (issue [#88](https://github.com/rlidwka/sinopia/pull/88))
- remove outdated user existence check (issue [#115](https://github.com/rlidwka/sinopia/pull/115))

## 11 Aug 2014, version 0.9.1

- filter falsey _npmUser values (issue [#95](https://github.com/rlidwka/sinopia/pull/95))
- option not to cache third-party files (issue [#85](https://github.com/rlidwka/sinopia/issues/85))

## 26 Jul 2014, version 0.9.0

- new features:
  - add search functionality (issue [#65](https://github.com/rlidwka/sinopia/pull/65))
  - allow users to authenticate using .htpasswd (issue [#44](https://github.com/rlidwka/sinopia/issues/44))
  - allow user registration with "npm adduser" (issue [#44](https://github.com/rlidwka/sinopia/issues/44))

- bugfixes:
  - avoid crashing when res.socket is null (issue [#89](https://github.com/rlidwka/sinopia/issues/89))

## 20 Jun 2014, version 0.8.2

- allow '@' in package/tarball names (issue [#75](https://github.com/rlidwka/sinopia/issues/75))
- other minor fixes (issues [#77](https://github.com/rlidwka/sinopia/issues/77), [#80](https://github.com/rlidwka/sinopia/issues/80))

## 14 Apr 2014, version 0.8.1

- "latest" tag is now always present in any package (issue [#63](https://github.com/rlidwka/sinopia/issues/63))
- tags created with new npm versions (>= 1.3.19) can now be published correctly

## 1 Apr 2014, version 0.8.0

- use gzip compression whenever possible (issue [#54](https://github.com/rlidwka/sinopia/issues/54))
- set `ignore_latest_tag` to false, it should now be more compatible with npm registry
- make `fs-ext` optional (issue [#61](https://github.com/rlidwka/sinopia/issues/61))

## 29 Mar 2014, version 0.7.1

- added `ignore_latest_tag` config param (issues [#55](https://github.com/rlidwka/sinopia/issues/55), [#59](https://github.com/rlidwka/sinopia/issues/59))
- reverted PR [#56](https://github.com/rlidwka/sinopia/issues/56) (see discussion in [#57](https://github.com/rlidwka/sinopia/issues/57))

## 13 Mar 2014, version 0.7.0

- config changes:
  - breaking change: all time intervals are now specified in *seconds* instead of *milliseconds* for the sake of consistency. Change `timeout` if you have one!
  - all time intervals now can be specified in [nginx notation](http://wiki.nginx.org/ConfigNotation), for example `1m 30s` will specify a 90 seconds timeout
  - added `maxage` option to avoid asking public registry for the same data too often (issue [#47](https://github.com/rlidwka/sinopia/issues/47))
  - added `max_fails` and `fail_timeout` options to reduce amount of requests to public registry when it's down (issue [#7](https://github.com/rlidwka/sinopia/issues/7))

- bug fixes:
  - fix crash when headers are sent twice (issue [#52](https://github.com/rlidwka/sinopia/issues/52))
  - all tarballs are returned with `Content-Length`, which allows [yapm](https://github.com/rlidwka/yapm) to estimate download time
  - when connection to public registry is interrupted when downloading a tarball, we no longer save incomplete tarball to the disk

- other changes:
  - 404 errors are returned in couchdb-like manner (issue [#56](https://github.com/rlidwka/sinopia/issues/56))

## 5 Mar 2014, version 0.6.7

- pin down express@3 version, since sinopia doesn't yet work with express@4

## 28 Feb 2014, version 0.6.5

- old SSL keys for npm are removed, solves `SELF_SIGNED_CERT_IN_CHAIN` error

## 3 Feb 2014, version 0.6.3

- validate tags and versions (issue [#40](https://github.com/rlidwka/sinopia/issues/40))
- don't crash when process.getuid doesn't exist (issue [#41](https://github.com/rlidwka/sinopia/issues/41))

## 18 Jan 2014, version 0.6.2

- adding config param to specify upload limits (issue [#39](https://github.com/rlidwka/sinopia/issues/39))
- making loose semver versions work (issue [#38](https://github.com/rlidwka/sinopia/issues/38))

## 13 Jan 2014, version 0.6.1

- support setting different storage paths for different packages (issue [#35](https://github.com/rlidwka/sinopia/issues/35))

## 30 Dec 2013, version 0.6.0

- tag support (issue [#8](https://github.com/rlidwka/sinopia/issues/8))
- adding support for npm 1.3.19+ behaviour (issue [#31](https://github.com/rlidwka/sinopia/issues/31))
- removing all support for proxying publish requests to uplink (too complex)

## 26 Dec 2013, version 0.5.9

- fixing bug with bad Accept header (issue [#32](https://github.com/rlidwka/sinopia/issues/32))

## 20 Dec 2013, version 0.5.8

- fixed a warning from js-yaml
- don't color multiline strings in logs output
- better error messages in various cases
- test format changed

## 15 Dec 2013, version 0.5.7

- try to fetch package from uplinks if user requested a tarball we don't know about (issue [#29](https://github.com/rlidwka/sinopia/issues/29))
- security fix: set express.js to production mode so we won't return stack traces to the user in case of errors

## 11 Dec 2013, version 0.5.6

- fixing a few crashes related to tags

## 8 Dec 2013, version 0.5.4

- latest tag always shows highest version available (issue [#8](https://github.com/rlidwka/sinopia/issues/8))
- added a configurable timeout for requests to uplinks (issue [#18](https://github.com/rlidwka/sinopia/issues/18))
- users with bad authentication header are considered not logged in (issue [#17](https://github.com/rlidwka/sinopia/issues/17))

## 24 Nov 2013, version 0.5.3

- added proxy support for requests to uplinks (issue [#13](https://github.com/rlidwka/sinopia/issues/13))
- changed license from default BSD to WTFPL

## 26 Oct 2013, version 0.5.2

- server now supports unpublishing local packages
- added fs-ext dependency (flock)
- fixed a few race conditions

## 20 Oct 2013, version 0.5.1

- fixed a few errors related to logging

## 12 Oct 2013, version 0.5.0

- using bunyan as a log engine
- pretty-formatting colored logs to stdout by default
- ask user before creating any config files

## 5 Oct 2013, version 0.4.3

- basic tags support for npm (read-only)
- npm star/unstar calls now return proper error

## 29 Sep 2013, version 0.4.2

## 28 Sep 2013, version 0.4.1

- using mocha for tests now
- making use of streams2 api, doesn't work on 0.8 anymore
- basic support for uploading packages to other registries

## 27 Sep 2013, version 0.4.0

- basic test suite
- storage path in config is now relative to config file location, not cwd
- proper cleanup for temporary files

## 12 Jul 2013, version 0.3.2

## 4 Jul 2013, version 0.3.1

- using ETag header for all json output, based on md5

## 20 Jun 2013, version 0.3.0

- compression for http responses
- requests for files to uplinks are now streams (no buffering)
- tarballs are now cached locally

## 19 Jun 2013, version 0.2.0

- config file changed, packages is now specified with minimatch
- ability to retrieve all packages from another registry (i.e. npmjs)

## 14 Jun 2013, version 0.1.1

- config is now autogenerated
- tarballs are now read/written from fs using streams (no buffering)

## 9 Jun 2013, version 0.1.0

- first npm version
- ability to publish packages and retrieve them locally
- basic authentication/access control

## 22 May 2013, version 0.0.0

- first commits

CODE_OF_CONDUCT.md (new file, 46 lines added)
@@ -0,0 +1,46 @@
# Contributor Covenant Code of Conduct

## Our Pledge

In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.

## Our Standards

Examples of behavior that contributes to creating a positive environment include:

* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members

Examples of unacceptable behavior by participants include:

* The use of sexualized language or imagery and unwelcome sexual attention or advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a professional setting

## Our Responsibilities

Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.

Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.

## Scope

This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at verdaccio.npm@gmail.com. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.

Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]

[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/
208
CONTRIBUTING.md
Normal file
208
CONTRIBUTING.md
Normal file
@@ -0,0 +1,208 @@
|
||||
# Contributing to Verdaccio
|
||||
|
||||
We are happy you wish to contribute this project, for that reason we want to board you with this guide.
|
||||
|
||||
## How I contribute?
|
||||
|
||||
|
||||
### Ways to contribute
|
||||
|
||||
There are many ways to contribute to the Verdaccio Project. Here’s a list of technical contributions with increasing levels of involvement and required knowledge of Verdaccio's code and operations.
|
||||
|
||||
* [Reporting a Bug](CONTRIBUTING.md#reporting-a-bug)
|
||||
* [Request Features](CONTRIBUTING.md#request-features)
|
||||
* [Plugins](CONTRIBUTING.md#plugins)
|
||||
|
||||
Please read carefully this document. It will guide you to provide maintainers and readers valuable information to boots the process solve the issue or evaluate your proposal.
|
||||
|
||||
## Reporting a Bug
|
||||
|
||||
We welcome clear bug reports. If you've found a bug in Verdaccio that isn't a security risk, please file a report in our [issue tracker](https://github.com/verdaccio/verdaccio/issues). Before you file your issue, search to see if it has already been reported. If so, up-vote (using GitHub reactions) or add additional helpful details to the existing issue to show that it's affecting multiple people.
|
||||
|
||||
### Check if there's a simple solution in the wiki
|
||||
|
||||
Some of the most popular topics are covered in our [wiki](https://github.com/verdaccio/verdaccio/wiki), which is the first place to look for the topic you are interested in.
|
||||
|
||||
### Questions & Chat
|
||||
|
||||
We have tagged questions for easy follow-up under the label [questions](https://github.com/verdaccio/verdaccio/labels/question). Additionally, we recommend asking questions in the **#questions/#development** channels of the new chat at [gitter](https://gitter.im/verdaccio/).
|
||||
|
||||
### Look at the past
|
||||
|
||||
* Verdaccio is a fork of `sinopia@1.4.0`; therefore, there is a huge [database of tickets](https://github.com/rlidwka/sinopia/issues) in the original project. It's a good place to find answers.
|
||||
* Questions under the [sinopia](http://stackoverflow.com/questions/tagged/sinopia) or [verdaccio](http://stackoverflow.com/search?q=verdaccio) tags on Stack Overflow might be helpful.
|
||||
|
||||
### Using the issue tracker
|
||||
|
||||
The issue tracker is the channel where most users and developers post.
|
||||
|
||||
#### I want to report a bug
|
||||
|
||||
We consider a bug to be a feature that is not working as described in the documentation. Before reporting a bug, follow these steps:
|
||||
|
||||
1. Use the GitHub issue search — check if the issue has already been reported.
|
||||
|
||||
2. Check if the issue has been fixed — try to reproduce it using the latest master or development branch in the repository (a quick sketch of how to do this follows this list).
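For reference, one way to try the latest master is roughly the following (a sketch; paths and exact commands may differ in your environment):

```bash
# grab and run the latest master (rough sketch)
git clone https://github.com/verdaccio/verdaccio.git
cd verdaccio
npm install          # install dependencies
./bin/verdaccio      # starts the registry, by default on port 4873
```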
|
||||
|
||||
Verdaccio still does not support all npm commands, either because they were not considered important in the initial design or because nobody has requested them yet.
|
||||
|
||||
## Request Features
|
||||
|
||||
A new feature is always welcome; thus, please analyse whether your idea fits the scope of the project and elaborate your request with enough context, for instance:
|
||||
|
||||
* A broad description of the advantages of your request.
|
||||
* Is it compatible with `npm` and `yarn`?
|
||||
* You might implement your feature and provide a forked repository as example.
|
||||
* Whatever else you have in mind 🤓.
|
||||
|
||||
### Submitting a Pull Request
|
||||
The following are the general steps you should follow in creating a pull request. Subsequent pull requests only need
|
||||
to follow step 3 and beyond:
|
||||
|
||||
1. Fork the repository on GitHub
|
||||
2. Clone the forked repository to your machine
|
||||
3. Create a "feature" branch in your local repository
|
||||
4. Make your changes and commit them to your local repository
|
||||
5. Rebase and push your commits to your GitHub remote fork/repository
|
||||
6. Issue a Pull Request to the official repository
|
||||
7. Your Pull Request is reviewed by a committer and merged into the repository
|
||||
|
||||
*Note*: While there are other ways to accomplish the steps using other tools, the examples here will assume that most
|
||||
actions will be performed via the `git` command line.
|
||||
|
||||
### 1. Fork the Repository
|
||||
|
||||
When you are logged in to your GitHub account and viewing one of the main repositories, you will see the *Fork* button.
|
||||
Clicking this button will show you which repositories you can fork to. Choose your own account. Once the process
|
||||
finishes, you will have your own repository that is "forked" from the official one.
|
||||
|
||||
Forking is a GitHub term and not a git term. Git is a wholly distributed source control system and simply worries
|
||||
about local and remote repositories and allows you to manage your code against them. GitHub then adds this additional
|
||||
layer of structure of how repositories can relate to each other.
|
||||
|
||||
### 2. Clone the Forked Repository
|
||||
|
||||
Once you have successfully forked your repository, you will need to clone it locally to your machine:
|
||||
|
||||
```bash
|
||||
$ git clone --recursive git@github.com:username/verdaccio.git verdaccio
|
||||
```
|
||||
|
||||
This will clone your fork to your current path in a directory named `verdaccio`.
|
||||
|
||||
You should also set up the `upstream` repository. This will allow you to take changes from the "master" repository
|
||||
and merge them into your local clone and then push them to your GitHub fork:
|
||||
|
||||
```bash
|
||||
$ cd verdaccio
|
||||
$ git remote add upstream git@github.com:verdaccio/verdaccio.git
|
||||
$ git fetch upstream
|
||||
```
|
||||
|
||||
Then you can retrieve upstream changes and rebase your code on top of them like this:
|
||||
|
||||
```bash
|
||||
$ git pull --rebase upstream master
|
||||
```
|
||||
|
||||
For more information on maintaining a fork, please see the GitHub Help article [Fork a Repo](https://help.github.com/articles/fork-a-repo/) and information on
|
||||
[rebasing](https://git-scm.com/book/en/v2/Git-Branching-Rebasing) from git.
|
||||
|
||||
### 3. Create a Branch
|
||||
|
||||
The easiest workflow is to keep your master branch in sync with the upstream branch and not to place any of your own
|
||||
commits in that branch. When you want to work on a new feature, you then ensure you are on the master branch and create
|
||||
a new branch from there. While the name of the branch can be anything, it can often be easy to use the issue number
|
||||
you might be working on (if an issue was opened prior to opening a pull request). For example:
|
||||
|
||||
```bash
|
||||
$ git checkout -b issue-12345 master
|
||||
Switched to a new branch 'issue-12345'
|
||||
```
|
||||
|
||||
You will then be on the feature branch. You can verify what branch you are on like this:
|
||||
|
||||
```bash
|
||||
$ git status
|
||||
# On branch issue-12345
|
||||
nothing to commit, working directory clean
|
||||
```
|
||||
|
||||
### 4. Make Changes and Commit
|
||||
|
||||
#### Before commit
|
||||
|
||||
At this point your changes are ready and your new feature is ready to be shipped, but to avoid delays in merging, please be aware that the build must pass.
|
||||
|
||||
Before commit, run the test command:
|
||||
|
||||
```bash
|
||||
npm test
|
||||
```
|
||||
There must be no **eslint** errors and **all tests must pass**. Then, and only then, should you push and open your **PR**.
|
||||
|
||||
*At the time of this writing there are plenty of warnings to clean up; warnings do not fail the build, but please try not to commit code that adds new ones.*
|
||||
|
||||
#### After testing your changes
|
||||
|
||||
Now you just need to make your changes. Once you have finished your changes (and tested them) you need to commit them
|
||||
to your local repository (assuming you have staged your changes for committing):
|
||||
|
||||
```bash
|
||||
$ git status
|
||||
# On branch issue-12345
|
||||
# Changes to be committed:
|
||||
# (use "git reset HEAD <file>..." to unstage)
|
||||
#
|
||||
# modified: somefile.js
|
||||
#
|
||||
$ git commit -m "Corrects some defect, fixes #12345, refs #12346"
|
||||
[issue-12345 0000000] Corrects some defect, fixes #12345, refs #12346
|
||||
1 file changed, 2 insertions(+), 2 deletions(-)
|
||||
```
|
||||
|
||||
### 5. Rebase and Push Changes
|
||||
|
||||
If you have been working on your contribution for a while, the upstream repository may have changed. You may want to
|
||||
ensure your work is on top of the latest changes so your pull request can be applied cleanly:
|
||||
|
||||
```bash
|
||||
$ git pull --rebase upstream master
|
||||
```
|
||||
|
||||
When you are ready to push your commit to your GitHub repository for the first time on this branch you would do the
|
||||
following:
|
||||
|
||||
```bash
|
||||
$ git push -u origin issue-12345
|
||||
```
|
||||
|
||||
After the first time, you simply need to do:
|
||||
|
||||
```bash
|
||||
$ git push
|
||||
```
|
||||
|
||||
### 6. Issue a Pull Request
|
||||
|
||||
In order to have your commits merged into the main repository, you need to create a pull request. The instructions for
|
||||
this can be found in the GitHub Help Article [Creating a Pull Request](https://help.github.com/articles/creating-a-pull-request/). Essentially you do the following:
|
||||
|
||||
1. Go to the site for your repository.
|
||||
2. Click the Pull Request button.
|
||||
3. Select the feature branch from your repository.
|
||||
4. Enter a title and description for your pull request.
|
||||
5. Review the commit and files changed tabs.
|
||||
6. Click `Send Pull Request`
|
||||
|
||||
You will get notified about the status of your pull request based on your GitHub settings.
|
||||
|
||||
|
||||
## Plugins
|
||||
|
||||
Plugins are add-ons that extend the functionality of the application. If you want to develop your own plugin, we suggest the following:
|
||||
|
||||
1. Check whether there is a legacy sinopia plugin for the feature that you need at [npmjs](https://www.npmjs.com/search?q=sinopia).
|
||||
2. There is a [life-cycle to load a plugin](https://github.com/verdaccio/verdaccio/blob/master/lib/plugin-loader.js#L22) that you should keep in mind (a minimal sketch follows this list).
|
||||
3. You are free to host your plugin in your own repository; if you would like to host it within our organization, feel free to ask, we'll be happy to host it.
|
||||
4. Try to describe your plugin thoroughly so your users gain a deep understanding of it.
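As a starting point, here is a minimal sketch of what an authentication plugin might look like. It assumes the sinopia-style interface (a factory that receives the plugin's config and returns an object with an `authenticate` method); the module name, parameters, and callback convention below are illustrative, so check the plugin loader linked above for the authoritative contract.

```js
'use strict';

// Hypothetical plugin: verdaccio-auth-example (names are illustrative).
// The loader is assumed to call this factory with the plugin's config
// section and a second object carrying things like the logger.
module.exports = function (config, stuff) {
  return {
    // Called for `npm login` / authenticated requests.
    // callback(null, groups) accepts the user with the given groups,
    // callback(null, false) rejects the credentials.
    authenticate: function (user, password, callback) {
      if (config.users && config.users[user] === password) {
        return callback(null, [user]);
      }
      return callback(null, false);
    },
  };
};
```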
|
||||
79
ChangeLog.md
@@ -1,79 +0,0 @@
|
||||
|
||||
8 Dec 2013, version 0.5.4
|
||||
|
||||
- latest tag always shows highest version available (issue #8)
|
||||
- added a configurable timeout for requests to uplinks (issue #18)
|
||||
- users with bad authentication header are considered not logged in (issue #17)
|
||||
|
||||
24 Nov 2013, version 0.5.3
|
||||
|
||||
- added proxy support for requests to uplinks (issue #13)
|
||||
- changed license from default BSD to WTFPL
|
||||
|
||||
26 Oct 2013, version 0.5.2
|
||||
|
||||
- server now supports unpublishing local packages
|
||||
- added fs-ext dependency (flock)
|
||||
- fixed a few face conditions
|
||||
|
||||
20 Oct 2013, version 0.5.1
|
||||
|
||||
- fixed a few errors related to logging
|
||||
|
||||
12 Oct 2013, version 0.5.0
|
||||
|
||||
- using bunyan as a log engine
|
||||
- pretty-formatting colored logs to stdout by default
|
||||
- ask user before creating any config files
|
||||
|
||||
5 Oct 2013, version 0.4.3
|
||||
|
||||
- basic tags support for npm (read-only)
|
||||
- npm star/unstar calls now return proper error
|
||||
|
||||
29 Sep 2013, version 0.4.2
|
||||
|
||||
28 Sep 2013, version 0.4.1
|
||||
|
||||
- using mocha for tests now
|
||||
- making use of streams2 api, doesn't work on 0.8 anymore
|
||||
- basic support for uploading packages to other registries
|
||||
|
||||
27 Sep 2013, version 0.4.0
|
||||
|
||||
- basic test suite
|
||||
- storage path in config is now relative to config file location, not cwd
|
||||
- proper cleanup for temporary files
|
||||
|
||||
12 Jul 2013, version 0.3.2
|
||||
|
||||
4 Jul 2013, version 0.3.1
|
||||
|
||||
- using ETag header for all json output, based on md5
|
||||
|
||||
20 Jun 2013, version 0.3.0
|
||||
|
||||
- compression for http responses
|
||||
- requests for files to uplinks are now streams (no buffering)
|
||||
- tarballs are now cached locally
|
||||
|
||||
19 Jun 2013, version 0.2.0
|
||||
|
||||
- config file changed, packages is now specified with minimatch
|
||||
- ability to retrieve all packages from another registry (i.e. npmjs)
|
||||
|
||||
14 Jun 2013, version 0.1.1
|
||||
|
||||
- config is now autogenerated
|
||||
- tarballs are now read/written from fs using streams (no buffering)
|
||||
|
||||
9 Jun 2013, version 0.1.0
|
||||
|
||||
- first npm version
|
||||
- ability to publish packages and retrieve them locally
|
||||
- basic authentication/access control
|
||||
|
||||
22 May 2013, version 0.0.0
|
||||
|
||||
- first commits
|
||||
|
||||
35
Dockerfile
Normal file
@@ -0,0 +1,35 @@
|
||||
FROM node:8.1.2-alpine
|
||||
LABEL maintainer="https://github.com/verdaccio/verdaccio"
|
||||
|
||||
RUN apk --no-cache add openssl && \
|
||||
wget -O /usr/local/bin/dumb-init https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64 && \
|
||||
chmod +x /usr/local/bin/dumb-init && \
|
||||
apk del openssl
|
||||
|
||||
ENV APPDIR /usr/local/app
|
||||
|
||||
WORKDIR $APPDIR
|
||||
|
||||
ADD . $APPDIR
|
||||
|
||||
RUN npm install
|
||||
|
||||
RUN mkdir -p /verdaccio/storage /verdaccio/conf
|
||||
ADD conf/docker.yaml /verdaccio/conf/config.yaml
|
||||
|
||||
RUN addgroup -S verdaccio && adduser -S -G verdaccio verdaccio && \
|
||||
chown -R verdaccio:verdaccio "$APPDIR" && \
|
||||
chown -R verdaccio:verdaccio /verdaccio
|
||||
|
||||
USER verdaccio
|
||||
|
||||
ENV PORT 4873
|
||||
ENV PROTOCOL http
|
||||
|
||||
EXPOSE $PORT
|
||||
|
||||
VOLUME ["/verdaccio"]
|
||||
|
||||
ENTRYPOINT ["/usr/local/bin/dumb-init", "--"]
|
||||
|
||||
CMD $APPDIR/bin/verdaccio --config /verdaccio/conf/config.yaml --listen $PROTOCOL://0.0.0.0:${PORT}
|
||||
13
Dockerfile.rpi
Normal file
@@ -0,0 +1,13 @@
|
||||
FROM hypriot/rpi-node:6-onbuild
|
||||
|
||||
RUN mkdir -p /verdaccio/storage /verdaccio/conf
|
||||
|
||||
WORKDIR /verdaccio
|
||||
|
||||
ADD conf/docker.yaml /verdaccio/conf/config.yaml
|
||||
|
||||
EXPOSE 4873
|
||||
|
||||
VOLUME ["/verdaccio/conf", "/verdaccio/storage"]
|
||||
|
||||
CMD ["/usr/src/app/bin/verdaccio", "--config", "/verdaccio/conf/config.yaml", "--listen", "0.0.0.0:4873"]
|
||||
39
Gruntfile.js
Normal file
@@ -0,0 +1,39 @@
|
||||
module.exports = function(grunt) {
|
||||
grunt.initConfig({
|
||||
pkg: grunt.file.readJSON('package.json'),
|
||||
browserify: {
|
||||
dist: {
|
||||
files: {
|
||||
'lib/static/main.js': ['lib/GUI/js/main.js'],
|
||||
},
|
||||
options: {
|
||||
debug: true,
|
||||
transform: ['browserify-handlebars'],
|
||||
},
|
||||
},
|
||||
},
|
||||
less: {
|
||||
dist: {
|
||||
files: {
|
||||
'lib/static/main.css': ['lib/GUI/css/main.less'],
|
||||
},
|
||||
options: {
|
||||
sourceMap: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
watch: {
|
||||
files: ['lib/GUI/**/*'],
|
||||
tasks: ['default'],
|
||||
},
|
||||
});
|
||||
|
||||
grunt.loadNpmTasks('grunt-browserify');
|
||||
grunt.loadNpmTasks('grunt-contrib-watch');
|
||||
grunt.loadNpmTasks('grunt-contrib-less');
|
||||
|
||||
grunt.registerTask('default', [
|
||||
'browserify',
|
||||
'less',
|
||||
]);
|
||||
};
|
||||
105
README.md
@@ -1,6 +1,16 @@
|
||||
Sinopia is a private/caching npm repository server.
|
||||
`verdaccio` is a fork of `sinopia`. It aims to keep backwards compatibility with `sinopia`, while keeping up with npm changes.
|
||||
|
||||
It allows you to have a local npm registry with zero configuration. You don't have to install and replicate an entire CouchDB database. Sinopia keeps its own small database and, if a package doesn't exist there, it asks npmjs.org for it keeping only those packages you use.
|
||||
`verdaccio` - a private/caching npm repository server
|
||||
|
||||
[](https://travis-ci.org/verdaccio/verdaccio)
|
||||
[](https://www.npmjs.org/package/verdaccio)
|
||||
[](https://www.npmjs.org/package/verdaccio)
|
||||
[](https://codecov.io/gh/verdaccio/verdaccio)
|
||||
[](https://gitter.im/verdaccio/)
|
||||
|
||||
It allows you to have a local npm registry with zero configuration. You don't have to install and replicate an entire CouchDB database. Verdaccio keeps its own small database and, if a package doesn't exist there, it asks npmjs.org for it keeping only those packages you use.
|
||||
|
||||
<p align="center"><img src="https://firebasestorage.googleapis.com/v0/b/jotadeveloper-8d2f3.appspot.com/o/verdaccio2-compressor.png?alt=media&token=c9b01824-26f2-4cba-bd6f-f352e08cb8ff"></p>
|
||||
|
||||
## Use cases
|
||||
|
||||
@@ -19,7 +29,7 @@ It allows you to have a local npm registry with zero configuration. You don't ha
|
||||
|
||||
3. Override public packages.
|
||||
|
||||
If you want to use a modified version of some 3rd-party package (for example, you found a bug, but maintainer didn't accepted pull request yet), you can publish your version locally under the same name.
|
||||
If you want to use a modified version of some 3rd-party package (for example, you found a bug, but maintainer didn't accept pull request yet), you can publish your version locally under the same name.
|
||||
|
||||
See [override public packages](#override-public-packages) section for details.
|
||||
|
||||
@@ -28,37 +38,72 @@ It allows you to have a local npm registry with zero configuration. You don't ha
|
||||
```bash
|
||||
# installation and starting (application will create default
|
||||
# config in config.yaml you can edit later)
|
||||
$ npm install -g sinopia
|
||||
$ sinopia
|
||||
$ npm install -g verdaccio
|
||||
# or
|
||||
$ yarn global add verdaccio
|
||||
# run in your terminal
|
||||
$ verdaccio
|
||||
|
||||
# npm configuration
|
||||
$ npm set registry http://localhost:4873/
|
||||
|
||||
# if you have any restricted packages, you should add this:
|
||||
$ npm set always-auth true
|
||||
|
||||
# if you use HTTPS, add an appropriate CA information
|
||||
# ("null" means get CA list from OS)
|
||||
$ npm set ca null
|
||||
```
|
||||
|
||||
|
||||
Now you can navigate to [http://localhost:4873/](http://localhost:4873/) where your local packages will be listed and can be searched.
|
||||
|
||||
## Configuration
|
||||
|
||||
When you start a server, it auto-creates a config file.
|
||||
|
||||
**For instructions on how to run Verdaccio as a service, with a nice URL, or behind a proxy, have a look at the [server-side configuration document](wiki/server.md).**
|
||||
|
||||
When you start a server, it auto-creates a config file that adds one user (password is printed to stdout only once).
|
||||
### Docker
|
||||
|
||||
Below is the most commonly needed information;
|
||||
every aspect of Docker and verdaccio is [documented separately](wiki/docker.md)
|
||||
|
||||
#### Prebuilt images
|
||||
|
||||
To pull the latest pre-built [docker image](https://hub.docker.com/r/verdaccio/verdaccio/):
|
||||
|
||||
`docker pull verdaccio/verdaccio`
|
||||
|
||||
Since version 2, images for every version are available as [tags](https://hub.docker.com/r/verdaccio/verdaccio/tags/).
|
||||
|
||||
#### Running verdaccio using Docker
|
||||
|
||||
To run the docker container:
|
||||
```bash
|
||||
docker run -it --rm --name verdaccio -p 4873:4873 verdaccio/verdaccio
|
||||
```
|
||||
|
||||
#### Using docker-compose
|
||||
|
||||
1. Get the latest version of [docker-compose](https://github.com/docker/compose).
|
||||
2. Build and run the container:
|
||||
|
||||
```bash
|
||||
$ docker-compose up --build
|
||||
```
|
||||
|
||||
### Ansible
|
||||
|
||||
A Verdaccio playbook [is available at galaxy](https://galaxy.ansible.com/030/verdaccio) source: https://github.com/030/ansible-verdaccio
|
||||
|
||||
### Chef
|
||||
|
||||
The Verdaccio Chef cookbook [is available via the chef supermarket](https://supermarket.chef.io/cookbooks/verdaccio). source: https://github.com/kgrubb/verdaccio-cookbook
|
||||
|
||||
## Adding a new user
|
||||
|
||||
There is no utility to add a new user but you can at least use node on the command-line to generate a password. You will need to edit the config and add the user manually.
|
||||
|
||||
Start node and enter the following code replacing 'newpass' with the password you want to get the hash for.
|
||||
```bash
|
||||
$ node
|
||||
> crypto.createHash('sha1').update('newpass').digest('hex')
|
||||
'6c55803d6f1d7a177a0db3eb4b343b0d50f9c111'
|
||||
> [CTRL-D]
|
||||
npm adduser --registry http://localhost:4873/
|
||||
```
|
||||
|
||||
This will prompt you for user credentials which will be saved on the `verdaccio` server.
|
||||
|
||||
## Using private packages
|
||||
|
||||
@@ -68,7 +113,7 @@ It is recommended that you define a prefix for your private packages, for exampl
|
||||
|
||||
## Using public packages from npmjs.org
|
||||
|
||||
If some package doesn't exist in the storage, server will try to fetch it from npmjs.org. If npmjs.org is down, it serves packages from cache pretending that no other packages exist. Sinopia will download only what's needed (= requested by clients), and this information will be cached, so if client will ask the same thing second time, it can be served without asking npmjs.org for it.
|
||||
If some package doesn't exist in the storage, server will try to fetch it from npmjs.org. If npmjs.org is down, it serves packages from cache pretending that no other packages exist. Verdaccio will download only what's needed (= requested by clients), and this information will be cached, so if client will ask the same thing second time, it can be served without asking npmjs.org for it.
|
||||
|
||||
Example: if you successfully request express@3.0.1 from this server once, you'll be able to do that again (with all its dependencies) anytime, even if npmjs.org is down. But, say, express@3.0.0 will not be downloaded until it's actually needed by somebody. And if npmjs.org is offline, this server would say that only express@3.0.1 (= only what's in the cache) is published, and nothing else.
|
||||
|
||||
@@ -80,7 +125,7 @@ There's two options here:
|
||||
|
||||
1. You want to create a separate fork and stop synchronizing with public version.
|
||||
|
||||
If you want to do that, you should modify your configuration file so sinopia won't make requests regarding this package to npmjs anymore. Add a separate entry for this package to *config.yaml* and remove `npmjs` from `proxy_access` list and restart the server.
|
||||
If you want to do that, you should modify your configuration file so verdaccio won't make requests regarding this package to npmjs anymore. Add a separate entry for this package to *config.yaml* and remove `npmjs` from `proxy` list and restart the server.
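For illustration, a dedicated entry without a `proxy` line might look like the sketch below (the package name is an example; the keys follow the format of `conf/default.yaml` in this changeset):

```yaml
packages:
  'my-forked-package':
    # no `proxy: npmjs` here, so requests for this package
    # are never forwarded to the public registry
    access: $all
    publish: $authenticated

  '**':
    access: $all
    publish: $authenticated
    proxy: npmjs
```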
|
||||
|
||||
When you publish your package locally, you should probably start with version string higher than existing one, so it won't conflict with existing package in the cache.
|
||||
|
||||
@@ -90,28 +135,29 @@ There's two options here:
|
||||
|
||||
## Compatibility
|
||||
|
||||
Sinopia aims to support all features of a standard npm client that make sense to support in private repository. Unfortunately, it isn't always possible.
|
||||
Verdaccio aims to support all features of a standard npm client that make sense to support in private repository. Unfortunately, it isn't always possible.
|
||||
|
||||
Basic features:
|
||||
|
||||
- Installing packages (npm install, npm upgrade, etc.) - supported
|
||||
- Publishing packages (npm publish) - supported
|
||||
- Installing packages (npm install, npm upgrade, etc.) - **supported**
|
||||
- Publishing packages (npm publish) - **supported**
|
||||
|
||||
Advanced package control:
|
||||
|
||||
- Unpublishing packages (npm unpublish) - not yet supported, should be soon
|
||||
- Tagging (npm tag) - not yet supported, should be soon
|
||||
- Unpublishing packages (npm unpublish) - **supported**
|
||||
- Tagging (npm tag) - **supported**
|
||||
- Deprecation (npm deprecate) - not supported
|
||||
|
||||
User management:
|
||||
|
||||
- Registering new users (npm adduser {newuser}) - not supported, sinopia uses its own acl management system
|
||||
- Transferring ownership (npm owner add {user} {pkg}) - not supported, sinopia uses its own acl management system
|
||||
- Registering new users (npm adduser {newuser}) - **supported**
|
||||
- Transferring ownership (npm owner add {user} {pkg}) - not supported, verdaccio uses its own acl management system
|
||||
|
||||
Misc stuff:
|
||||
|
||||
- Searching (npm search) - not supported
|
||||
- Searching (npm search) - **supported** (cli / browser)
|
||||
- Starring (npm star, npm unstar) - not supported, doesn't make sense in private registry
|
||||
- Ping (npm ping) - **supported**
|
||||
|
||||
## Storage
|
||||
|
||||
@@ -119,6 +165,8 @@ No CouchDB here. This application is supposed to work with zero configuration, s
|
||||
|
||||
If you want to use a database instead, ask for it, we'll come up with some kind of a plugin system.
|
||||
|
||||
About the storage there is a running discussion [here](https://github.com/verdaccio/verdaccio/issues?q=is%3Aissue+is%3Aopen+label%3Astorage).
|
||||
|
||||
## Similar existing things
|
||||
|
||||
- npm + git (I mean, using git+ssh:// dependencies) - most people seem to use this, but it's a terrible idea... *npm update* doesn't work, can't use git subdirectories this way, etc.
|
||||
@@ -126,5 +174,6 @@ If you want to use a database instead, ask for it, we'll come up with some kind
|
||||
- [shadow-npm](https://github.com/dominictarr/shadow-npm), [public service](http://shadow-npm.net/) - it uses the same code as npmjs.org + service is dead
|
||||
- [gemfury](http://www.gemfury.com/l/npm-registry) and others - those are closed-source cloud services, and I'm not in a mood to trust my private code to somebody (security through obscurity yeah!)
|
||||
- npm-registry-proxy, npm-delegate, npm-proxy - those are just proxies...
|
||||
- [nexus-repository-oss](https://www.sonatype.com/nexus-repository-oss) - Repository manager that handles more than just NPM dependencies
|
||||
- Is there something else?
|
||||
|
||||
- [codebox-npm](https://github.com/craftship/codebox-npm) - Serverless private npm registry using
|
||||
|
||||
58
SERVER.md
@@ -1,58 +0,0 @@
|
||||
This is mostly basic linux server configuration stuff but I felt it important to document and share the steps I took to get sinopia running permanently on my server. You will need root (or sudo) permissions for the following.
|
||||
|
||||
## Running as a separate user
|
||||
First create the sinopia user:
|
||||
```bash
|
||||
$ sudo adduser --disabled-login --gecos 'Sinopia NPM mirror' sinopia
|
||||
```
|
||||
|
||||
You create a shell as the sinopia user using the following command:
|
||||
```bash
|
||||
$ sudo su sinopia
|
||||
$ cd ~
|
||||
```
|
||||
|
||||
The 'cd ~' command send you to the home directory of the sinopia user. Make sure you run sinopia at least once to generate the config file. Edit it according to your needs.
|
||||
|
||||
## Listening on all addresses
|
||||
If you want to listen to every external address set the listen directive in the config to:
|
||||
```
|
||||
# you can specify listen address (or simply a port)
|
||||
listen: 0.0.0.0:4873
|
||||
```
|
||||
|
||||
## Keeping sinopia running forever
|
||||
We can use the node package called 'forever' to keep sinopia running all the time.
|
||||
https://github.com/nodejitsu/forever
|
||||
|
||||
First install forever globally:
|
||||
```bash
|
||||
$ sudo npm install -g forever
|
||||
```
|
||||
|
||||
Make sure you've started sinopia at least once to generate the config file and write down the created admin user. You can then use the following command to start sinopia:
|
||||
```bash
|
||||
$ forever start `which sinopia`
|
||||
```
|
||||
|
||||
You can check the documentation for more information on how to use forever.
|
||||
|
||||
## Surviving server restarts
|
||||
We can use crontab and forever together to restart sinopia after a server reboot.
|
||||
When you're logged in as the sinopia user do the following:
|
||||
|
||||
```bash
|
||||
$ crontab -e
|
||||
```
|
||||
|
||||
This might ask you to choose an editor. Pick your favorite and proceed.
|
||||
Add the following entry to the file:
|
||||
```
|
||||
@reboot /usr/bin/forever start /usr/lib/node_modules/sinopia/bin/sinopia
|
||||
```
|
||||
|
||||
The locations may vary depending on your server setup. If you want to know where your files are you can use the 'which' command:
|
||||
```bash
|
||||
$ which forever
|
||||
$ which sinopia
|
||||
```
|
||||
@@ -1,4 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
require('../lib/cli')
|
||||
|
||||
3
bin/verdaccio
Executable file
@@ -0,0 +1,3 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
require('../lib/cli');
|
||||
1
conf/README.md
Normal file
@@ -0,0 +1 @@
|
||||
This directory is for config examples.
|
||||
50
conf/default.yaml
Normal file
@@ -0,0 +1,50 @@
|
||||
#
|
||||
# This is the default config file. It allows all users to do anything,
|
||||
# so don't use it on production systems.
|
||||
#
|
||||
# Look here for more config file examples:
|
||||
# https://github.com/verdaccio/verdaccio/tree/master/conf
|
||||
#
|
||||
|
||||
# path to a directory with all packages
|
||||
storage: ./storage
|
||||
|
||||
auth:
|
||||
htpasswd:
|
||||
file: ./htpasswd
|
||||
# Maximum amount of users allowed to register, defaults to "+inf".
|
||||
# You can set this to -1 to disable registration.
|
||||
#max_users: 1000
|
||||
|
||||
# a list of other known repositories we can talk to
|
||||
uplinks:
|
||||
npmjs:
|
||||
url: https://registry.npmjs.org/
|
||||
|
||||
packages:
|
||||
'@*/*':
|
||||
# scoped packages
|
||||
access: $all
|
||||
publish: $authenticated
|
||||
proxy: npmjs
|
||||
|
||||
'**':
|
||||
# allow all users (including non-authenticated users) to read and
|
||||
# publish all packages
|
||||
#
|
||||
# you can specify usernames/groupnames (depending on your auth plugin)
|
||||
# and three keywords: "$all", "$anonymous", "$authenticated"
|
||||
access: $all
|
||||
|
||||
# allow all known users to publish packages
|
||||
# (anyone can register by default, remember?)
|
||||
publish: $authenticated
|
||||
|
||||
# if package is not available locally, proxy requests to 'npmjs' registry
|
||||
proxy: npmjs
|
||||
|
||||
# log settings
|
||||
logs:
|
||||
- {type: stdout, format: pretty, level: http}
|
||||
#- {type: file, path: verdaccio.log, level: info}
|
||||
|
||||
53
conf/docker.yaml
Normal file
@@ -0,0 +1,53 @@
|
||||
#
|
||||
# This is the config file used for the docker images.
|
||||
# It allows all users to do anything, so don't use it on production systems.
|
||||
#
|
||||
# Do not configure host and port under `listen` in this file
|
||||
# as it will be ignored when using docker.
|
||||
# see https://github.com/verdaccio/verdaccio/blob/master/wiki/docker.md#docker-and-custom-port-configuration
|
||||
#
|
||||
# Look here for more config file examples:
|
||||
# https://github.com/verdaccio/verdaccio/tree/master/conf
|
||||
#
|
||||
|
||||
# path to a directory with all packages
|
||||
storage: /verdaccio/storage
|
||||
|
||||
auth:
|
||||
htpasswd:
|
||||
file: /verdaccio/conf/htpasswd
|
||||
# Maximum amount of users allowed to register, defaults to "+inf".
|
||||
# You can set this to -1 to disable registration.
|
||||
#max_users: 1000
|
||||
|
||||
# a list of other known repositories we can talk to
|
||||
uplinks:
|
||||
npmjs:
|
||||
url: https://registry.npmjs.org/
|
||||
|
||||
packages:
|
||||
'@*/*':
|
||||
# scoped packages
|
||||
access: $all
|
||||
publish: $all
|
||||
proxy: npmjs
|
||||
|
||||
'**':
|
||||
# allow all users (including non-authenticated users) to read and
|
||||
# publish all packages
|
||||
#
|
||||
# you can specify usernames/groupnames (depending on your auth plugin)
|
||||
# and three keywords: "$all", "$anonymous", "$authenticated"
|
||||
access: $all
|
||||
|
||||
# allow all known users to publish packages
|
||||
# (anyone can register by default, remember?)
|
||||
publish: $all
|
||||
|
||||
# if package is not available locally, proxy requests to 'npmjs' registry
|
||||
proxy: npmjs
|
||||
|
||||
# log settings
|
||||
logs:
|
||||
- {type: stdout, format: pretty, level: http}
|
||||
#- {type: file, path: verdaccio.log, level: info}
|
||||
186
conf/full.yaml
Normal file
@@ -0,0 +1,186 @@
|
||||
# path to a directory with all packages
|
||||
storage: ./storage
|
||||
|
||||
# a list of users
|
||||
#
|
||||
# This could be deprecated soon, use auth plugins instead (see htpasswd below).
|
||||
users:
|
||||
admin:
|
||||
# crypto.createHash('sha1').update(pass).digest('hex')
|
||||
password: a94a8fe5ccb19ba61c4c0873d391e987982fbbd3
|
||||
|
||||
web:
|
||||
# web interface is disabled by default in 0.x, will be enabled soon in 1.x
|
||||
# when all its issues will be fixed
|
||||
#
|
||||
# set this to `true` if you want to experiment with web ui now;
|
||||
# this has a lot of issues, e.g. no auth yet, so use at your own risk
|
||||
#enable: true
|
||||
|
||||
title: Verdaccio
|
||||
# logo: logo.png
|
||||
# template: custom.hbs
|
||||
# tagline: "Some <b>HTML</b> enabled tagline that sits between the actual \
|
||||
#header and the list of packages. You can even add <a \
|
||||
#href=\"https://github.com\">links</a>!"
|
||||
|
||||
auth:
|
||||
htpasswd:
|
||||
file: ./htpasswd
|
||||
# Maximum amount of users allowed to register, defaults to "+inf".
|
||||
# You can set this to -1 to disable registration.
|
||||
#max_users: 1000
|
||||
|
||||
# a list of other known repositories we can talk to
|
||||
uplinks:
|
||||
npmjs:
|
||||
url: https://registry.npmjs.org/
|
||||
|
||||
# amount of time to wait for repository to respond
|
||||
# before giving up and use the local cached copy
|
||||
#timeout: 30s
|
||||
|
||||
# maximum time in which data is considered up to date
|
||||
#
|
||||
# default is 2 minutes, so server won't request the same data from
|
||||
# uplink if a similar request was made less than 2 minutes ago
|
||||
#maxage: 2m
|
||||
|
||||
# if two subsequent requests fail, no further requests will be sent to
|
||||
# this uplink for five minutes
|
||||
#max_fails: 2
|
||||
#fail_timeout: 5m
|
||||
|
||||
# timeouts are defined in the same way as nginx, see:
|
||||
# http://wiki.nginx.org/ConfigNotation
|
||||
|
||||
# add/override HTTP headers sent to the uplink server
|
||||
# this allows for HTTP Basic auth for example:
|
||||
#headers:
|
||||
# authorization: "Basic YourBase64EncodedCredentials=="
|
||||
|
||||
# set this to false to prevent tarballs from this upstream
|
||||
# to be stored in the local storage (defaults to true)
|
||||
#cache: false
|
||||
|
||||
packages:
|
||||
# uncomment this for packages with "local-" prefix to be available
|
||||
# for admin only, it's a recommended way of handling private packages
|
||||
#'local-*':
|
||||
# access: admin
|
||||
# publish: admin
|
||||
# # you can override storage directory for a group of packages this way:
|
||||
# storage: 'local_storage'
|
||||
|
||||
'**':
|
||||
# allow all users to read packages (including non-authenticated users)
|
||||
#
|
||||
# you can specify usernames/groupnames (depending on your auth plugin)
|
||||
# and three keywords: "$all", "$anonymous", "$authenticated"
|
||||
access: $all
|
||||
|
||||
# allow 'admin' to publish packages
|
||||
publish: admin
|
||||
|
||||
# if package is not available locally, proxy requests to 'npmjs' registry
|
||||
proxy: npmjs
|
||||
|
||||
#####################################################################
|
||||
# Advanced settings
|
||||
#####################################################################
|
||||
|
||||
# if you use nginx with custom path, use this to override links
|
||||
#url_prefix: https://dev.company.local/verdaccio/
|
||||
|
||||
# You can specify listen address (or simply a port).
|
||||
# If you add multiple values, verdaccio will listen on all of them.
|
||||
#
|
||||
# Examples:
|
||||
#
|
||||
#listen:
|
||||
# - localhost:4873 # default value
|
||||
# - http://localhost:4873 # same thing
|
||||
# - 0.0.0.0:4873 # listen on all addresses (INADDR_ANY)
|
||||
# - https://example.org:4873 # if you want to use https
|
||||
# - [::1]:4873 # ipv6
|
||||
# - unix:/tmp/verdaccio.sock # unix socket
|
||||
|
||||
# Configure HTTPS, it is required if you use "https" protocol above.
|
||||
#https:
|
||||
# key: path/to/server.key
|
||||
# cert: path/to/server.crt
|
||||
# ca: path/to/server.pem
|
||||
|
||||
# type: file | stdout | stderr
|
||||
# level: trace | debug | info | http (default) | warn | error | fatal
|
||||
#
|
||||
# parameters for file: name is filename
|
||||
# {type: 'file', path: 'verdaccio.log', level: 'debug'},
|
||||
#
|
||||
# parameters for stdout and stderr: format: json | pretty | pretty-timestamped
|
||||
# {type: 'stdout', format: 'pretty', level: 'debug'},
|
||||
logs:
|
||||
- {type: stdout, format: pretty, level: http}
|
||||
#- {type: file, path: verdaccio.log, level: info}
|
||||
|
||||
# you can specify proxy used with all requests in wget-like manner here
|
||||
# (or set up ENV variables with the same name)
|
||||
#http_proxy: http://something.local/
|
||||
#https_proxy: https://something.local/
|
||||
#no_proxy: localhost,127.0.0.1
|
||||
|
||||
# maximum size of uploaded json document
|
||||
# increase it if you have "request entity too large" errors
|
||||
#max_body_size: 1mb
|
||||
|
||||
# Notify Settings
|
||||
# Notify was built primarily to use with Slack's Incoming
|
||||
# webhooks, but will also deliver a simple payload to
|
||||
# any endpoint. Currently only active for publish / create
|
||||
# commands.
|
||||
notify:
|
||||
# Choose a method. Technically this will accept any HTTP
|
||||
# request method, but probably stick to GET or POST
|
||||
method: POST
|
||||
# Only run this notification if the package name matches the regular
|
||||
# expression
|
||||
packagePattern: ^example-package$
|
||||
# Any flags to be used with the regular expression
|
||||
packagePatternFlags: i
|
||||
# If this endpoint requires specific headers, set them here
|
||||
# as an array of key: value objects.
|
||||
headers: [{'Content-type': 'application/x-www-form-urlencoded'}]
|
||||
# set the URL endpoint for this call
|
||||
endpoint: https://hooks.slack.com/...
|
||||
# Finally, the content you will be sending in the body.
|
||||
# This data will first be run through Handlebars to parse
|
||||
# any Handlebar expressions. All data housed in the metadata object
|
||||
# is available for use within the expressions.
|
||||
content: ' {{ handlebar-expression }}'
|
||||
# For Slack, follow the following format:
|
||||
# content: '{ "text": "Package *{{ name }}* published to version *{{ dist-tags.latest }}*", "username": "Verdaccio", "icon_emoji": ":package:" }'
|
||||
|
||||
# Multiple notification endpoints can be created by specifying a collection
|
||||
'example-package-1':
|
||||
method: POST
|
||||
# Only run this notification if the package name matches the regular
|
||||
# expression
|
||||
packagePattern: ^example-package-regex$
|
||||
# Any flags to be used with the regular expression
|
||||
# since verdaccio 2.2.2 this property has been disabled read #108
|
||||
# it will be re-enabled after 2.5.0
|
||||
# packagePatternFlags: i
|
||||
# If this endpoint requires specific headers, set them here
|
||||
# as an array of key: value objects.
|
||||
headers: [{'Content-type': 'application/x-www-form-urlencoded'}]
|
||||
# headers also supports a literal object
|
||||
headers: {'Content-type': 'application/x-www-form-urlencoded'}
|
||||
# set the URL endpoint for this call
|
||||
endpoint: https://hooks.slack.com/...
|
||||
# Finally, the content you will be sending in the body.
|
||||
# This data will first be run through Handlebars to parse
|
||||
# any Handlebar expressions. All data housed in the metadata object
|
||||
# is available for use within the expressions.
|
||||
content: ' {{ handlebar-expression }}'
|
||||
# For Slack, follow the following format:
|
||||
# content: '{ "text": "Package *{{ name }}* published to version *{{ dist-tags.latest }}*", "username": "Verdaccio", "icon_emoji": ":package:" }'
|
||||
14
docker-compose.yaml
Normal file
@@ -0,0 +1,14 @@
|
||||
version: '2.1'
|
||||
services:
|
||||
verdaccio:
|
||||
build: .
|
||||
container_name: verdaccio
|
||||
environment:
|
||||
- PORT
|
||||
ports:
|
||||
- $PORT:$PORT
|
||||
volumes:
|
||||
- verdaccio:/verdaccio
|
||||
volumes:
|
||||
verdaccio:
|
||||
driver: local
|
||||
9
index.js
Normal file
@@ -0,0 +1,9 @@
|
||||
module.exports = require('./lib');
|
||||
|
||||
/** package
|
||||
{ "name": "verdaccio",
|
||||
"version": "0.0.0",
|
||||
"dependencies": {"js-yaml": "*"},
|
||||
"scripts": {"postinstall": "js-yaml package.yaml > package.json ; npm install"}
|
||||
}
|
||||
**/
|
||||
8
lib/GUI/.eslintrc
Normal file
@@ -0,0 +1,8 @@
|
||||
|
||||
env:
|
||||
node: true
|
||||
browser: true
|
||||
|
||||
globals:
|
||||
jQuery: true
|
||||
|
||||
6358
lib/GUI/css/bootstrap.css
vendored
Normal file
File diff suppressed because it is too large
56
lib/GUI/css/fontello.less
Normal file
@@ -0,0 +1,56 @@
|
||||
@font-face {
|
||||
font-family: 'fontello';
|
||||
src: url('../static/fontello.eot?10872183');
|
||||
src: url('../static/fontello.eot?10872183#iefix') format('embedded-opentype'),
|
||||
url('../static/fontello.woff?10872183') format('woff'),
|
||||
url('../static/fontello.ttf?10872183') format('truetype'),
|
||||
url('../static/fontello.svg?10872183#fontello') format('svg');
|
||||
font-weight: normal;
|
||||
font-style: normal;
|
||||
}
|
||||
/* Chrome hack: SVG is rendered more smooth in Windozze. 100% magic, uncomment if you need it. */
|
||||
/* Note, that will break hinting! In other OS-es font will be not as sharp as it could be */
|
||||
/*
|
||||
@media screen and (-webkit-min-device-pixel-ratio:0) {
|
||||
@font-face {
|
||||
font-family: 'fontello';
|
||||
src: url('../font/fontello.svg?10872183#fontello') format('svg');
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
[class^="icon-"]:before, [class*=" icon-"]:before {
|
||||
font-family: "fontello";
|
||||
font-style: normal;
|
||||
font-weight: normal;
|
||||
speak: none;
|
||||
|
||||
display: inline-block;
|
||||
text-decoration: inherit;
|
||||
width: 1em;
|
||||
margin-right: .2em;
|
||||
text-align: center;
|
||||
/* opacity: .8; */
|
||||
|
||||
/* For safety - reset parent styles, that can break glyph codes*/
|
||||
font-variant: normal;
|
||||
text-transform: none;
|
||||
|
||||
/* fix buttons height, for twitter bootstrap */
|
||||
line-height: 1em;
|
||||
|
||||
/* Animation center compensation - margins should be symmetric */
|
||||
/* remove if not needed */
|
||||
margin-left: .2em;
|
||||
|
||||
/* you can be more comfortable with increased icons size */
|
||||
/* font-size: 120%; */
|
||||
|
||||
/* Uncomment for 3D effect */
|
||||
/* text-shadow: 1px 1px 1px rgba(127, 127, 127, 0.3); */
|
||||
}
|
||||
|
||||
.icon-search:before { content: '\e801'; } /* '' */
|
||||
.icon-cancel:before { content: '\e803'; } /* '' */
|
||||
.icon-right-open:before { content: '\e802'; } /* '' */
|
||||
.icon-angle-right:before { content: '\e800'; } /* '' */
|
||||
136
lib/GUI/css/helpers.less
Normal file
@@ -0,0 +1,136 @@
|
||||
//
|
||||
// copied from https://github.com/bpeacock/helpers.less
|
||||
//
|
||||
// author: Brian Peacock
|
||||
// license: MIT
|
||||
//
|
||||
|
||||
.backface-visibility(@style) {
|
||||
-webkit-backface-visibility: @style;
|
||||
-moz-backface-visibility: @style;
|
||||
-ms-backface-visibility: @style;
|
||||
-o-backface-visibility: @style;
|
||||
backface-visibility: @style;
|
||||
}
|
||||
|
||||
.perspective(@style) {
|
||||
-webkit-perspective: @style;
|
||||
-moz-perspective: @style;
|
||||
-ms-perspective: @style;
|
||||
-o-perspective: @style;
|
||||
perspective: @style;
|
||||
}
|
||||
|
||||
.border-radius(@radius) {
|
||||
-webkit-border-radius: @radius;
|
||||
-moz-border-radius: @radius;
|
||||
border-radius: @radius;
|
||||
}
|
||||
|
||||
.border-radius-topleft(@radius) {
|
||||
-moz-border-radius-topleft: @radius;
|
||||
border-top-left-radius: @radius;
|
||||
}
|
||||
|
||||
.border-radius-topright(@radius) {
|
||||
-moz-border-radius-topright: @radius;
|
||||
border-top-right-radius: @radius;
|
||||
}
|
||||
|
||||
.border-radius-bottomleft(@radius) {
|
||||
-moz-border-radius-bottomleft: @radius;
|
||||
border-bottom-left-radius: @radius;
|
||||
}
|
||||
|
||||
.border-radius-bottomright(@radius) {
|
||||
-moz-border-radius-bottomright: @radius;
|
||||
border-bottom-right-radius: @radius;
|
||||
}
|
||||
|
||||
.circle(@diameter) {
|
||||
width: @diameter;
|
||||
height: @diameter;
|
||||
.border-radius(@diameter/2);
|
||||
}
|
||||
|
||||
.no-select() {
|
||||
-moz-user-select: none;
|
||||
-ms-user-select: none;
|
||||
-khtml-user-select: none;
|
||||
-webkit-user-select: none;
|
||||
-o-user-select: none;
|
||||
user-select: none;
|
||||
}
|
||||
|
||||
.do-select() {
|
||||
-moz-user-select: text;
|
||||
-ms-user-select: text;
|
||||
-khtml-user-select: text;
|
||||
-webkit-user-select: text;
|
||||
-o-user-select: text;
|
||||
user-select: text;
|
||||
}
|
||||
|
||||
.border-box() {
|
||||
-moz-box-sizing: border-box;
|
||||
-webkit-box-sizing: border-box;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
.box-shadow(@value1, @value2:X, ...) {
|
||||
@value: ~`"@{arguments}".replace(/[\[\]]|\,\sX/g, '')`;
|
||||
-moz-box-shadow: @value;
|
||||
-webkit-box-shadow: @value;
|
||||
box-shadow: @value;
|
||||
}
|
||||
|
||||
.transition(@value1, @value2:X, ...) {
|
||||
@value: ~`"@{arguments}".replace(/[\[\]]|\,\sX/g, '')`;
|
||||
|
||||
-webkit-transition: @value;
|
||||
-moz-transition: @value;
|
||||
-ms-transition: @value;
|
||||
-o-transition: @value;
|
||||
transition: @value;
|
||||
}
|
||||
|
||||
.transformTransition(@value1, @value2:X, ...) {
|
||||
@value: ~`"@{arguments}".replace(/[\[\]]|\,\sX/g, '')`;
|
||||
|
||||
-webkit-transition: -webkit-transform @value;
|
||||
-moz-transition: -moz-transform @value;
|
||||
-ms-transition: -ms-transform @value;
|
||||
-o-transition: -o-transform @value;
|
||||
transition: transform @value;
|
||||
}
|
||||
|
||||
.animation(@value1, @value2:X, ...) {
|
||||
@value: ~`"@{arguments}".replace(/[\[\]]|\,\sX/g, '')`;
|
||||
|
||||
-webkit-animation: @value;
|
||||
-moz-animation: @value;
|
||||
-o-animation: @value;
|
||||
animation: @value;
|
||||
}
|
||||
|
||||
.transform(@value1, @value2:X, ...) {
|
||||
@value: ~`"@{arguments}".replace(/[\[\]]|\,\sX/g, '')`;
|
||||
|
||||
-webkit-transform: @value;
|
||||
-moz-transform: @value;
|
||||
-o-transform: @value;
|
||||
-ms-transform: @value;
|
||||
transform: @value;
|
||||
}
|
||||
|
||||
.rotate(@deg) {
|
||||
.transform(rotate(@deg));
|
||||
}
|
||||
|
||||
.scale(@ratio) {
|
||||
.transform(scale(@ratio, @ratio));
|
||||
}
|
||||
|
||||
.translate(@x, @y) {
|
||||
.transform(translate(@x, @y));
|
||||
}
|
||||
153
lib/GUI/css/highlight.js.less
Normal file
@@ -0,0 +1,153 @@
|
||||
/*
|
||||
|
||||
Original style from softwaremaniacs.org (c) Ivan Sagalaev <Maniac@SoftwareManiacs.Org>
|
||||
|
||||
*/
|
||||
|
||||
.hljs {
|
||||
display: block; padding: 0.5em;
|
||||
background: #F0F0F0;
|
||||
}
|
||||
|
||||
.hljs,
|
||||
.hljs-subst,
|
||||
.hljs-tag .hljs-title,
|
||||
.lisp .hljs-title,
|
||||
.clojure .hljs-built_in,
|
||||
.nginx .hljs-title {
|
||||
color: black;
|
||||
}
|
||||
|
||||
.hljs-string,
|
||||
.hljs-title,
|
||||
.hljs-constant,
|
||||
.hljs-parent,
|
||||
.hljs-tag .hljs-value,
|
||||
.hljs-rules .hljs-value,
|
||||
.hljs-rules .hljs-value .hljs-number,
|
||||
.hljs-preprocessor,
|
||||
.hljs-pragma,
|
||||
.haml .hljs-symbol,
|
||||
.ruby .hljs-symbol,
|
||||
.ruby .hljs-symbol .hljs-string,
|
||||
.hljs-aggregate,
|
||||
.hljs-template_tag,
|
||||
.django .hljs-variable,
|
||||
.smalltalk .hljs-class,
|
||||
.hljs-addition,
|
||||
.hljs-flow,
|
||||
.hljs-stream,
|
||||
.bash .hljs-variable,
|
||||
.apache .hljs-tag,
|
||||
.apache .hljs-cbracket,
|
||||
.tex .hljs-command,
|
||||
.tex .hljs-special,
|
||||
.erlang_repl .hljs-function_or_atom,
|
||||
.asciidoc .hljs-header,
|
||||
.markdown .hljs-header,
|
||||
.coffeescript .hljs-attribute {
|
||||
color: #800;
|
||||
}
|
||||
|
||||
.smartquote,
|
||||
.hljs-comment,
|
||||
.hljs-annotation,
|
||||
.hljs-template_comment,
|
||||
.diff .hljs-header,
|
||||
.hljs-chunk,
|
||||
.asciidoc .hljs-blockquote,
|
||||
.markdown .hljs-blockquote {
|
||||
color: #888;
|
||||
}
|
||||
|
||||
.hljs-number,
|
||||
.hljs-date,
|
||||
.hljs-regexp,
|
||||
.hljs-literal,
|
||||
.hljs-hexcolor,
|
||||
.smalltalk .hljs-symbol,
|
||||
.smalltalk .hljs-char,
|
||||
.go .hljs-constant,
|
||||
.hljs-change,
|
||||
.lasso .hljs-variable,
|
||||
.makefile .hljs-variable,
|
||||
.asciidoc .hljs-bullet,
|
||||
.markdown .hljs-bullet,
|
||||
.asciidoc .hljs-link_url,
|
||||
.markdown .hljs-link_url {
|
||||
color: #080;
|
||||
}
|
||||
|
||||
.hljs-label,
|
||||
.hljs-javadoc,
|
||||
.ruby .hljs-string,
|
||||
.hljs-decorator,
|
||||
.hljs-filter .hljs-argument,
|
||||
.hljs-localvars,
|
||||
.hljs-array,
|
||||
.hljs-attr_selector,
|
||||
.hljs-important,
|
||||
.hljs-pseudo,
|
||||
.hljs-pi,
|
||||
.haml .hljs-bullet,
|
||||
.hljs-doctype,
|
||||
.hljs-deletion,
|
||||
.hljs-envvar,
|
||||
.hljs-shebang,
|
||||
.apache .hljs-sqbracket,
|
||||
.nginx .hljs-built_in,
|
||||
.tex .hljs-formula,
|
||||
.erlang_repl .hljs-reserved,
|
||||
.hljs-prompt,
|
||||
.asciidoc .hljs-link_label,
|
||||
.markdown .hljs-link_label,
|
||||
.vhdl .hljs-attribute,
|
||||
.clojure .hljs-attribute,
|
||||
.asciidoc .hljs-attribute,
|
||||
.lasso .hljs-attribute,
|
||||
.coffeescript .hljs-property,
|
||||
.hljs-phony {
|
||||
color: #88F
|
||||
}
|
||||
|
||||
.hljs-keyword,
|
||||
.hljs-id,
|
||||
.hljs-title,
|
||||
.hljs-built_in,
|
||||
.hljs-aggregate,
|
||||
.css .hljs-tag,
|
||||
.hljs-javadoctag,
|
||||
.hljs-phpdoc,
|
||||
.hljs-yardoctag,
|
||||
.smalltalk .hljs-class,
|
||||
.hljs-winutils,
|
||||
.bash .hljs-variable,
|
||||
.apache .hljs-tag,
|
||||
.go .hljs-typename,
|
||||
.tex .hljs-command,
|
||||
.asciidoc .hljs-strong,
|
||||
.markdown .hljs-strong,
|
||||
.hljs-request,
|
||||
.hljs-status {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.asciidoc .hljs-emphasis,
|
||||
.markdown .hljs-emphasis {
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
.nginx .hljs-built_in {
|
||||
font-weight: normal;
|
||||
}
|
||||
|
||||
.coffeescript .javascript,
|
||||
.javascript .xml,
|
||||
.lasso .markup,
|
||||
.tex .hljs-formula,
|
||||
.xml .javascript,
|
||||
.xml .vbscript,
|
||||
.xml .css,
|
||||
.xml .hljs-cdata {
|
||||
opacity: 0.5;
|
||||
}
|
||||
7
lib/GUI/css/main.less
Normal file
@@ -0,0 +1,7 @@
|
||||
@import "helpers.less";
|
||||
@import (less) "bootstrap.css";
|
||||
@import "markdown.less";
|
||||
@import "highlight.js.less";
|
||||
@import "fontello.less";
|
||||
@import "styles.less";
|
||||
@import "responsive.less";
|
||||
700
lib/GUI/css/markdown.less
Normal file
@@ -0,0 +1,700 @@
|
||||
/*** Sourced from this Gist: https://github.com/sindresorhus/github-markdown-css ***/
|
||||
@font-face {
|
||||
font-family: octicons-anchor;
|
||||
src: url(data:font/woff;charset=utf-8;base64,d09GRgABAAAAAAYcAA0AAAAACjQAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAABGRlRNAAABMAAAABwAAAAca8vGTk9TLzIAAAFMAAAARAAAAFZG1VHVY21hcAAAAZAAAAA+AAABQgAP9AdjdnQgAAAB0AAAAAQAAAAEACICiGdhc3AAAAHUAAAACAAAAAj//wADZ2x5ZgAAAdwAAADRAAABEKyikaNoZWFkAAACsAAAAC0AAAA2AtXoA2hoZWEAAALgAAAAHAAAACQHngNFaG10eAAAAvwAAAAQAAAAEAwAACJsb2NhAAADDAAAAAoAAAAKALIAVG1heHAAAAMYAAAAHwAAACABEAB2bmFtZQAAAzgAAALBAAAFu3I9x/Nwb3N0AAAF/AAAAB0AAAAvaoFvbwAAAAEAAAAAzBdyYwAAAADP2IQvAAAAAM/bz7t4nGNgZGFgnMDAysDB1Ml0hoGBoR9CM75mMGLkYGBgYmBlZsAKAtJcUxgcPsR8iGF2+O/AEMPsznAYKMwIkgMA5REMOXicY2BgYGaAYBkGRgYQsAHyGMF8FgYFIM0ChED+h5j//yEk/3KoSgZGNgYYk4GRCUgwMaACRoZhDwCs7QgGAAAAIgKIAAAAAf//AAJ4nHWMMQrCQBBF/0zWrCCIKUQsTDCL2EXMohYGSSmorScInsRGL2DOYJe0Ntp7BK+gJ1BxF1stZvjz/v8DRghQzEc4kIgKwiAppcA9LtzKLSkdNhKFY3HF4lK69ExKslx7Xa+vPRVS43G98vG1DnkDMIBUgFN0MDXflU8tbaZOUkXUH0+U27RoRpOIyCKjbMCVejwypzJJG4jIwb43rfl6wbwanocrJm9XFYfskuVC5K/TPyczNU7b84CXcbxks1Un6H6tLH9vf2LRnn8Ax7A5WQAAAHicY2BkYGAA4teL1+yI57f5ysDNwgAC529f0kOmWRiYVgEpDgYmEA8AUzEKsQAAAHicY2BkYGB2+O/AEMPCAAJAkpEBFbAAADgKAe0EAAAiAAAAAAQAAAAEAAAAAAAAKgAqACoAiAAAeJxjYGRgYGBhsGFgYgABEMkFhAwM/xn0QAIAD6YBhwB4nI1Ty07cMBS9QwKlQapQW3VXySvEqDCZGbGaHULiIQ1FKgjWMxknMfLEke2A+IJu+wntrt/QbVf9gG75jK577Lg8K1qQPCfnnnt8fX1NRC/pmjrk/zprC+8D7tBy9DHgBXoWfQ44Av8t4Bj4Z8CLtBL9CniJluPXASf0Lm4CXqFX8Q84dOLnMB17N4c7tBo1AS/Qi+hTwBH4rwHHwN8DXqQ30XXAS7QaLwSc0Gn8NuAVWou/gFmnjLrEaEh9GmDdDGgL3B4JsrRPDU2hTOiMSuJUIdKQQayiAth69r6akSSFqIJuA19TrzCIaY8sIoxyrNIrL//pw7A2iMygkX5vDj+G+kuoLdX4GlGK/8Lnlz6/h9MpmoO9rafrz7ILXEHHaAx95s9lsI7AHNMBWEZHULnfAXwG9/ZqdzLI08iuwRloXE8kfhXYAvE23+23DU3t626rbs8/8adv+9DWknsHp3E17oCf+Z48rvEQNZ78paYM38qfk3v/u3l3u3GXN2Dmvmvpf1Srwk3pB/VSsp512bA/GG5i2WJ7wu430yQ5K3nFGiOqgtmSB5pJVSizwaacmUZzZhXLlZTq8qGGFY2YcSkqbth6aW1tRmlaCFs2016m5qn36SbJrqosG4uMV4aP2PHBmB3tjtmgN2izkGQyLWprekbIntJFing32a5rKWCN/SdSoga45EJykyQ7asZvHQ8PTm6cslIpwyeyjbVltNikc2HTR7YKh9LBl9DADC0U/jLcBZDKrMhUBfQBvXRzLtFtjU9eNHKin0x5InTqb8lNpfKv1s1xHzTXRqgKzek/mb7nB8RZTCDhGEX3kK/8Q75AmUM/eLkfA+0Hi908Kx4eNsMgudg5GLdRD7a84npi+YxNr5i5KIbW5izXas7cHXIMAau1OueZhfj+cOcP3P8MNIWLyYOBuxL6DRylJ4cAAAB4nGNgYoAALjDJyIAOWMCiTIxMLDmZedkABtIBygAAAA==) format('woff');
|
||||
}
|
||||
|
||||
.readme {
|
||||
-ms-text-size-adjust: 100%;
|
||||
-webkit-text-size-adjust: 100%;
|
||||
color: #333;
|
||||
overflow: hidden;
|
||||
font-family: "Helvetica Neue", Helvetica, "Segoe UI", Arial, freesans, sans-serif;
|
||||
font-size: 16px;
|
||||
line-height: 1.6;
|
||||
word-wrap: break-word;
|
||||
|
||||
|
||||
a {
|
||||
background: transparent;
|
||||
|
||||
&:active,
|
||||
&:hover {
|
||||
outline: 0;
|
||||
}
|
||||
}
|
||||
|
||||
strong {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
h1 {
|
||||
text-align: left;
|
||||
font-size: 2em;
|
||||
margin: 0.67em 0;
|
||||
}
|
||||
|
||||
img {
|
||||
border: 0;
|
||||
}
|
||||
|
||||
hr {
|
||||
-moz-box-sizing: content-box;
|
||||
box-sizing: content-box;
|
||||
height: 0;
|
||||
}
|
||||
|
||||
pre {
|
||||
overflow: auto;
|
||||
}
|
||||
|
||||
code,
|
||||
kbd,
|
||||
pre {
|
||||
font-family: monospace, monospace;
|
||||
font-size: 1em;
|
||||
}
|
||||
|
||||
input {
|
||||
color: inherit;
|
||||
font: inherit;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
html input[disabled] {
|
||||
cursor: default;
|
||||
}
|
||||
|
||||
input {
|
||||
line-height: normal;
|
||||
}
|
||||
|
||||
input[type="checkbox"] {
|
||||
-moz-box-sizing: border-box;
|
||||
box-sizing: border-box;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
border-spacing: 0;
|
||||
}
|
||||
|
||||
td,
|
||||
th {
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
* {
|
||||
-moz-box-sizing: border-box;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
input {
|
||||
font: 13px/1.4 Helvetica, arial, freesans, clean, sans-serif, "Segoe UI Emoji", "Segoe UI Symbol";
|
||||
}
|
||||
|
||||
a {
|
||||
color: #4183c4;
|
||||
text-decoration: none;
|
||||
|
||||
&:hover,
|
||||
&:focus,
|
||||
&:active {
|
||||
text-decoration: underline;
|
||||
}
|
||||
}
|
||||
|
||||
hr {
|
||||
height: 0;
|
||||
margin: 15px 0;
|
||||
overflow: hidden;
|
||||
background: transparent;
|
||||
border: 0;
|
||||
border-bottom: 1px solid #ddd;
|
||||
|
||||
&:before {
|
||||
display: table;
|
||||
content: "";
|
||||
}
|
||||
|
||||
&:after {
|
||||
display: table;
|
||||
clear: both;
|
||||
content: "";
|
||||
}
|
||||
}
|
||||
|
||||
h1,
|
||||
h2,
|
||||
h3,
|
||||
h4,
|
||||
h5,
|
||||
h6 {
|
||||
margin-top: 15px;
|
||||
margin-bottom: 15px;
|
||||
line-height: 1.1;
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-size: 30px;
|
||||
}
|
||||
|
||||
h2 {
|
||||
font-size: 21px;
|
||||
}
|
||||
|
||||
h3 {
|
||||
font-size: 16px;
|
||||
}
|
||||
|
||||
h4 {
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
h5 {
|
||||
font-size: 12px;
|
||||
}
|
||||
|
||||
h6 {
|
||||
font-size: 11px;
|
||||
}
|
||||
|
||||
blockquote {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
ul,
|
||||
ol {
|
||||
padding: 0;
|
||||
margin-top: 0;
|
||||
margin-bottom: 0;
|
||||
|
||||
ol {
|
||||
list-style-type: lower-roman;
|
||||
|
||||
ol {
|
||||
list-style-type: lower-alpha;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ul {
|
||||
ul {
|
||||
ol {
|
||||
list-style-type: lower-alpha;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ol {
|
||||
ul {
|
||||
ol {
|
||||
list-style-type: lower-alpha;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
dd {
|
||||
margin-left: 0;
|
||||
}
|
||||
|
||||
code {
|
||||
font: 12px Consolas, "Liberation Mono", Menlo, Courier, monospace;
|
||||
}
|
||||
|
||||
pre {
|
||||
margin-top: 0;
|
||||
margin-bottom: 0;
|
||||
font: 12px Consolas, "Liberation Mono", Menlo, Courier, monospace;
|
||||
}
|
||||
|
||||
kbd {
|
||||
background-color: #e7e7e7;
|
||||
background-image: -webkit-linear-gradient(#fefefe, #e7e7e7);
|
||||
background-image: linear-gradient(#fefefe, #e7e7e7);
|
||||
background-repeat: repeat-x;
|
||||
border-radius: 2px;
|
||||
border: 1px solid #cfcfcf;
|
||||
color: #000;
|
||||
padding: 3px 5px;
|
||||
line-height: 10px;
|
||||
font: 11px Consolas, "Liberation Mono", Menlo, Courier, monospace;
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
>*:first-child {
|
||||
margin-top: 0 !important;
|
||||
}
|
||||
|
||||
>*:last-child {
|
||||
margin-bottom: 0 !important;
|
||||
}
|
||||
|
||||
.anchor {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
display: block;
|
||||
padding-right: 6px;
|
||||
padding-left: 30px;
|
||||
margin-left: -30px;
|
||||
}
|
||||
|
||||
.anchor:focus {
|
||||
outline: none;
|
||||
}
|
||||
|
||||
h1,
|
||||
h2,
|
||||
h3,
|
||||
h4,
|
||||
h5,
|
||||
h6 {
|
||||
position: relative;
|
||||
margin-top: 1em;
|
||||
margin-bottom: 16px;
|
||||
font-weight: bold;
|
||||
line-height: 1.4;
|
||||
|
||||
.octicon-link {
|
||||
display: none;
|
||||
color: #000;
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
&:hover {
|
||||
.anchor {
|
||||
height: 1em;
|
||||
padding-left: 8px;
|
||||
margin-left: -30px;
|
||||
line-height: 1;
|
||||
text-decoration: none;
|
||||
|
||||
.octicon-link {
|
||||
display: inline-block;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
h1 {
|
||||
padding-bottom: 0.3em;
|
||||
font-size: 2.25em;
|
||||
line-height: 1.2;
|
||||
border-bottom: 1px solid #eee;
|
||||
}
|
||||
|
||||
h2 {
|
||||
padding-bottom: 0.3em;
|
||||
font-size: 1.75em;
|
||||
line-height: 1.225;
|
||||
border-bottom: 1px solid #eee;
|
||||
}
|
||||
|
||||
h3 {
|
||||
font-size: 1.5em;
|
||||
line-height: 1.43;
|
||||
}
|
||||
|
||||
h4 {
|
||||
font-size: 1.25em;
|
||||
}
|
||||
|
||||
h5 {
|
||||
font-size: 1em;
|
||||
}
|
||||
|
||||
h6 {
|
||||
font-size: 1em;
|
||||
color: #777;
|
||||
}
|
||||
|
||||
p,
|
||||
blockquote,
|
||||
ul,
|
||||
ol,
|
||||
dl,
|
||||
table,
|
||||
pre {
|
||||
margin-top: 0;
|
||||
margin-bottom: 16px;
|
||||
}
|
||||
|
||||
hr {
|
||||
height: 4px;
|
||||
padding: 0;
|
||||
margin: 16px 0;
|
||||
background-color: #e7e7e7;
|
||||
border: 0 none;
|
||||
}
|
||||
|
||||
ul,
|
||||
ol {
|
||||
padding-left: 2em;
|
||||
|
||||
ul, ol {
|
||||
margin-top: 0;
|
||||
margin-bottom: 0;
|
||||
}
|
||||
}
|
||||
|
||||
li {
|
||||
>p {
|
||||
margin-top: 16px;
|
||||
}
|
||||
}
|
||||
|
||||
dl {
|
||||
padding: 0;
|
||||
|
||||
dt {
|
||||
padding: 0;
|
||||
margin-top: 16px;
|
||||
font-size: 1em;
|
||||
font-style: italic;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
dd {
|
||||
padding: 0 16px;
|
||||
margin-bottom: 16px;
|
||||
}
|
||||
}
|
||||
|
||||
blockquote {
|
||||
padding: 0 15px;
|
||||
color: #777;
|
||||
border-left: 4px solid #ddd;
|
||||
|
||||
>:first-child {
|
||||
margin-top: 0;
|
||||
}
|
||||
|
||||
>:last-child {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
}
|
||||
|
||||
table {
|
||||
display: block;
|
||||
width: 100%;
|
||||
overflow: auto;
|
||||
word-break: normal;
|
||||
word-break: keep-all;
|
||||
|
||||
th {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
th,
|
||||
td {
|
||||
padding: 6px 13px;
|
||||
border: 1px solid #ddd;
|
||||
}
|
||||
|
||||
tr {
|
||||
background-color: #fff;
|
||||
border-top: 1px solid #ccc;
|
||||
|
||||
&:nth-child(2n) {
|
||||
background-color: #f8f8f8;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
img {
|
||||
max-width: 100%;
|
||||
-moz-box-sizing: border-box;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
code {
|
||||
padding: 0;
|
||||
padding-top: 0.2em;
|
||||
padding-bottom: 0.2em;
|
||||
margin: 0;
|
||||
font-size: 85%;
|
||||
background-color: rgba(0,0,0,0.04);
|
||||
border-radius: 3px;
|
||||
|
||||
&:before,
|
||||
&:after {
|
||||
letter-spacing: -0.2em;
|
||||
content: "\00a0";
|
||||
}
|
||||
}
|
||||
|
||||
pre {
|
||||
padding: 16px;
|
||||
overflow: auto;
|
||||
font-size: 85%;
|
||||
line-height: 1.45;
|
||||
background-color: #f7f7f7;
|
||||
border-radius: 3px;
|
||||
word-wrap: normal;
|
||||
|
||||
>code {
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
font-size: 100%;
|
||||
word-break: normal;
|
||||
white-space: pre;
|
||||
background: transparent;
|
||||
border: 0;
|
||||
display: inline;
|
||||
max-width: initial;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
overflow: initial;
|
||||
line-height: inherit;
|
||||
word-wrap: normal;
|
||||
background-color: transparent;
|
||||
border: 0;
|
||||
|
||||
&:before,
|
||||
&:after {
|
||||
content: normal;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.highlight {
|
||||
margin-bottom: 16px;
|
||||
background: #fff;
|
||||
|
||||
pre {
|
||||
padding: 16px;
|
||||
overflow: auto;
|
||||
font-size: 85%;
|
||||
line-height: 1.45;
|
||||
background-color: #f7f7f7;
|
||||
border-radius: 3px;
|
||||
margin-bottom: 0;
|
||||
word-break: normal;
|
||||
}
|
||||
}
|
||||
|
||||
.highlight{
|
||||
.mf,
|
||||
.mh,
|
||||
.mi,
|
||||
.mo,
|
||||
.il,
|
||||
.m {
|
||||
color: #945277;
|
||||
}
|
||||
|
||||
.s,
|
||||
.sb,
|
||||
.sc,
|
||||
.sd,
|
||||
.s2,
|
||||
.se,
|
||||
.sh,
|
||||
.si,
|
||||
.sx,
|
||||
.s1 {
|
||||
color: #df5000;
|
||||
}
|
||||
|
||||
.kc,
|
||||
.kd,
|
||||
.kn,
|
||||
.kp,
|
||||
.kr,
|
||||
.kt,
|
||||
.k,
|
||||
.o {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.kt {
|
||||
color: #458;
|
||||
}
|
||||
|
||||
.c,
|
||||
.cm,
|
||||
.c1 {
|
||||
color: #998;
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
.cp,
|
||||
.cs {
|
||||
color: #999;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.cs {
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
.n {
|
||||
color: #333;
|
||||
}
|
||||
|
||||
.na,
|
||||
.nv,
|
||||
.vc,
|
||||
.vg,
|
||||
.vi {
|
||||
color: #008080;
|
||||
}
|
||||
|
||||
.nb {
|
||||
color: #0086B3;
|
||||
}
|
||||
|
||||
.nc {
|
||||
color: #458;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.no {
|
||||
color: #094e99;
|
||||
}
|
||||
|
||||
.ni {
|
||||
color: #800080;
|
||||
}
|
||||
|
||||
.ne {
|
||||
color: #990000;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.nf {
|
||||
color: #945277;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.nn {
|
||||
color: #555;
|
||||
}
|
||||
|
||||
.nt {
|
||||
color: #000080;
|
||||
}
|
||||
|
||||
.err {
|
||||
color: #a61717;
|
||||
background-color: #e3d2d2;
|
||||
}
|
||||
|
||||
.gd {
|
||||
color: #000;
|
||||
background-color: #fdd;
|
||||
|
||||
.x {
|
||||
color: #000;
|
||||
background-color: #faa;
|
||||
}
|
||||
}
|
||||
|
||||
.ge {
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
.gr {
|
||||
color: #aa0000;
|
||||
}
|
||||
|
||||
.gh {
|
||||
color: #999;
|
||||
}
|
||||
|
||||
.gi {
|
||||
color: #000;
|
||||
background-color: #dfd;
|
||||
|
||||
.x {
|
||||
color: #000;
|
||||
background-color: #afa;
|
||||
}
|
||||
}
|
||||
|
||||
.go {
|
||||
color: #888;
|
||||
}
|
||||
|
||||
.gp {
|
||||
color: #555;
|
||||
}
|
||||
|
||||
.gs {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.gu {
|
||||
color: #800080;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.gt {
|
||||
color: #aa0000;
|
||||
}
|
||||
|
||||
.ow {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.w {
|
||||
color: #bbb;
|
||||
}
|
||||
|
||||
.sr {
|
||||
color: #017936;
|
||||
}
|
||||
|
||||
.ss {
|
||||
color: #8b467f;
|
||||
}
|
||||
|
||||
.bp {
|
||||
color: #999;
|
||||
}
|
||||
|
||||
.gc {
|
||||
color: #999;
|
||||
background-color: #EAF2F5;
|
||||
}
|
||||
}
|
||||
|
||||
.octicon {
|
||||
font: normal normal 16px octicons-anchor;
|
||||
line-height: 1;
|
||||
display: inline-block;
|
||||
text-decoration: none;
|
||||
-webkit-font-smoothing: antialiased;
|
||||
-moz-osx-font-smoothing: grayscale;
|
||||
-webkit-user-select: none;
|
||||
-moz-user-select: none;
|
||||
-ms-user-select: none;
|
||||
user-select: none;
|
||||
}
|
||||
|
||||
.octicon-link {
|
||||
&:before {
|
||||
content: '\f05c';
|
||||
}
|
||||
}
|
||||
|
||||
.task-list-item {
|
||||
list-style-type: none;
|
||||
|
||||
+.task-list-item {
|
||||
margin-top: 3px;
|
||||
}
|
||||
|
||||
input {
|
||||
float: left;
|
||||
margin: 0.3em 0 0.25em -1.6em;
|
||||
vertical-align: middle;
|
||||
}
|
||||
}
|
||||
}
|
||||
38
lib/GUI/css/responsive.less
Normal file
@@ -0,0 +1,38 @@
|
||||
@media (max-width: 992px) {
|
||||
.body {
|
||||
.main-header {
|
||||
.npm-logo {
|
||||
width: 100px;
|
||||
float: left;
|
||||
}
|
||||
|
||||
.packages-header {
|
||||
border-bottom: none;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@media (max-width: 768px) {
|
||||
.body {
|
||||
.content {
|
||||
padding-top: @mainHeaderHeight + @packagesHeaderHeight + @smRegistryInfoHeight + 10;
|
||||
|
||||
.entry {
|
||||
.title {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
.author {
|
||||
float: none !important;
|
||||
clear: both;
|
||||
padding: 0 0 5px 18px;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.no-results {
|
||||
margin: 10px 0 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
197
lib/GUI/css/styles.less
Normal file
@@ -0,0 +1,197 @@
|
||||
//vars
|
||||
@npmRed: #cc3d33;
|
||||
@white: #fff;
|
||||
@entryBg: #F3F3F3;
|
||||
@mainHeaderHeight: 50px;
|
||||
@packagesHeaderHeight: 60px;
|
||||
@headerBorderWidth: 2px;
|
||||
@smRegistryInfoHeight: 25px;
|
||||
|
||||
/*** Main Styles ***/
|
||||
.body {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
left:0;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
|
||||
.main-header {
|
||||
background: @white;
|
||||
|
||||
.navbar {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
.npm-logo {
|
||||
width: 79px;
|
||||
height: @mainHeaderHeight;
|
||||
// https://example.org/verdaccio/-/static/../../-/logo
|
||||
background-image: url( ../../-/logo );
|
||||
background-repeat: no-repeat;
|
||||
background-position: center center;
|
||||
|
||||
>a {
|
||||
display: block;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
}
|
||||
}
|
||||
|
||||
.setup {
|
||||
line-height: 1.3em;
|
||||
padding-top: 5px;
|
||||
}
|
||||
|
||||
.packages-header {
|
||||
border-bottom: @headerBorderWidth solid #e6e6e6;
|
||||
|
||||
.search-container {
|
||||
top: 9px;
|
||||
|
||||
.search-icon {
|
||||
background: #e6e6e6;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.sm-registry-info {
|
||||
height: @smRegistryInfoHeight;
|
||||
line-height: 1.7em;
|
||||
}
|
||||
}
|
||||
|
||||
.content {
|
||||
padding-top: 10px;
|
||||
|
||||
.entry {
|
||||
.transition(height .3s);
|
||||
padding: 9px 10px;
|
||||
overflow: hidden;
|
||||
border-bottom: 1px solid #E7E7E7;
|
||||
|
||||
&:last-child {
|
||||
border-bottom: none;
|
||||
}
|
||||
|
||||
&:nth-child( even ) {
|
||||
background: @entryBg;
|
||||
}
|
||||
|
||||
.title {
|
||||
margin: 0 0 5px 10px;
|
||||
}
|
||||
|
||||
.description {
|
||||
margin: 0 0 0 18px;
|
||||
font-size: 13px;
|
||||
}
|
||||
|
||||
.name:hover {
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
.name:before {
|
||||
margin: 0;
|
||||
margin-left: -10px;
|
||||
.transformTransition(.2s);
|
||||
}
|
||||
|
||||
&.open .name:before {
|
||||
.rotate(90deg);
|
||||
}
|
||||
|
||||
.version {
|
||||
color: #666;
|
||||
}
|
||||
|
||||
.author {
|
||||
color: #666;
|
||||
}
|
||||
|
||||
.readme {
|
||||
font-size: 14px;
|
||||
margin-top: 10px;
|
||||
background: @white;
|
||||
padding: 10px 12px;
|
||||
.border-radius(3px);
|
||||
border: 1px solid darken( @entryBg, 10% );
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.pkg-search-container {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.pkg-search-container {
|
||||
.search-ajax {
|
||||
display: block;
|
||||
margin: 50px auto;
|
||||
}
|
||||
}
|
||||
|
||||
.no-results {
|
||||
text-align: center;
|
||||
margin: 50px 0;
|
||||
color: #888;
|
||||
|
||||
big {
|
||||
font-size: 38px;
|
||||
margin-bottom: 8px;
|
||||
}
|
||||
|
||||
code {
|
||||
font-size: 1.2em;
|
||||
}
|
||||
}
|
||||
|
||||
.red {
|
||||
color: @npmRed;
|
||||
}
|
||||
|
||||
.light-red {
|
||||
color: lighten( @npmRed, 10% );
|
||||
}
|
||||
|
||||
.white {
|
||||
color: @white !important;
|
||||
}
|
||||
|
||||
.red-bg {
|
||||
background: @npmRed;
|
||||
}
|
||||
|
||||
.light-red-bg {
|
||||
background: lighten( @npmRed, 10% );
|
||||
}
|
||||
|
||||
.no-bg {
|
||||
background: none !important;
|
||||
}
|
||||
|
||||
.no-border {
|
||||
border: none !important;
|
||||
}
|
||||
.no-rnd-cnr {
|
||||
.border-radius( 0 );
|
||||
}
|
||||
|
||||
.center {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.login-btn {
|
||||
margin-left: 10px;
|
||||
margin-top: 5px;
|
||||
}
|
||||
|
||||
.pad-right-10 {
|
||||
padding-right: 10px;
|
||||
}
|
||||
|
||||
.inline-block {
|
||||
display: inline-block;
|
||||
}
|
||||
23
lib/GUI/entry.hbs
Normal file
@@ -0,0 +1,23 @@
|
||||
<div class="entry" data-name="{{ name }}" data-version="{{ version }}">
|
||||
<div class="row">
|
||||
<div class="col-md-8 col-sm-8">
|
||||
<h4 class="title">
|
||||
<a class='name icon-angle-right red' href='javascript:void(0)'>{{ name }}</a>
|
||||
<small class='version'>v{{ version }}</small>
|
||||
</h4>
|
||||
</div>
|
||||
<div class="col-md-4 col-sm-4">
|
||||
<div class="author pull-right">
|
||||
{{!-- I can't make the hbs helper work without breaking the code style --}}
|
||||
{{#with author}}
|
||||
<small>By: {{{ name }}}</small>
|
||||
{{/with}}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<p class="description">{{ description }}</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
134
lib/GUI/index.hbs
Normal file
@@ -0,0 +1,134 @@
|
||||
<!doctype html>
|
||||
<html lang="en-us">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>{{ name }}</title>
|
||||
|
||||
<link rel="icon" type="image/png" href="{{ baseUrl }}/-/static/favicon.png"/>
|
||||
<link rel="stylesheet" type="text/css" href="{{ baseUrl }}/-/static/main.css">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
</head>
|
||||
<body class="body">
|
||||
<header class="main-header">
|
||||
<nav class="navbar navbar-default red-bg white no-border no-rnd-cnr" role="navigation">
|
||||
<div class="container">
|
||||
<div class="navbar-header clearfix">
|
||||
<div class="npm-logo brand">
|
||||
<a href="{{ baseUrl }}"></a>
|
||||
</div>
|
||||
|
||||
<!-- login/logout for small devices -->
|
||||
<div class="pull-right visible-xs pad-right-10">
|
||||
<div>
|
||||
{{#if username}}
|
||||
<p class="white no-bg navbar-text pad-right-10 inline-block">Hi {{username}}</p>
|
||||
<button type="submit" class="btn btn-danger inline-block js-userLogoutBtn">Logout</button>
|
||||
{{else}}
|
||||
<p class="white no-bg navbar-text pad-right-10 inline-block"> </p>
|
||||
<button type="submit" class="btn btn-danger inline-block" data-toggle="modal" data-target="#login-form" onclick="return false">Login</button>
|
||||
{{/if}}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="navbar-left hidden-xs"> </div>
|
||||
|
||||
<div class="navbar-left setup hidden-xs">
|
||||
<code class="white no-bg">npm set registry {{ baseUrl }}</code><br>
|
||||
<code class="white no-bg">npm adduser --registry {{ baseUrl }}</code>
|
||||
</div>
|
||||
|
||||
<!-- login/logout for large devices -->
|
||||
<div class="navbar-collapse collapse">
|
||||
<div class="navbar-right">
|
||||
<form class="navbar-form navbar-right">
|
||||
{{#if username}}
|
||||
<p class="white no-bg pad-right-10 inline-block">Hi {{username}}</p>
|
||||
<button type="submit" class="btn btn-danger inline-block js-userLogoutBtn">Logout</button>
|
||||
{{else}}
|
||||
<button type="submit" class="btn btn-danger inline-block" data-toggle="modal" data-target="#login-form" onclick="return false">Login</button>
|
||||
{{/if}}
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
</div>
|
||||
</nav>
|
||||
<header class="sm-registry-info light-red-bg center hidden-sm hidden-lg hidden-md">
|
||||
<code class="white no-bg">{{ baseUrl }}</code><br>
|
||||
</header>
|
||||
<header class="packages-header container">
|
||||
{{#if tagline}}
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
{{{tagline}}}
|
||||
</div>
|
||||
</div>
|
||||
{{/if}}
|
||||
<div class="row">
|
||||
<div class="col-md-5 hidden-xs hidden-sm">
|
||||
<h2 class="title">Available Packages</h2>
|
||||
</div>
|
||||
<div class="col-md-4 col-md-offset-3 col-sm-12">
|
||||
<form id='search-form'>
|
||||
<div class="input-group input-group-lg search-container">
|
||||
<input type="text" class="form-control" placeholder="Search for packages">
|
||||
<span class="input-group-btn">
|
||||
<button class="btn btn-default search-icon js-search-btn"><i class="icon-search"></i></button>
|
||||
</span>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
</header>
|
||||
</header>
|
||||
|
||||
<section class="content container packages-container" id="all-packages">
|
||||
{{#each packages}}
|
||||
{{> entry}}
|
||||
{{/each}}
|
||||
|
||||
{{#unless packages.length}}
|
||||
<div class='no-results'>
|
||||
<big>No Packages</big><br>
|
||||
Use <code>npm publish</code>
|
||||
</div>
|
||||
{{/unless}}
|
||||
</section>
|
||||
|
||||
<section class="content container pkg-search-container" id="search-results"></section>
|
||||
|
||||
<div class="modal fade" id="login-form" tabindex="-1" role="dialog" aria-labelledby="login-form-label" aria-hidden="true">
|
||||
<div class="modal-dialog modal-sm">
|
||||
<div class="modal-content">
|
||||
<div class="modal-header">
|
||||
<button type="button" class="close" data-dismiss="modal"><span aria-hidden="true">×</span><span class="sr-only">Close</span></button>
|
||||
<h5 class="modal-title" id="login-form-label">Welcome back</h5>
|
||||
</div>
|
||||
<form role="form" action="{{ baseUrl }}/-/login" method="post" id="login-form" autocomplete="off">
|
||||
<div class="modal-body">
|
||||
<div class="form-group">
|
||||
<label for="user" class="sr-only">Email</label>
|
||||
<input name="user" id="user" class="form-control" placeholder="Username" type="text">
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="pass" class="sr-only">Password</label>
|
||||
<input name="pass" id="pass" class="form-control" placeholder="Password" type="password">
|
||||
</div>
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<button type="button" class="btn btn-default" data-dismiss="modal">Close</button>
|
||||
<button type="submit" class="btn btn-primary">Log in</button>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<form action="{{ baseUrl }}/-/logout" method="post" class="hide" id="userLogoutForm"></form>
|
||||
|
||||
<script src="{{ baseUrl }}/-/static/jquery.min.js"></script>
|
||||
<script type='text/javascript' src='{{ baseUrl }}/-/static/main.js'></script>
|
||||
</body>
|
||||
</html>
|
||||
278
lib/GUI/js/bootstrap-modal.js
vendored
Normal file
@@ -0,0 +1,278 @@
|
||||
/* ========================================================================
|
||||
* Bootstrap: modal.js v3.3.0
|
||||
* http://getbootstrap.com/javascript/#modals
|
||||
* ========================================================================
|
||||
* Copyright 2011-2014 Twitter, Inc.
|
||||
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
|
||||
* ======================================================================== */
|
||||
|
||||
|
||||
+function($) {
|
||||
'use strict';
|
||||
|
||||
// MODAL CLASS DEFINITION
|
||||
// ======================
|
||||
|
||||
let Modal = function(element, options) {
|
||||
this.options = options;
|
||||
this.$body = $(document.body);
|
||||
this.$element = $(element);
|
||||
this.$backdrop =
|
||||
this.isShown = null;
|
||||
this.scrollbarWidth = 0;
|
||||
|
||||
if (this.options.remote) {
|
||||
this.$element
|
||||
.find('.modal-content')
|
||||
.load(this.options.remote, $.proxy(function() {
|
||||
this.$element.trigger('loaded.bs.modal');
|
||||
}, this));
|
||||
}
|
||||
};
|
||||
|
||||
Modal.VERSION = '3.3.0';
|
||||
|
||||
Modal.TRANSITION_DURATION = 300;
|
||||
Modal.BACKDROP_TRANSITION_DURATION = 150;
|
||||
|
||||
Modal.DEFAULTS = {
|
||||
backdrop: true,
|
||||
keyboard: true,
|
||||
show: true,
|
||||
};
|
||||
|
||||
Modal.prototype.toggle = function(_relatedTarget) {
|
||||
return this.isShown ? this.hide() : this.show(_relatedTarget);
|
||||
};
|
||||
|
||||
Modal.prototype.show = function(_relatedTarget) {
|
||||
let that = this;
|
||||
let e = $.Event('show.bs.modal', {relatedTarget: _relatedTarget});
|
||||
|
||||
this.$element.trigger(e);
|
||||
|
||||
if (this.isShown || e.isDefaultPrevented()) return;
|
||||
|
||||
this.isShown = true;
|
||||
|
||||
this.checkScrollbar();
|
||||
this.$body.addClass('modal-open');
|
||||
|
||||
this.setScrollbar();
|
||||
this.escape();
|
||||
|
||||
this.$element.on('click.dismiss.bs.modal', '[data-dismiss="modal"]', $.proxy(this.hide, this));
|
||||
|
||||
this.backdrop(function() {
|
||||
let transition = $.support.transition && that.$element.hasClass('fade');
|
||||
|
||||
if (!that.$element.parent().length) {
|
||||
that.$element.appendTo(that.$body); // don't move modals dom position
|
||||
}
|
||||
|
||||
that.$element
|
||||
.show()
|
||||
.scrollTop(0);
|
||||
|
||||
if (transition) {
|
||||
that.$element[0].offsetWidth; // force reflow
|
||||
}
|
||||
|
||||
that.$element
|
||||
.addClass('in')
|
||||
.attr('aria-hidden', false);
|
||||
|
||||
that.enforceFocus();
|
||||
|
||||
let e = $.Event('shown.bs.modal', {relatedTarget: _relatedTarget});
|
||||
|
||||
transition ?
|
||||
that.$element.find('.modal-dialog') // wait for modal to slide in
|
||||
.one('bsTransitionEnd', function() {
|
||||
that.$element.trigger('focus').trigger(e);
|
||||
})
|
||||
.emulateTransitionEnd(Modal.TRANSITION_DURATION) :
|
||||
that.$element.trigger('focus').trigger(e);
|
||||
});
|
||||
};
|
||||
|
||||
Modal.prototype.hide = function(e) {
|
||||
if (e) e.preventDefault();
|
||||
|
||||
e = $.Event('hide.bs.modal');
|
||||
|
||||
this.$element.trigger(e);
|
||||
|
||||
if (!this.isShown || e.isDefaultPrevented()) return;
|
||||
|
||||
this.isShown = false;
|
||||
|
||||
this.escape();
|
||||
|
||||
$(document).off('focusin.bs.modal');
|
||||
|
||||
this.$element
|
||||
.removeClass('in')
|
||||
.attr('aria-hidden', true)
|
||||
.off('click.dismiss.bs.modal');
|
||||
|
||||
$.support.transition && this.$element.hasClass('fade') ?
|
||||
this.$element
|
||||
.one('bsTransitionEnd', $.proxy(this.hideModal, this))
|
||||
.emulateTransitionEnd(Modal.TRANSITION_DURATION) :
|
||||
this.hideModal();
|
||||
};
|
||||
|
||||
Modal.prototype.enforceFocus = function() {
|
||||
$(document)
|
||||
.off('focusin.bs.modal') // guard against infinite focus loop
|
||||
.on('focusin.bs.modal', $.proxy(function(e) {
|
||||
if (this.$element[0] !== e.target && !this.$element.has(e.target).length) {
|
||||
this.$element.trigger('focus');
|
||||
}
|
||||
}, this));
|
||||
};
|
||||
|
||||
Modal.prototype.escape = function() {
|
||||
if (this.isShown && this.options.keyboard) {
|
||||
this.$element.on('keydown.dismiss.bs.modal', $.proxy(function(e) {
|
||||
e.which == 27 && this.hide();
|
||||
}, this));
|
||||
} else if (!this.isShown) {
|
||||
this.$element.off('keydown.dismiss.bs.modal');
|
||||
}
|
||||
};
|
||||
|
||||
Modal.prototype.hideModal = function() {
|
||||
let that = this;
|
||||
this.$element.hide();
|
||||
this.backdrop(function() {
|
||||
that.$body.removeClass('modal-open');
|
||||
that.resetScrollbar();
|
||||
that.$element.trigger('hidden.bs.modal');
|
||||
});
|
||||
};
|
||||
|
||||
Modal.prototype.removeBackdrop = function() {
|
||||
this.$backdrop && this.$backdrop.remove();
|
||||
this.$backdrop = null;
|
||||
};
|
||||
|
||||
Modal.prototype.backdrop = function(callback) {
|
||||
let that = this;
|
||||
let animate = this.$element.hasClass('fade') ? 'fade' : '';
|
||||
|
||||
if (this.isShown && this.options.backdrop) {
|
||||
let doAnimate = $.support.transition && animate;
|
||||
|
||||
this.$backdrop = $('<div class="modal-backdrop ' + animate + '" />')
|
||||
.prependTo(this.$element)
|
||||
.on('click.dismiss.bs.modal', $.proxy(function(e) {
|
||||
if (e.target !== e.currentTarget) return;
|
||||
this.options.backdrop == 'static'
|
||||
? this.$element[0].focus.call(this.$element[0])
|
||||
: this.hide.call(this);
|
||||
}, this));
|
||||
|
||||
if (doAnimate) this.$backdrop[0].offsetWidth; // force reflow
|
||||
|
||||
this.$backdrop.addClass('in');
|
||||
|
||||
if (!callback) return;
|
||||
|
||||
doAnimate ?
|
||||
this.$backdrop
|
||||
.one('bsTransitionEnd', callback)
|
||||
.emulateTransitionEnd(Modal.BACKDROP_TRANSITION_DURATION) :
|
||||
callback();
|
||||
} else if (!this.isShown && this.$backdrop) {
|
||||
this.$backdrop.removeClass('in');
|
||||
|
||||
let callbackRemove = function() {
|
||||
that.removeBackdrop();
|
||||
callback && callback();
|
||||
};
|
||||
$.support.transition && this.$element.hasClass('fade') ?
|
||||
this.$backdrop
|
||||
.one('bsTransitionEnd', callbackRemove)
|
||||
.emulateTransitionEnd(Modal.BACKDROP_TRANSITION_DURATION) :
|
||||
callbackRemove();
|
||||
} else if (callback) {
|
||||
callback();
|
||||
}
|
||||
};
|
||||
|
||||
Modal.prototype.checkScrollbar = function() {
|
||||
this.scrollbarWidth = this.measureScrollbar();
|
||||
};
|
||||
|
||||
Modal.prototype.setScrollbar = function() {
|
||||
let bodyPad = parseInt((this.$body.css('padding-right') || 0), 10);
|
||||
if (this.scrollbarWidth) this.$body.css('padding-right', bodyPad + this.scrollbarWidth);
|
||||
};
|
||||
|
||||
Modal.prototype.resetScrollbar = function() {
|
||||
this.$body.css('padding-right', '');
|
||||
};
|
||||
|
||||
Modal.prototype.measureScrollbar = function() { // thx walsh
|
||||
if (document.body.clientWidth >= window.innerWidth) return 0;
|
||||
let scrollDiv = document.createElement('div');
|
||||
scrollDiv.className = 'modal-scrollbar-measure';
|
||||
this.$body.append(scrollDiv);
|
||||
let scrollbarWidth = scrollDiv.offsetWidth - scrollDiv.clientWidth;
|
||||
this.$body[0].removeChild(scrollDiv);
|
||||
return scrollbarWidth;
|
||||
};
|
||||
|
||||
|
||||
// MODAL PLUGIN DEFINITION
|
||||
// =======================
|
||||
|
||||
function Plugin(option, _relatedTarget) {
|
||||
return this.each(function() {
|
||||
let $this = $(this);
|
||||
let data = $this.data('bs.modal');
|
||||
let options = $.extend({}, Modal.DEFAULTS, $this.data(), typeof option == 'object' && option);
|
||||
|
||||
if (!data) $this.data('bs.modal', (data = new Modal(this, options)));
|
||||
if (typeof option == 'string') data[option](_relatedTarget);
|
||||
else if (options.show) data.show(_relatedTarget);
|
||||
});
|
||||
}
|
||||
|
||||
let old = $.fn.modal;
|
||||
|
||||
$.fn.modal = Plugin;
|
||||
$.fn.modal.Constructor = Modal;
|
||||
|
||||
|
||||
// MODAL NO CONFLICT
|
||||
// =================
|
||||
|
||||
$.fn.modal.noConflict = function() {
|
||||
$.fn.modal = old;
|
||||
return this;
|
||||
};
|
||||
|
||||
|
||||
// MODAL DATA-API
|
||||
// ==============
|
||||
|
||||
$(document).on('click.bs.modal.data-api', '[data-toggle="modal"]', function(e) {
|
||||
let $this = $(this);
|
||||
let href = $this.attr('href');
|
||||
let $target = $($this.attr('data-target') || (href && href.replace(/.*(?=#[^\s]+$)/, ''))); // strip for ie7
|
||||
let option = $target.data('bs.modal') ? 'toggle' : $.extend({remote: !/#/.test(href) && href}, $target.data(), $this.data());
|
||||
|
||||
if ($this.is('a')) e.preventDefault();
|
||||
|
||||
$target.one('show.bs.modal', function(showEvent) {
|
||||
if (showEvent.isDefaultPrevented()) return; // only register focus restorer if modal will actually get shown
|
||||
$target.one('hidden.bs.modal', function() {
|
||||
$this.is(':visible') && $this.trigger('focus');
|
||||
});
|
||||
});
|
||||
Plugin.call($target, option, this);
|
||||
});
|
||||
}(jQuery);
|
||||
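The file above is the stock Bootstrap 3.3.0 modal plugin, vendored so the login dialog works without pulling in all of Bootstrap's JavaScript. As a rough usage sketch (assuming jQuery and this plugin are loaded, as they are in index.hbs), the dialog can be driven either declaratively through the data-toggle/data-target attributes or programmatically:

// Hypothetical programmatic use, not part of this changeset:
$('#login-form').modal('show');                  // open the login dialog
$('#login-form').modal('hide');                  // close it again
$('#login-form').on('shown.bs.modal', function() {
  $('#user').trigger('focus');                   // focus the username field once the modal is visible
});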
72
lib/GUI/js/entry.js
Normal file
@@ -0,0 +1,72 @@
|
||||
let $ = require('unopinionate').selector;
|
||||
let onClick = require('onclick');
|
||||
let transitionComplete = require('transition-complete');
|
||||
|
||||
$(function() {
|
||||
onClick('.entry .name', function() {
|
||||
let $this = $(this);
|
||||
let $entry = $this.closest('.entry');
|
||||
|
||||
if ($entry.hasClass('open')) {
|
||||
// Close entry
|
||||
$entry
|
||||
.height($entry.outerHeight())
|
||||
.removeClass('open');
|
||||
|
||||
setTimeout(function() {
|
||||
$entry.css('height', $entry.attr('data-height') + 'px');
|
||||
}, 0);
|
||||
|
||||
transitionComplete(function() {
|
||||
$entry.find('.readme').remove();
|
||||
$entry.css('height', 'auto');
|
||||
});
|
||||
} else {
|
||||
// Open entry
|
||||
$('.entry.open').each(function() {
|
||||
// Close open entries
|
||||
let $entry = $(this);
|
||||
$entry
|
||||
.height($entry.outerHeight())
|
||||
.removeClass('open');
|
||||
|
||||
setTimeout(function() {
|
||||
$entry.css('height', $entry.attr('data-height') + 'px');
|
||||
}, 0);
|
||||
|
||||
transitionComplete(function() {
|
||||
$entry.find('.readme').remove();
|
||||
$entry.css('height', 'auto');
|
||||
});
|
||||
});
|
||||
|
||||
// Add the open class
|
||||
$entry.addClass('open');
|
||||
|
||||
// Explicitly set heights for transitions
|
||||
let height = $entry.outerHeight();
|
||||
$entry
|
||||
.attr('data-height', height)
|
||||
.css('height', height);
|
||||
|
||||
// Get the data
|
||||
$.ajax({
|
||||
url: '-/readme/'
|
||||
+ encodeURIComponent($entry.attr('data-name')) + '/'
|
||||
+ encodeURIComponent($entry.attr('data-version')),
|
||||
dataType: 'text',
|
||||
success: function(html) {
|
||||
let $readme = $('<div class=\'readme\'>')
|
||||
.html(html)
|
||||
.appendTo($entry);
|
||||
|
||||
$entry.height(height + $readme.outerHeight());
|
||||
|
||||
transitionComplete(function() {
|
||||
$entry.css('height', 'auto');
|
||||
});
|
||||
},
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
13
lib/GUI/js/main.js
Normal file
@@ -0,0 +1,13 @@
|
||||
// twitter bootstrap stuff;
|
||||
// not in static because I want it to be bundled with the rest of the JavaScript
|
||||
require('./bootstrap-modal');
|
||||
|
||||
// our own files
|
||||
require('./search');
|
||||
require('./entry');
|
||||
|
||||
let $ = require('unopinionate').selector;
|
||||
$(document).on('click', '.js-userLogoutBtn', function() {
|
||||
$('#userLogoutForm').submit();
|
||||
return false;
|
||||
});
|
||||
77
lib/GUI/js/search.js
Normal file
@@ -0,0 +1,77 @@
|
||||
let $ = require('unopinionate').selector;
|
||||
let template = require('../entry.hbs');
|
||||
|
||||
$(function() {
|
||||
;(function(window, document) {
|
||||
var $form = $('#search-form')
|
||||
var $input = $form.find('input')
|
||||
var $searchResults = $('#search-results')
|
||||
var $pkgListing = $('#all-packages')
|
||||
var $searchBtn = $('.js-search-btn')
|
||||
var request
|
||||
var lastQuery = ''
|
||||
|
||||
var toggle = function(validQuery) {
|
||||
$searchResults.toggleClass('show', validQuery)
|
||||
$pkgListing.toggleClass('hide', validQuery)
|
||||
|
||||
$searchBtn.find('i').toggleClass('icon-cancel', validQuery)
|
||||
$searchBtn.find('i').toggleClass('icon-search', !validQuery)
|
||||
}
|
||||
|
||||
$form.bind('submit keyup', function(e) {
|
||||
var query, isValidQuery
|
||||
|
||||
|
||||
e.preventDefault();
|
||||
|
||||
query = $input.val()
|
||||
isValidQuery = (query !== '')
|
||||
|
||||
toggle(isValidQuery)
|
||||
|
||||
if (!isValidQuery) {
|
||||
if (request && typeof request.abort === 'function') {
|
||||
request.abort();
|
||||
}
|
||||
|
||||
$searchResults.html('')
|
||||
return;
|
||||
}
|
||||
|
||||
if (request && typeof request.abort === 'function') {
|
||||
request.abort();
|
||||
}
|
||||
|
||||
if (query !== lastQuery) {
|
||||
lastQuery = query
|
||||
$searchResults.html(
|
||||
'<img class=\'search-ajax\' src=\'-/static/ajax.gif\' alt=\'Spinner\'/>');
|
||||
}
|
||||
|
||||
request = $.getJSON('-/search/' + query, function( results ) {
|
||||
if (results.length > 0) {
|
||||
let html = '';
|
||||
|
||||
$.each(results, function(i, entry) {
|
||||
html += template(entry);
|
||||
});
|
||||
|
||||
$searchResults.html(html);
|
||||
} else {
|
||||
$searchResults.html(
|
||||
'<div class=\'no-results\'><big>No Results</big></div>');
|
||||
}
|
||||
}).fail(function () {
|
||||
$searchResults.html(
|
||||
"<div class='no-results'><big>No Results</big></div>")
|
||||
})
|
||||
})
|
||||
|
||||
$(document).on('click', '.icon-cancel', function(e) {
|
||||
e.preventDefault();
|
||||
$input.val('');
|
||||
$form.keyup();
|
||||
});
|
||||
})(window, window.document);
|
||||
});
|
||||
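search.js above sends the query to the registry's search endpoint and renders each hit with the same entry.hbs partial used for the main package listing. A minimal sketch of the request/response contract inferred from that code (the field names mirror what entry.hbs consumes; the concrete values are made up):

// GET <registry>/-/search/<query> is expected to answer with a JSON array such as:
// [{ "name": "my-pkg", "version": "1.2.3",
//    "description": "example package", "author": { "name": "someone" } }]
let template = require('../entry.hbs');
$.getJSON('-/search/my-pkg', function(results) {
  results.forEach(function(entry) {
    document.body.insertAdjacentHTML('beforeend', template(entry)); // same rendering path search.js takes
  });
});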
471
lib/auth.js
Normal file
@@ -0,0 +1,471 @@
|
||||
/* eslint prefer-spread: "off" */
|
||||
/* eslint prefer-rest-params: "off" */
|
||||
|
||||
'use strict';
|
||||
|
||||
const Crypto = require('crypto');
|
||||
const jju = require('jju');
|
||||
const Error = require('http-errors');
|
||||
const Logger = require('./logger');
|
||||
const load_plugins = require('./plugin-loader').load_plugins;
|
||||
const pkgJson = require('../package.json');
|
||||
/**
|
||||
* Handles authentication and loads the auth plugins.
|
||||
*/
|
||||
class Auth {
|
||||
|
||||
/**
|
||||
* @param {*} config config reference
|
||||
*/
|
||||
constructor(config) {
|
||||
this.config = config;
|
||||
this.logger = Logger.logger.child({sub: 'auth'});
|
||||
this.secret = config.secret;
|
||||
|
||||
const plugin_params = {
|
||||
config: config,
|
||||
logger: this.logger,
|
||||
};
|
||||
|
||||
if (config.users_file) {
|
||||
if (!config.auth || !config.auth.htpasswd) {
|
||||
// b/w compat
|
||||
config.auth = config.auth || {};
|
||||
config.auth.htpasswd = {file: config.users_file};
|
||||
}
|
||||
}
|
||||
|
||||
this.plugins = load_plugins(config, config.auth, plugin_params, function(p) {
|
||||
return p.authenticate || p.allow_access || p.allow_publish;
|
||||
});
|
||||
|
||||
this.plugins.unshift({
|
||||
verdaccio_version: pkgJson.version,
|
||||
|
||||
authenticate: function(user, password, cb) {
|
||||
if (config.users != null
|
||||
&& config.users[user] != null
|
||||
&& (Crypto.createHash('sha1').update(password).digest('hex')
|
||||
=== config.users[user].password)
|
||||
) {
|
||||
return cb(null, [user]);
|
||||
}
|
||||
|
||||
return cb();
|
||||
},
|
||||
|
||||
adduser: function(user, password, cb) {
|
||||
if (config.users && config.users[user])
|
||||
return cb( Error[403]('this user already exists') );
|
||||
|
||||
return cb();
|
||||
},
|
||||
});
|
||||
|
||||
const allow_action = function(action) {
|
||||
return function(user, pkg, cb) {
|
||||
let ok = pkg[action].reduce(function(prev, curr) {
|
||||
if (user.groups.indexOf(curr) !== -1) return true;
|
||||
return prev;
|
||||
}, false);
|
||||
|
||||
if (ok) return cb(null, true);
|
||||
|
||||
if (user.name) {
|
||||
cb( Error[403]('user ' + user.name + ' is not allowed to ' + action + ' package ' + pkg.name) );
|
||||
} else {
|
||||
cb( Error[403]('unregistered users are not allowed to ' + action + ' package ' + pkg.name) );
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
this.plugins.push({
|
||||
authenticate: function(user, password, cb) {
|
||||
return cb( Error[403]('bad username/password, access denied') );
|
||||
},
|
||||
|
||||
add_user: function(user, password, cb) {
|
||||
return cb( Error[409]('registration is disabled') );
|
||||
},
|
||||
|
||||
allow_access: allow_action('access'),
|
||||
allow_publish: allow_action('publish'),
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Authenticate a user.
|
||||
* @param {*} user
|
||||
* @param {*} password
|
||||
* @param {*} cb
|
||||
*/
|
||||
authenticate(user, password, cb) {
|
||||
const plugins = this.plugins.slice(0)
|
||||
;(function next() {
|
||||
let p = plugins.shift();
|
||||
|
||||
if (typeof(p.authenticate) !== 'function') {
|
||||
return next();
|
||||
}
|
||||
|
||||
p.authenticate(user, password, function(err, groups) {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
if (groups != null && groups != false) {
|
||||
return cb(err, authenticatedUser(user, groups));
|
||||
}
|
||||
next();
|
||||
});
|
||||
})();
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a new user.
|
||||
* @param {*} user
|
||||
* @param {*} password
|
||||
* @param {*} cb
|
||||
*/
|
||||
add_user(user, password, cb) {
|
||||
let self = this;
|
||||
let plugins = this.plugins.slice(0)
|
||||
|
||||
;(function next() {
|
||||
let p = plugins.shift();
|
||||
let n = 'adduser';
|
||||
if (typeof(p[n]) !== 'function') {
|
||||
n = 'add_user';
|
||||
}
|
||||
if (typeof(p[n]) !== 'function') {
|
||||
next();
|
||||
} else {
|
||||
p[n](user, password, function(err, ok) {
|
||||
if (err) return cb(err);
|
||||
if (ok) return self.authenticate(user, password, cb);
|
||||
next();
|
||||
});
|
||||
}
|
||||
})();
|
||||
}
|
||||
|
||||
/**
|
||||
* Allow user to access a package.
|
||||
* @param {*} package_name
|
||||
* @param {*} user
|
||||
* @param {*} callback
|
||||
*/
|
||||
allow_access(package_name, user, callback) {
|
||||
let plugins = this.plugins.slice(0);
|
||||
let pkg = Object.assign({name: package_name},
|
||||
this.config.getMatchedPackagesSpec(package_name))
|
||||
|
||||
;(function next() {
|
||||
let p = plugins.shift();
|
||||
|
||||
if (typeof(p.allow_access) !== 'function') {
|
||||
return next();
|
||||
}
|
||||
|
||||
p.allow_access(user, pkg, function(err, ok) {
|
||||
if (err) return callback(err);
|
||||
if (ok) return callback(null, ok);
|
||||
next(); // cb(null, false) causes next plugin to roll
|
||||
});
|
||||
})();
|
||||
}
|
||||
|
||||
/**
|
||||
* Allow user to publish a package.
|
||||
* @param {*} package_name
|
||||
* @param {*} user
|
||||
* @param {*} callback
|
||||
*/
|
||||
allow_publish(package_name, user, callback) {
|
||||
let plugins = this.plugins.slice(0);
|
||||
let pkg = Object.assign({name: package_name},
|
||||
this.config.getMatchedPackagesSpec(package_name))
|
||||
|
||||
;(function next() {
|
||||
let p = plugins.shift();
|
||||
|
||||
if (typeof(p.allow_publish) !== 'function') {
|
||||
return next();
|
||||
}
|
||||
|
||||
p.allow_publish(user, pkg, function(err, ok) {
|
||||
if (err) return callback(err);
|
||||
if (ok) return callback(null, ok);
|
||||
next(); // cb(null, false) causes next plugin to roll
|
||||
});
|
||||
})();
|
||||
}
|
||||
|
||||
/**
|
||||
* Set up a basic middleware.
|
||||
* @return {Function}
|
||||
*/
|
||||
basic_middleware() {
|
||||
let self = this;
|
||||
let credentials;
|
||||
return function(req, res, _next) {
|
||||
req.pause();
|
||||
|
||||
const next = function(err) {
|
||||
req.resume();
|
||||
// uncomment this to reject users with bad auth headers
|
||||
// return _next.apply(null, arguments)
|
||||
|
||||
// swallow error, user remains unauthorized
|
||||
// set remoteUserError to indicate that user was attempting authentication
|
||||
if (err) {
|
||||
req.remote_user.error = err.message;
|
||||
}
|
||||
return _next();
|
||||
};
|
||||
|
||||
if (req.remote_user != null && req.remote_user.name !== undefined) {
|
||||
return next();
|
||||
}
|
||||
req.remote_user = buildAnonymousUser();
|
||||
|
||||
let authorization = req.headers.authorization;
|
||||
if (authorization == null) return next();
|
||||
|
||||
let parts = authorization.split(' ');
|
||||
|
||||
if (parts.length !== 2) {
|
||||
return next( Error[400]('bad authorization header') );
|
||||
}
|
||||
|
||||
const scheme = parts[0];
|
||||
if (scheme === 'Basic') {
|
||||
credentials = new Buffer(parts[1], 'base64').toString();
|
||||
} else if (scheme === 'Bearer') {
|
||||
credentials = self.aes_decrypt(new Buffer(parts[1], 'base64')).toString('utf8');
|
||||
if (!credentials) {
|
||||
return next();
|
||||
}
|
||||
} else {
|
||||
return next();
|
||||
}
|
||||
|
||||
const index = credentials.indexOf(':');
|
||||
if (index < 0) {
|
||||
return next();
|
||||
}
|
||||
|
||||
const user = credentials.slice(0, index);
|
||||
const pass = credentials.slice(index + 1);
|
||||
|
||||
self.authenticate(user, pass, function(err, user) {
|
||||
if (!err) {
|
||||
req.remote_user = user;
|
||||
next();
|
||||
} else {
|
||||
req.remote_user = buildAnonymousUser();
|
||||
next(err);
|
||||
}
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Set up the bearer middleware.
|
||||
* @return {Function}
|
||||
*/
|
||||
bearer_middleware() {
|
||||
let self = this;
|
||||
return function(req, res, _next) {
|
||||
req.pause();
|
||||
const next = function(_err) {
|
||||
req.resume();
|
||||
return _next.apply(null, arguments);
|
||||
};
|
||||
|
||||
if (req.remote_user != null && req.remote_user.name !== undefined) {
|
||||
return next();
|
||||
}
|
||||
req.remote_user = buildAnonymousUser();
|
||||
|
||||
let authorization = req.headers.authorization;
|
||||
if (authorization == null) {
|
||||
return next();
|
||||
}
|
||||
|
||||
let parts = authorization.split(' ');
|
||||
|
||||
if (parts.length !== 2) {
|
||||
return next( Error[400]('bad authorization header') );
|
||||
}
|
||||
|
||||
let scheme = parts[0];
|
||||
let token = parts[1];
|
||||
|
||||
if (scheme !== 'Bearer')
|
||||
return next();
|
||||
let user;
|
||||
try {
|
||||
user = self.decode_token(token);
|
||||
} catch(err) {
|
||||
return next(err);
|
||||
}
|
||||
|
||||
req.remote_user = authenticatedUser(user.u, user.g);
|
||||
req.remote_user.token = token;
|
||||
next();
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Set up cookie middleware.
|
||||
* @return {Function}
|
||||
*/
|
||||
cookie_middleware() {
|
||||
let self = this;
|
||||
return function(req, res, _next) {
|
||||
req.pause();
|
||||
const next = function(_err) {
|
||||
req.resume();
|
||||
return _next();
|
||||
};
|
||||
|
||||
if (req.remote_user != null && req.remote_user.name !== undefined)
|
||||
return next();
|
||||
|
||||
req.remote_user = buildAnonymousUser();
|
||||
|
||||
let token = req.cookies.get('token');
|
||||
if (token == null) {
|
||||
return next();
|
||||
}
|
||||
let credentials = self.aes_decrypt(new Buffer(token, 'base64')).toString('utf8');
|
||||
if (!credentials) {
|
||||
return next();
|
||||
}
|
||||
|
||||
let index = credentials.indexOf(':');
|
||||
if (index < 0) {
|
||||
return next();
|
||||
}
|
||||
const user = credentials.slice(0, index);
|
||||
const pass = credentials.slice(index + 1);
|
||||
|
||||
self.authenticate(user, pass, function(err, user) {
|
||||
if (!err) {
|
||||
req.remote_user = user;
|
||||
next();
|
||||
} else {
|
||||
req.remote_user = buildAnonymousUser();
|
||||
next(err);
|
||||
}
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates the token.
|
||||
* @param {*} user
|
||||
* @return {String}
|
||||
*/
|
||||
issue_token(user) {
|
||||
let data = jju.stringify({
|
||||
u: user.name,
|
||||
g: user.real_groups && user.real_groups.length ? user.real_groups : undefined,
|
||||
t: ~~(Date.now()/1000),
|
||||
}, {indent: false});
|
||||
|
||||
data = new Buffer(data, 'utf8');
|
||||
const mac = Crypto.createHmac('sha256', this.secret).update(data).digest();
|
||||
return Buffer.concat([data, mac]).toString('base64');
|
||||
}
|
||||
|
||||
/**
|
||||
* Decodes the token.
|
||||
* @param {*} str
|
||||
* @param {*} expire_time
|
||||
* @return {Object}
|
||||
*/
|
||||
decode_token(str, expire_time) {
|
||||
const buf = new Buffer(str, 'base64');
|
||||
if (buf.length <= 32) {
|
||||
throw Error[401]('invalid token');
|
||||
}
|
||||
|
||||
let data = buf.slice(0, buf.length - 32);
|
||||
let their_mac = buf.slice(buf.length - 32);
|
||||
let good_mac = Crypto.createHmac('sha256', this.secret).update(data).digest();
|
||||
|
||||
their_mac = Crypto.createHash('sha512').update(their_mac).digest('hex');
|
||||
good_mac = Crypto.createHash('sha512').update(good_mac).digest('hex');
|
||||
if (their_mac !== good_mac) throw Error[401]('bad signature');
|
||||
|
||||
// make token expire in 24 hours
|
||||
// TODO: make configurable?
|
||||
expire_time = expire_time || 24*60*60;
|
||||
|
||||
data = jju.parse(data.toString('utf8'));
|
||||
if (Math.abs(data.t - ~~(Date.now()/1000)) > expire_time) {
|
||||
throw Error[401]('token expired');
|
||||
}
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
/**
|
||||
* Encrypt a string.
|
||||
* @param {String} buf
|
||||
* @return {Buffer}
|
||||
*/
|
||||
aes_encrypt(buf) {
|
||||
const c = Crypto.createCipher('aes192', this.secret);
|
||||
const b1 = c.update(buf);
|
||||
const b2 = c.final();
|
||||
return Buffer.concat([b1, b2]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Decrypt a string.
|
||||
* @param {String} buf
|
||||
* @return {Buffer}
|
||||
*/
|
||||
aes_decrypt(buf) {
|
||||
try {
|
||||
const c = Crypto.createDecipher('aes192', this.secret);
|
||||
const b1 = c.update(buf);
|
||||
const b2 = c.final();
|
||||
return Buffer.concat([b1, b2]);
|
||||
} catch(_) {
|
||||
return new Buffer(0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds an anonymous user in case none is logged in.
|
||||
* @return {Object} { name: xx, groups: [], real_groups: [] }
|
||||
*/
|
||||
function buildAnonymousUser() {
|
||||
return {
|
||||
name: undefined,
|
||||
// groups without '$' are going to be deprecated eventually
|
||||
groups: ['$all', '$anonymous', '@all', '@anonymous', 'all', 'undefined', 'anonymous'],
|
||||
real_groups: [],
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Build an authenticated user object (name plus resolved groups).
|
||||
* @param {*} name
|
||||
* @param {*} groups
|
||||
* @return {Object} { name: xx, groups: [], real_groups: [] }
|
||||
*/
|
||||
function authenticatedUser(name, groups) {
|
||||
let _groups = (groups || []).concat(['$all', '$authenticated', '@all', '@authenticated', 'all']);
|
||||
return {
|
||||
name: name,
|
||||
groups: _groups,
|
||||
real_groups: groups,
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = Auth;
|
||||
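auth.js above is the new authentication layer: plugins are tried in order, and a successful login can be wrapped into an HMAC-signed token that the Bearer and cookie middlewares later verify. A rough sketch of that token round trip, assuming a config object that carries a secret and whatever auth plugins you have configured:

const Auth = require('./auth');

const auth = new Auth(config);                   // config.secret feeds the HMAC below
const user = {name: 'alice', real_groups: ['maintainers']};

const token = auth.issue_token(user);            // base64(JSON payload + HMAC-SHA256 mac)
const payload = auth.decode_token(token);        // throws 401-style errors on a bad mac or expiry
console.log(payload.u, payload.g);               // 'alice', ['maintainers']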
271
lib/cli.js
@@ -1,121 +1,200 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
var logger = require('./logger')
|
||||
logger.setup() // default setup
|
||||
/* eslint no-sync:0 */
|
||||
/* eslint no-empty:0 */
|
||||
'use strict';
|
||||
|
||||
var pkg_file = '../package.yaml'
|
||||
, fs = require('fs')
|
||||
, yaml = require('js-yaml')
|
||||
, commander = require('commander')
|
||||
, server = require('./index')
|
||||
, crypto = require('crypto')
|
||||
, pkg = require(pkg_file)
|
||||
const _ = require('lodash');
|
||||
|
||||
if (process.getuid && process.getuid() === 0) {
|
||||
global.console.error('Verdaccio doesn\'t need superuser privileges. Don\'t run it under root.');
|
||||
}
|
||||
|
||||
process.title = 'verdaccio';
|
||||
|
||||
try {
|
||||
// for debugging memory leaks
|
||||
// totally optional
|
||||
require('heapdump');
|
||||
} catch(err) { }
|
||||
|
||||
const logger = require('./logger');
|
||||
logger.setup(); // default setup
|
||||
|
||||
const commander = require('commander');
|
||||
const constants = require('constants');
|
||||
const fs = require('fs');
|
||||
const http = require('http');
|
||||
const https = require('https');
|
||||
const YAML = require('js-yaml');
|
||||
const Path = require('path');
|
||||
const URL = require('url');
|
||||
const server = require('./index');
|
||||
const Utils = require('./utils');
|
||||
const pkginfo = require('pkginfo')(module); // eslint-disable-line no-unused-vars
|
||||
const pkgVersion = module.exports.version;
|
||||
const pkgName = module.exports.name;
|
||||
|
||||
commander
|
||||
.option('-l, --listen <[host:]port>', 'host:port number to listen on (default: localhost:4873)')
|
||||
.option('-c, --config <config.yaml>', 'use this configuration file (default: ./config.yaml)')
|
||||
.version(pkg.version)
|
||||
.parse(process.argv)
|
||||
.option('-l, --listen <[host:]port>', 'host:port number to listen on (default: localhost:4873)')
|
||||
.option('-c, --config <config.yaml>', 'use this configuration file (default: ./config.yaml)')
|
||||
.version(pkgVersion)
|
||||
.parse(process.argv);
|
||||
|
||||
if (commander.args.length == 1 && !commander.config) {
|
||||
// handling "sinopia [config]" case if "-c" is missing in commandline
|
||||
commander.config = commander.args.pop()
|
||||
// handling "verdaccio [config]" case if "-c" is missing in commandline
|
||||
commander.config = commander.args.pop();
|
||||
}
|
||||
|
||||
if (commander.args.length != 0) {
|
||||
commander.help()
|
||||
commander.help();
|
||||
}
|
||||
|
||||
var config, config_path, have_question
|
||||
let config;
|
||||
let config_path;
|
||||
try {
|
||||
if (commander.config) {
|
||||
config_path = commander.config
|
||||
config = yaml.safeLoad(fs.readFileSync(config_path, 'utf8'))
|
||||
} else {
|
||||
config_path = './config.yaml'
|
||||
try {
|
||||
config = yaml.safeLoad(fs.readFileSync(config_path, 'utf8'))
|
||||
} catch(err) {
|
||||
var readline = require('readline')
|
||||
var rl = readline.createInterface(process.stdin, process.stdout)
|
||||
var timeout = setTimeout(function() {
|
||||
console.log('I got tired waiting for an answer. Exitting...')
|
||||
process.exit(1)
|
||||
}, 20000)
|
||||
|
||||
;(function askUser() {
|
||||
have_question = true
|
||||
rl.question('Config file doesn\'t exist, create a new one? (Y/n) ', function(x) {
|
||||
clearTimeout(timeout)
|
||||
if (x[0] == 'Y' || x[0] == 'y' || x === '') {
|
||||
rl.close()
|
||||
|
||||
var created_config = require('../lib/config_gen')()
|
||||
config = yaml.safeLoad(created_config.yaml)
|
||||
write_config_banner(created_config, config)
|
||||
fs.writeFileSync(config_path, created_config.yaml)
|
||||
afterConfigLoad()
|
||||
} else if (x[0] == 'N' || x[0] == 'n') {
|
||||
rl.close()
|
||||
console.log('So, you just accidentally run me in a wrong folder. Exitting...')
|
||||
process.exit(1)
|
||||
} else {
|
||||
askUser()
|
||||
}
|
||||
})
|
||||
})()
|
||||
}
|
||||
}
|
||||
} catch(err) {
|
||||
logger.logger.fatal({file: config_path, err: err}, 'cannot open config file @{file}: @{!err.message}')
|
||||
process.exit(1)
|
||||
if (commander.config) {
|
||||
config_path = Path.resolve(commander.config);
|
||||
} else {
|
||||
config_path = require('./config-path')();
|
||||
}
|
||||
config = YAML.safeLoad(fs.readFileSync(config_path, 'utf8'));
|
||||
logger.logger.warn({file: config_path}, 'config file - @{file}');
|
||||
} catch (err) {
|
||||
logger.logger.fatal({file: config_path, err: err}, 'cannot open config file @{file}: @{!err.message}');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
if (!have_question) afterConfigLoad()
|
||||
afterConfigLoad();
|
||||
|
||||
function get_hostport() {
|
||||
// command line || config file || default
|
||||
var hostport = commander.listen || String(config.listen || '') || '4873'
|
||||
/**
|
||||
* Retrieve all addresses defined in the config file.
|
||||
* Verdaccio is able to listen on multiple ports
|
||||
* eg:
|
||||
* listen:
|
||||
- localhost:5555
|
||||
- localhost:5557
|
||||
@return {Array}
|
||||
*/
|
||||
function get_listen_addresses() {
|
||||
// command line || config file || default
|
||||
let addresses;
|
||||
if (commander.listen) {
|
||||
addresses = [commander.listen];
|
||||
} else if (Array.isArray(config.listen)) {
|
||||
addresses = config.listen;
|
||||
} else if (config.listen) {
|
||||
addresses = [config.listen];
|
||||
} else {
|
||||
addresses = ['4873'];
|
||||
}
|
||||
addresses = addresses.map(function(addr) {
|
||||
let parsed_addr = Utils.parse_address(addr);
|
||||
|
||||
hostport = hostport.split(':')
|
||||
if (hostport.length < 2) {
|
||||
hostport = [undefined, hostport[0]]
|
||||
}
|
||||
if (hostport[0] == null) {
|
||||
hostport[0] = 'localhost'
|
||||
}
|
||||
return hostport
|
||||
if (!parsed_addr) {
|
||||
logger.logger.warn({addr: addr},
|
||||
'invalid address - @{addr}, we expect a port (e.g. "4873"),'
|
||||
+ ' host:port (e.g. "localhost:4873") or full url'
|
||||
+ ' (e.g. "http://localhost:4873/")');
|
||||
}
|
||||
|
||||
return parsed_addr;
|
||||
}).filter(Boolean);
|
||||
|
||||
return addresses;
|
||||
}
|
||||
|
||||
/**
|
||||
* Trigger the server after configuration has been loaded.
|
||||
*/
|
||||
function afterConfigLoad() {
|
||||
if (!config.user_agent) config.user_agent = 'Sinopia/'+pkg.version
|
||||
if (!config.self_path) config.self_path = config_path
|
||||
if (!config.self_path) {
|
||||
config.self_path = Path.resolve(config_path);
|
||||
}
|
||||
if (!config.https) {
|
||||
config.https = {enable: false};
|
||||
}
|
||||
const app = server(config);
|
||||
get_listen_addresses().forEach(function(addr) {
|
||||
let webServer;
|
||||
if (addr.proto === 'https') { // https
|
||||
if (!config.https || !config.https.key || !config.https.cert || !config.https.ca) {
|
||||
let conf_path = function(file) {
|
||||
if (!file) return config_path;
|
||||
return Path.resolve(Path.dirname(config_path), file);
|
||||
};
|
||||
|
||||
logger.setup(config.logs)
|
||||
logger.logger.fatal([
|
||||
'You need to specify "https.key", "https.cert" and "https.ca" to run https server',
|
||||
'',
|
||||
// commands are borrowed from node.js docs
|
||||
'To quickly create self-signed certificate, use:',
|
||||
' $ openssl genrsa -out ' + conf_path('verdaccio-key.pem') + ' 2048',
|
||||
' $ openssl req -new -sha256 -key ' + conf_path('verdaccio-key.pem') + ' -out ' + conf_path('verdaccio-csr.pem'),
|
||||
' $ openssl x509 -req -in ' + conf_path('verdaccio-csr.pem') +
|
||||
' -signkey ' + conf_path('verdaccio-key.pem') + ' -out ' + conf_path('verdaccio-cert.pem'),
|
||||
'',
|
||||
'And then add to config file (' + conf_path() + '):',
|
||||
' https:',
|
||||
' key: verdaccio-key.pem',
|
||||
' cert: verdaccio-cert.pem',
|
||||
' ca: verdaccio-cert.pem',
|
||||
].join('\n'));
|
||||
process.exit(2);
|
||||
}
|
||||
|
||||
var hostport = get_hostport()
|
||||
server(config).listen(hostport[1], hostport[0])
|
||||
logger.logger.warn({addr: 'http://'+hostport[0]+':'+hostport[1]+'/'}, 'Server is listening on @{addr}')
|
||||
try {
|
||||
webServer = https.createServer({
|
||||
secureProtocol: 'SSLv23_method', // disable insecure SSLv2 and SSLv3
|
||||
secureOptions: constants.SSL_OP_NO_SSLv2 | constants.SSL_OP_NO_SSLv3,
|
||||
key: fs.readFileSync(config.https.key),
|
||||
cert: fs.readFileSync(config.https.cert),
|
||||
ca: fs.readFileSync(config.https.ca),
|
||||
}, app);
|
||||
} catch (err) { // catch errors related to certificate loading
|
||||
logger.logger.fatal({err: err}, 'cannot create server: @{err.message}');
|
||||
process.exit(2);
|
||||
}
|
||||
} else { // http
|
||||
webServer = http.createServer(app);
|
||||
}
|
||||
|
||||
// undocumented stuff for tests
|
||||
if (typeof(process.send) === 'function') {
|
||||
process.send({sinopia_started: hostport})
|
||||
}
|
||||
}
|
||||
|
||||
function write_config_banner(def, config) {
|
||||
var hostport = get_hostport()
|
||||
console.log('===========================================================')
|
||||
console.log(' Creating a new configuration file: "%s"', config_path)
|
||||
console.log(' ')
|
||||
console.log(' If you want to setup npm to work with this registry,')
|
||||
console.log(' run following commands:')
|
||||
console.log(' ')
|
||||
console.log(' $ npm set registry http://%s:%s/', hostport[0], hostport[1])
|
||||
console.log(' $ npm set always-auth true')
|
||||
console.log(' $ npm adduser')
|
||||
console.log(' Username: %s', def.user)
|
||||
console.log(' Password: %s', def.pass)
|
||||
console.log('===========================================================')
|
||||
webServer
|
||||
.listen(addr.port || addr.path, addr.host)
|
||||
.on('error', function(err) {
|
||||
logger.logger.fatal({err: err}, 'cannot create server: @{err.message}');
|
||||
process.exit(2);
|
||||
});
|
||||
|
||||
logger.logger.warn({
|
||||
addr: ( addr.path
|
||||
? URL.format({
|
||||
protocol: 'unix',
|
||||
pathname: addr.path,
|
||||
})
|
||||
: URL.format({
|
||||
protocol: addr.proto,
|
||||
hostname: addr.host,
|
||||
port: addr.port,
|
||||
pathname: '/',
|
||||
})
|
||||
),
|
||||
version: pkgName + '/' + pkgVersion,
|
||||
}, 'http address - @{addr} - @{version}');
|
||||
});
|
||||
|
||||
// undocumented stuff for tests
|
||||
if (_.isFunction(process.send)) {
|
||||
process.send({
|
||||
verdaccio_started: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
process.on('uncaughtException', function(err) {
|
||||
logger.logger.fatal( {
|
||||
err: err,
|
||||
},
|
||||
'uncaught exception, please report this\n@{err.stack}' );
|
||||
process.exit(255);
|
||||
});
|
||||
|
||||
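The rewritten cli.js resolves every configured listen entry through Utils.parse_address and then starts an http or https server per address. A hedged sketch of the address objects afterConfigLoad() appears to expect (the proto/host/port/path field names are inferred from how addr is used above, not from Utils itself):

// Illustrative shapes only, consumed by webServer.listen(addr.port || addr.path, addr.host):
const examples = [
  {proto: 'http', host: 'localhost', port: '4873'},         // from "4873" or "localhost:4873"
  {proto: 'https', host: 'registry.local', port: '443'},    // from "https://registry.local:443/"
  {proto: 'http', path: '/tmp/verdaccio.sock'},             // unix-socket style address
];
examples.forEach(function(addr) {
  console.log(addr.proto, addr.path || (addr.host + ':' + addr.port));
});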
110
lib/config-path.js
Normal file
@@ -0,0 +1,110 @@
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const Path = require('path');
|
||||
const logger = require('./logger');
|
||||
const CONFIG_FILE = 'config.yaml';
|
||||
const pkgJson = require('../package.json');
|
||||
/**
|
||||
 * Find and return the first config file that matches.
|
||||
* @return {String} the config file path
|
||||
*/
|
||||
function find_config_file() {
|
||||
const paths = get_paths();
|
||||
|
||||
for (let i=0; i<paths.length; i++) {
|
||||
if (file_exists(paths[i].path)) return paths[i].path;
|
||||
}
|
||||
|
||||
create_config_file(paths[0]);
|
||||
return paths[0].path;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a default config file in your system.
|
||||
* @param {String} config_path
|
||||
*/
|
||||
function create_config_file(config_path) {
|
||||
require('mkdirp').sync(Path.dirname(config_path.path));
|
||||
logger.logger.info({file: config_path.path}, 'Creating default config file in @{file}');
|
||||
|
||||
let created_config = fs.readFileSync(require.resolve('../conf/default.yaml'), 'utf8');
|
||||
|
||||
if (config_path.type === 'xdg') {
|
||||
// $XDG_DATA_HOME defines the base directory relative to which user specific data files should be stored,
|
||||
// If $XDG_DATA_HOME is either not set or empty, a default equal to $HOME/.local/share should be used.
|
||||
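// Illustrative sketch only (the paths below are assumptions, not taken from the diff):
// with HOME=/home/alice and XDG_DATA_HOME unset, the default "storage: ./storage" line
// would be rewritten to "storage: /home/alice/.local/share/verdaccio/storage".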
let data_dir = process.env.XDG_DATA_HOME || Path.join(process.env.HOME, '.local', 'share');
|
||||
if (folder_exists(data_dir)) {
|
||||
data_dir = Path.resolve(Path.join(data_dir, pkgJson.name, 'storage'));
|
||||
created_config = created_config.replace(/^storage: .\/storage$/m, `storage: ${data_dir}`);
|
||||
}
|
||||
}
|
||||
|
||||
fs.writeFileSync(config_path.path, created_config);
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve a list of possible config file locations.
|
||||
* @return {Array}
|
||||
*/
|
||||
function get_paths() {
|
||||
let try_paths = [];
|
||||
let xdg_config = process.env.XDG_CONFIG_HOME
|
||||
|| process.env.HOME && Path.join(process.env.HOME, '.config');
|
||||
if (xdg_config && folder_exists(xdg_config)) {
|
||||
try_paths.push({
|
||||
path: Path.join(xdg_config, pkgJson.name, CONFIG_FILE),
|
||||
type: 'xdg',
|
||||
});
|
||||
}
|
||||
|
||||
if (process.platform === 'win32' && process.env.APPDATA && folder_exists(process.env.APPDATA)) {
|
||||
try_paths.push({
|
||||
path: Path.resolve(Path.join(process.env.APPDATA, pkgJson.name, CONFIG_FILE)),
|
||||
type: 'win',
|
||||
});
|
||||
}
|
||||
|
||||
try_paths.push({
|
||||
path: Path.resolve(Path.join('.', pkgJson.name, CONFIG_FILE)),
|
||||
type: 'def',
|
||||
});
|
||||
|
||||
// backward compatibility
|
||||
try_paths.push({
|
||||
path: Path.resolve(Path.join('.', CONFIG_FILE)),
|
||||
type: 'old',
|
||||
});
|
||||
|
||||
return try_paths;
|
||||
}
|
||||
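// Illustrative resolution order (example paths are assumptions): on Linux with HOME=/home/alice,
// get_paths() would typically yield, in priority order:
//   /home/alice/.config/verdaccio/config.yaml   (xdg)
//   ./verdaccio/config.yaml                      (def)
//   ./config.yaml                                (old, backward compatibility)
// find_config_file() returns the first path that exists, or creates a default file at the first entry.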
|
||||
/**
|
||||
 * Check whether the given path exists and is a directory.
|
||||
* @param {String} path
|
||||
* @return {Boolean}
|
||||
*/
|
||||
function folder_exists(path) {
|
||||
try {
|
||||
const stat = fs.statSync(path);
|
||||
return stat.isDirectory();
|
||||
} catch(_) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
 * Check whether the given path exists and is a file.
|
||||
* @param {String} path
|
||||
* @return {Boolean}
|
||||
*/
|
||||
function file_exists(path) {
|
||||
try {
|
||||
const stat = fs.statSync(path);
|
||||
return stat.isFile();
|
||||
} catch(_) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = find_config_file;
|
||||
324  lib/config.js
@@ -1,144 +1,224 @@
|
||||
var assert = require('assert')
|
||||
, crypto = require('crypto')
|
||||
, minimatch = require('minimatch')
|
||||
, utils = require('./utils')
|
||||
/* eslint prefer-rest-params: "off" */
|
||||
/* eslint prefer-spread: "off" */
|
||||
|
||||
// [[a, [b, c]], d] -> [a, b, c, d]
|
||||
'use strict';
|
||||
|
||||
const assert = require('assert');
|
||||
const _ = require('lodash');
|
||||
const Error = require('http-errors');
|
||||
const Crypto = require('crypto');
|
||||
const minimatch = require('minimatch');
|
||||
|
||||
const Utils = require('./utils');
|
||||
const pkginfo = require('pkginfo')(module); // eslint-disable-line no-unused-vars
|
||||
const pkgVersion = module.exports.version;
|
||||
const pkgName = module.exports.name;
|
||||
|
||||
/**
|
||||
* [[a, [b, c]], d] -> [a, b, c, d]
|
||||
* @param {*} array
|
||||
* @return {Array}
|
||||
*/
|
||||
function flatten(array) {
|
||||
var result = []
|
||||
for (var i=0; i<array.length; i++) {
|
||||
if (Array.isArray(array[i])) {
|
||||
result.push.apply(result, flatten(array[i]))
|
||||
} else {
|
||||
result.push(array[i])
|
||||
}
|
||||
}
|
||||
return result
|
||||
let result = [];
|
||||
for (let i=0; i<array.length; i++) {
|
||||
if (Array.isArray(array[i])) {
|
||||
result.push.apply(result, flatten(array[i]));
|
||||
} else {
|
||||
result.push(array[i]);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
function Config(config) {
|
||||
if (!(this instanceof Config)) return new Config(config)
|
||||
for (var i in config) {
|
||||
if (this[i] == null) this[i] = config[i]
|
||||
}
|
||||
/**
|
||||
* Coordinates the application configuration
|
||||
*/
|
||||
class Config {
|
||||
|
||||
// some weird shell scripts are valid yaml files parsed as string
|
||||
assert.equal(typeof(config), 'object', 'CONFIG: this doesn\'t look like a valid config file')
|
||||
/**
|
||||
   * @param {*} config the parsed config file content
|
||||
*/
|
||||
constructor(config) {
|
||||
const self = this;
|
||||
for (let i in config) {
|
||||
if (self[i] == null) {
|
||||
self[i] = config[i];
|
||||
}
|
||||
}
|
||||
|
||||
assert(this.storage, 'CONFIG: storage path not defined')
|
||||
if (!self.user_agent) {
|
||||
self.user_agent = `${pkgName}/${pkgVersion}`;
|
||||
}
|
||||
|
||||
var users = {all:true, anonymous:true, 'undefined':true, owner:true, none:true}
|
||||
// some weird shell scripts are valid yaml files parsed as string
|
||||
assert.equal(typeof(config), 'object', 'CONFIG: it doesn\'t look like a valid config file');
|
||||
|
||||
var check_user_or_uplink = function(arg) {
|
||||
assert(arg !== 'all' || arg !== 'owner' || arg !== 'anonymous' || arg !== 'undefined' || arg !== 'none', 'CONFIG: reserved user/uplink name: ' + arg)
|
||||
assert(!arg.match(/\s/), 'CONFIG: invalid user name: ' + arg)
|
||||
assert(users[arg] == null, 'CONFIG: duplicate user/uplink name: ' + arg)
|
||||
users[arg] = true
|
||||
}
|
||||
assert(self.storage, 'CONFIG: storage path not defined');
|
||||
|
||||
;['users', 'uplinks', 'packages'].forEach(function(x) {
|
||||
if (this[x] == null) this[x] = {}
|
||||
assert(utils.is_object(this[x]), 'CONFIG: bad "'+x+'" value (object expected)')
|
||||
})
|
||||
const users = {
|
||||
'all': true,
|
||||
'anonymous': true,
|
||||
'undefined': true,
|
||||
'owner': true,
|
||||
'none': true,
|
||||
};
|
||||
|
||||
for (var i in this.users) check_user_or_uplink(i)
|
||||
for (var i in this.uplinks) check_user_or_uplink(i)
|
||||
const check_user_or_uplink = function(arg) {
|
||||
assert(arg !== 'all' && arg !== 'owner'
|
||||
&& arg !== 'anonymous' && arg !== 'undefined' && arg !== 'none', 'CONFIG: reserved user/uplink name: ' + arg);
|
||||
assert(!arg.match(/\s/), 'CONFIG: invalid user name: ' + arg);
|
||||
assert(users[arg] == null, 'CONFIG: duplicate user/uplink name: ' + arg);
|
||||
users[arg] = true;
|
||||
}
|
||||
// sanity check for strategic config properties
|
||||
;['users', 'uplinks', 'packages'].forEach(function(x) {
|
||||
if (self[x] == null) self[x] = {};
|
||||
assert(Utils.is_object(self[x]), `CONFIG: bad "${x}" value (object expected)`);
|
||||
});
|
||||
// sanity check for users
|
||||
for (let i in self.users) {
|
||||
if (Object.prototype.hasOwnProperty.call(self.users, i)) {
|
||||
check_user_or_uplink(i);
|
||||
}
|
||||
}
|
||||
// sanity check for uplinks
|
||||
for (let i in self.uplinks) {
|
||||
if (self.uplinks[i].cache == null) {
|
||||
self.uplinks[i].cache = true;
|
||||
}
|
||||
if (Object.prototype.hasOwnProperty.call(self.uplinks, i)) {
|
||||
check_user_or_uplink(i);
|
||||
}
|
||||
}
|
||||
for (let i in self.users) {
|
||||
if (Object.prototype.hasOwnProperty.call(self.users, i)) {
|
||||
assert(self.users[i].password, 'CONFIG: no password for user: ' + i);
|
||||
assert(typeof(self.users[i].password) === 'string' &&
|
||||
self.users[i].password.match(/^[a-f0-9]{40}$/)
|
||||
, 'CONFIG: wrong password format for user: ' + i + ', sha1 expected');
|
||||
}
|
||||
}
|
||||
for (let i in self.uplinks) {
|
||||
if (Object.prototype.hasOwnProperty.call(self.uplinks, i)) {
|
||||
assert(self.uplinks[i].url, 'CONFIG: no url for uplink: ' + i);
|
||||
assert( typeof(self.uplinks[i].url) === 'string'
|
||||
, 'CONFIG: wrong url format for uplink: ' + i);
|
||||
self.uplinks[i].url = self.uplinks[i].url.replace(/\/$/, '');
|
||||
}
|
||||
}
|
||||
|
||||
for (var i in this.users) {
|
||||
assert(this.users[i].password, 'CONFIG: no password for user: ' + i)
|
||||
assert(
|
||||
typeof(this.users[i].password) === 'string' &&
|
||||
this.users[i].password.match(/^[a-f0-9]{40}$/)
|
||||
, 'CONFIG: wrong password format for user: ' + i + ', sha1 expected')
|
||||
}
|
||||
/**
|
||||
* Normalise user list.
|
||||
* @return {Array}
|
||||
*/
|
||||
function normalize_userlist() {
|
||||
let result = [];
|
||||
|
||||
for (var i in this.uplinks) {
|
||||
assert(this.uplinks[i].url, 'CONFIG: no url for uplink: ' + i)
|
||||
assert(
|
||||
typeof(this.uplinks[i].url) === 'string'
|
||||
, 'CONFIG: wrong url format for uplink: ' + i)
|
||||
this.uplinks[i].url = this.uplinks[i].url.replace(/\/$/, '')
|
||||
}
|
||||
for (let i=0; i<arguments.length; i++) {
|
||||
if (arguments[i] == null) continue;
|
||||
|
||||
function check_userlist(i, hash, action) {
|
||||
if (hash[action] == null) hash[action] = []
|
||||
// if it's a string, split it to array
|
||||
if (typeof(arguments[i]) === 'string') {
|
||||
result.push(arguments[i].split(/\s+/));
|
||||
} else if (Array.isArray(arguments[i])) {
|
||||
result.push(arguments[i]);
|
||||
} else {
|
||||
throw Error('CONFIG: bad package acl (array or string expected): ' + JSON.stringify(arguments[i]));
|
||||
}
|
||||
}
|
||||
return flatten(result);
|
||||
}
|
||||
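// Illustrative only: normalize_userlist('alice bob', ['carol'], undefined) would return
// ['alice', 'bob', 'carol'] -- strings are split on whitespace, arrays are kept as-is,
// and null/undefined arguments are skipped before the result is flattened.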
|
||||
// if it's a string, split it to array
|
||||
if (typeof(hash[action]) === 'string') {
|
||||
hash[action] = hash[action].split(/\s+/)
|
||||
}
|
||||
// add a default rule for all packages to make writing plugins easier
|
||||
if (self.packages['**'] == null) {
|
||||
self.packages['**'] = {};
|
||||
}
|
||||
|
||||
assert(
|
||||
typeof(hash[action]) === 'object' &&
|
||||
Array.isArray(hash[action])
|
||||
, 'CONFIG: bad "'+i+'" package '+action+' description (array or string expected)')
|
||||
hash[action] = flatten(hash[action])
|
||||
hash[action].forEach(function(user) {
|
||||
assert(
|
||||
users[user] != null
|
||||
, 'CONFIG: "'+i+'" package: user "'+user+'" doesn\'t exist')
|
||||
})
|
||||
}
|
||||
for (let i in self.packages) {
|
||||
if (Object.prototype.hasOwnProperty.call(self.packages, i)) {
|
||||
assert(
|
||||
typeof(self.packages[i]) === 'object' &&
|
||||
!Array.isArray(self.packages[i])
|
||||
, 'CONFIG: bad "'+i+'" package description (object expected)');
|
||||
|
||||
for (var i in this.packages) {
|
||||
assert(
|
||||
typeof(this.packages[i]) === 'object' &&
|
||||
!Array.isArray(this.packages[i])
|
||||
, 'CONFIG: bad "'+i+'" package description (object expected)')
|
||||
self.packages[i].access = normalize_userlist(
|
||||
self.packages[i].allow_access,
|
||||
self.packages[i].access
|
||||
);
|
||||
delete self.packages[i].allow_access;
|
||||
|
||||
check_userlist(i, this.packages[i], 'allow_access')
|
||||
check_userlist(i, this.packages[i], 'allow_publish')
|
||||
check_userlist(i, this.packages[i], 'proxy_access')
|
||||
check_userlist(i, this.packages[i], 'proxy_publish')
|
||||
self.packages[i].publish = normalize_userlist(
|
||||
self.packages[i].allow_publish,
|
||||
self.packages[i].publish
|
||||
);
|
||||
delete self.packages[i].allow_publish;
|
||||
|
||||
// deprecated
|
||||
check_userlist(i, this.packages[i], 'access')
|
||||
check_userlist(i, this.packages[i], 'proxy')
|
||||
check_userlist(i, this.packages[i], 'publish')
|
||||
}
|
||||
self.packages[i].proxy = normalize_userlist(
|
||||
self.packages[i].proxy_access,
|
||||
self.packages[i].proxy
|
||||
);
|
||||
delete self.packages[i].proxy_access;
|
||||
}
|
||||
}
|
||||
|
||||
// loading these from ENV if aren't in config
|
||||
;['http_proxy', 'https_proxy', 'no_proxy'].forEach((function(v) {
|
||||
if (!(v in this)) {
|
||||
this[v] = process.env[v] || process.env[v.toUpperCase()]
|
||||
}
|
||||
}).bind(this))
|
||||
// loading these from ENV if aren't in config
|
||||
['http_proxy', 'https_proxy', 'no_proxy'].forEach((function(v) {
|
||||
if (!(v in self)) {
|
||||
self[v] = process.env[v] || process.env[v.toUpperCase()];
|
||||
}
|
||||
}));
|
||||
|
||||
return this
|
||||
    // unique identifier of this server (or cluster), used to avoid loops
|
||||
if (!self.server_id) {
|
||||
self.server_id = Crypto.pseudoRandomBytes(6).toString('hex');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether an uplink can proxy
|
||||
   * @param {String} pkg package name
|
||||
* @param {*} upLink
|
||||
* @return {Boolean}
|
||||
*/
|
||||
hasProxyTo(pkg, upLink) {
|
||||
return (this.getMatchedPackagesSpec(pkg).proxy || []).reduce(function(prev, curr) {
|
||||
if (upLink === curr) {
|
||||
return true;
|
||||
}
|
||||
return prev;
|
||||
}, false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check for package spec
|
||||
* @param {String} pkg package name
|
||||
* @return {Object}
|
||||
*/
|
||||
getMatchedPackagesSpec(pkg) {
|
||||
for (let i in this.packages) {
|
||||
if (minimatch.makeRe(i).exec(pkg)) {
|
||||
return this.packages[i];
|
||||
}
|
||||
}
|
||||
return {};
|
||||
}
|
||||
|
||||
/**
|
||||
   * Store the secret key if one is received, otherwise generate and store a new one
|
||||
* @param {String} secret
|
||||
* @return {String}
|
||||
*/
|
||||
checkSecretKey(secret) {
|
||||
if (_.isNil(secret) === false) {
|
||||
this.secret = secret;
|
||||
return secret;
|
||||
}
|
||||
// it generates a secret key
|
||||
    // FUTURE: this might be an external secret key, perhaps within the config file?
|
||||
this.secret = Crypto.pseudoRandomBytes(32).toString('hex');
|
||||
return this.secret;
|
||||
}
|
||||
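  // Illustrative only: checkSecretKey('deadbeef') stores and returns 'deadbeef' unchanged,
  // while checkSecretKey(undefined) generates, stores and returns a fresh 64-character hex secret.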
}
|
||||
|
||||
function allow_action(package, who, action) {
|
||||
for (var i in this.packages) {
|
||||
if (minimatch.makeRe(i).exec(package)) {
|
||||
return this.packages[i][action].reduce(function(prev, curr) {
|
||||
if (curr === String(who) || curr === 'all') return true
|
||||
return prev
|
||||
}, false)
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
Config.prototype.allow_access = function(package, user) {
|
||||
return allow_action.call(this, package, user, 'allow_access') || allow_action.call(this, package, user, 'access')
|
||||
}
|
||||
|
||||
Config.prototype.allow_publish = function(package, user) {
|
||||
return allow_action.call(this, package, user, 'allow_publish') || allow_action.call(this, package, user, 'publish')
|
||||
}
|
||||
|
||||
Config.prototype.proxy_access = function(package, uplink) {
|
||||
return allow_action.call(this, package, uplink, 'proxy_access') || allow_action.call(this, package, uplink, 'proxy')
|
||||
}
|
||||
|
||||
Config.prototype.proxy_publish = function(package, uplink) {
|
||||
return allow_action.call(this, package, uplink, 'proxy_publish')
|
||||
}
|
||||
|
||||
Config.prototype.authenticate = function(user, password) {
|
||||
if (this.users[user] == null) return false
|
||||
return crypto.createHash('sha1').update(password).digest('hex') === this.users[user].password
|
||||
}
|
||||
|
||||
module.exports = Config
|
||||
|
||||
module.exports = Config;
|
||||
|
||||
@@ -1,73 +0,0 @@
|
||||
# path to a directory with all packages
|
||||
storage: ./storage
|
||||
|
||||
# a list of users
|
||||
users:
|
||||
admin:
|
||||
# crypto.createHash('sha1').update(pass).digest('hex')
|
||||
password: __PASSWORD__
|
||||
|
||||
# a list of other known repositories we can talk to
|
||||
uplinks:
|
||||
npmjs:
|
||||
url: https://registry.npmjs.org/
|
||||
|
||||
# amount of time (in milliseconds) to wait for repository to respond
|
||||
  # before giving up and using the local cached copy
|
||||
#timeout: 30000
|
||||
|
||||
# maximum time (in seconds) in which data is considered up to date
|
||||
#
|
||||
# default is 2 minutes, so server won't request the same data from
|
||||
# uplink if a similar request was made less than 2 minutes ago
|
||||
#maxage: 120
|
||||
|
||||
packages:
|
||||
# uncomment this for packages with "local-" prefix to be available
|
||||
# for admin only, it's a recommended way of handling private packages
|
||||
#'local-*':
|
||||
# allow_access: admin
|
||||
# allow_publish: admin
|
||||
|
||||
'*':
|
||||
# allow all users to read packages ('all' is a keyword)
|
||||
# this includes non-authenticated users
|
||||
allow_access: all
|
||||
|
||||
# allow 'admin' to publish packages
|
||||
allow_publish: admin
|
||||
|
||||
# if package is not available locally, proxy requests to 'npmjs' registry
|
||||
proxy_access: npmjs
|
||||
|
||||
# when package is published locally, also push it to remote registry
|
||||
#proxy_publish: none
|
||||
|
||||
#####################################################################
|
||||
# Advanced settings
|
||||
#####################################################################
|
||||
|
||||
# if you use nginx with custom path, use this to override links
|
||||
#url_prefix: https://dev.company.local/sinopia/
|
||||
|
||||
# you can specify listen address (or simply a port)
|
||||
#listen: localhost:4873
|
||||
|
||||
# type: file | stdout | stderr
|
||||
# level: trace | debug | info | http (default) | warn | error | fatal
|
||||
#
|
||||
# parameters for file: name is filename
|
||||
# {type: 'file', path: 'sinopia.log', level: 'debug'},
|
||||
#
|
||||
# parameters for stdout and stderr: format: json | pretty
|
||||
# {type: 'stdout', format: 'pretty', level: 'debug'},
|
||||
logs:
|
||||
- {type: stdout, format: pretty, level: http}
|
||||
#- {type: file, path: sinopia.log, level: info}
|
||||
|
||||
# you can specify proxy used with all requests in wget-like manner here
|
||||
# (or set up ENV variables with the same name)
|
||||
#http_proxy: http://something.local/
|
||||
#https_proxy: https://something.local/
|
||||
#no_proxy: localhost,127.0.0.1
|
||||
|
||||
@@ -1,16 +0,0 @@
|
||||
var fs = require('fs')
|
||||
, crypto = require('crypto')
|
||||
|
||||
module.exports = function create_config() {
|
||||
var pass = crypto.randomBytes(8).toString('base64').replace(/[=+\/]/g, '')
|
||||
, pass_digest = crypto.createHash('sha1').update(pass).digest('hex')
|
||||
, config = fs.readFileSync(require.resolve('./config_def.yaml'), 'utf8')
|
||||
config = config.replace('__PASSWORD__', pass_digest)
|
||||
|
||||
return {
|
||||
yaml: config,
|
||||
user: 'admin',
|
||||
pass: pass,
|
||||
}
|
||||
}
|
||||
|
||||
62  lib/error.js
@@ -1,62 +0,0 @@
|
||||
var util = require('util')
|
||||
, utils = require('./utils')
|
||||
|
||||
function parse_error_params(params, status, msg) {
|
||||
if (typeof(params) === 'string') {
|
||||
return {
|
||||
msg: params,
|
||||
status: status,
|
||||
}
|
||||
} else if (typeof(params) === 'number') {
|
||||
return {
|
||||
msg: msg,
|
||||
status: params,
|
||||
}
|
||||
} else if (utils.is_object(params)) {
|
||||
if (params.msg == null) params.msg = msg
|
||||
if (params.status == null) params.status = status
|
||||
return params
|
||||
} else {
|
||||
return {
|
||||
msg: msg,
|
||||
status: status,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* Errors caused by malfunctioned code
|
||||
*/
|
||||
var AppError = function(params, constr) {
|
||||
Error.captureStackTrace(this, constr || this)
|
||||
params = parse_error_params(params, 500, 'Internal server error')
|
||||
this.msg = params.msg
|
||||
this.status = params.status
|
||||
}
|
||||
util.inherits(AppError, Error)
|
||||
AppError.prototype.name = 'Application Error'
|
||||
|
||||
/*
|
||||
* Errors caused by wrong request
|
||||
*/
|
||||
var UserError = function(params, constr) {
|
||||
params = parse_error_params(params, 404, 'The requested resource was not found')
|
||||
this.msg = params.msg
|
||||
this.status = params.status
|
||||
}
|
||||
util.inherits(UserError, Error)
|
||||
UserError.prototype.name = 'User Error'
|
||||
|
||||
/*
|
||||
* Mimic filesystem errors
|
||||
*/
|
||||
var FSError = function(code) {
|
||||
this.code = code
|
||||
}
|
||||
util.inherits(UserError, Error)
|
||||
UserError.prototype.name = 'FS Error'
|
||||
|
||||
module.exports.AppError = AppError
|
||||
module.exports.UserError = UserError
|
||||
module.exports.FSError = FSError
|
||||
|
||||
152  lib/file-locking.js  (new file)
@@ -0,0 +1,152 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* file-locking.js - file system locking (replaces fs-ext)
|
||||
*/
|
||||
|
||||
const async = require('async');
|
||||
const locker = require('lockfile');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
// locks a file by creating a lock file
|
||||
const lockFile = function(name, next) {
|
||||
const lockFileName = `${name}.lock`;
|
||||
const lockOpts = {
|
||||
wait: 1000, // time (ms) to wait when checking for stale locks
|
||||
pollPeriod: 100, // how often (ms) to re-check stale locks
|
||||
|
||||
stale: 5 * 60 * 1000, // locks are considered stale after 5 minutes
|
||||
|
||||
retries: 100, // number of times to attempt to create a lock
|
||||
retryWait: 100, // time (ms) between tries
|
||||
};
|
||||
|
||||
async.series({
|
||||
|
||||
statdir: function(callback) {
|
||||
// test to see if the directory exists
|
||||
fs.stat(path.dirname(name), function(err, stats) {
|
||||
if (err) {
|
||||
callback(err);
|
||||
} else if (!stats.isDirectory()) {
|
||||
callback(new Error(path.dirname(name) + ' is not a directory'));
|
||||
} else {
|
||||
callback(null);
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
statfile: function(callback) {
|
||||
// test to see if the file to lock exists
|
||||
fs.stat(name, function(err, stats) {
|
||||
if (err) {
|
||||
callback(err);
|
||||
} else if (!stats.isFile()) {
|
||||
        callback(new Error(name + ' is not a file'));
|
||||
} else {
|
||||
callback(null);
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
lockfile: function(callback) {
|
||||
// try to lock the file
|
||||
locker.lock(lockFileName, lockOpts, callback);
|
||||
},
|
||||
|
||||
}, function(err) {
|
||||
if (err) {
|
||||
// lock failed
|
||||
return next(err);
|
||||
}
|
||||
|
||||
// lock succeeded
|
||||
return next(null);
|
||||
});
|
||||
};
|
||||
|
||||
// unlocks file by removing existing lock file
|
||||
const unlockFile = function(name, next) {
|
||||
const lockFileName = `${name}.lock`;
|
||||
locker.unlock(lockFileName, function(err) {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
return next(null);
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Reads a local file, which involves
|
||||
* optionally taking a lock
|
||||
* reading the file contents
|
||||
* optionally parsing JSON contents
|
||||
* @param {*} name
|
||||
* @param {*} options
|
||||
* @param {*} next
|
||||
*/
|
||||
function readFile(name, options, next) {
|
||||
  if (typeof options === 'function' && next == null) { // callback passed in place of options
|
||||
next = options;
|
||||
options = {};
|
||||
}
|
||||
|
||||
options = options || {};
|
||||
options.lock = options.lock || false;
|
||||
options.parse = options.parse || false;
|
||||
|
||||
const lock = function(callback) {
|
||||
if (!options.lock) {
|
||||
return callback(null);
|
||||
}
|
||||
|
||||
lockFile(name, function(err) {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
return callback(null);
|
||||
});
|
||||
};
|
||||
|
||||
const read = function(callback) {
|
||||
fs.readFile(name, 'utf8', function(err, contents) {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
callback(null, contents);
|
||||
});
|
||||
};
|
||||
|
||||
const parseJSON = function(contents, callback) {
|
||||
if (!options.parse) {
|
||||
return callback(null, contents);
|
||||
}
|
||||
|
||||
try {
|
||||
contents = JSON.parse(contents);
|
||||
return callback(null, contents);
|
||||
} catch (err) {
|
||||
return callback(err);
|
||||
}
|
||||
};
|
||||
|
||||
async.waterfall([
|
||||
lock,
|
||||
read,
|
||||
parseJSON,
|
||||
],
|
||||
|
||||
function(err, result) {
|
||||
if (err) {
|
||||
return next(err);
|
||||
} else {
|
||||
return next(null, result);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
exports.lockFile = lockFile;
|
||||
exports.unlockFile = unlockFile;
|
||||
exports.readFile = readFile;
|
||||
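// Usage sketch (illustrative only; the file path is an assumption): read a package manifest
// under a lock, parse it as JSON, then release the lock when done.
//
//   const locking = require('./file-locking');
//   locking.readFile('./storage/foo/package.json', {lock: true, parse: true}, function(err, json) {
//     if (err) return console.error(err);
//     console.log(json.name);
//     locking.unlockFile('./storage/foo/package.json', function(err) { /* lock released */ });
//   });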
492  lib/index-api.js  (new file)
@@ -0,0 +1,492 @@
|
||||
'use strict';
|
||||
|
||||
let Cookies = require('cookies');
|
||||
let express = require('express');
|
||||
let bodyParser = require('body-parser');
|
||||
let Error = require('http-errors');
|
||||
let Path = require('path');
|
||||
let Middleware = require('./middleware');
|
||||
let Notify = require('./notify');
|
||||
let Utils = require('./utils');
|
||||
let expect_json = Middleware.expect_json;
|
||||
let match = Middleware.match;
|
||||
let media = Middleware.media;
|
||||
let validate_name = Middleware.validate_name;
|
||||
let validate_pkg = Middleware.validate_package;
|
||||
|
||||
module.exports = function(config, auth, storage) {
|
||||
/* eslint new-cap:off */
|
||||
const app = express.Router();
|
||||
const can = Middleware.allow(auth);
|
||||
const notify = Notify.notify;
|
||||
|
||||
// validate all of these params as a package name
|
||||
// this might be too harsh, so ask if it causes trouble
|
||||
app.param('package', validate_pkg);
|
||||
app.param('filename', validate_name);
|
||||
app.param('tag', validate_name);
|
||||
app.param('version', validate_name);
|
||||
app.param('revision', validate_name);
|
||||
app.param('token', validate_name);
|
||||
|
||||
// these can't be safely put into express url for some reason
|
||||
app.param('_rev', match(/^-rev$/));
|
||||
app.param('org_couchdb_user', match(/^org\.couchdb\.user:/));
|
||||
app.param('anything', match(/.*/));
|
||||
|
||||
app.use(auth.basic_middleware());
|
||||
// app.use(auth.bearer_middleware())
|
||||
app.use(bodyParser.json({strict: false, limit: config.max_body_size || '10mb'}));
|
||||
app.use(Middleware.anti_loop(config));
|
||||
|
||||
// encode / in a scoped package name to be matched as a single parameter in routes
|
||||
app.use(function(req, res, next) {
|
||||
if (req.url.indexOf('@') != -1) {
|
||||
      // e.g.: /@org/pkg/1.2.3 -> /@org%2Fpkg/1.2.3 (already-encoded URLs are left unchanged)
|
||||
req.url = req.url.replace(/^(\/@[^\/%]+)\/(?!$)/, '$1%2F');
|
||||
}
|
||||
next();
|
||||
});
|
||||
|
||||
// for "npm whoami"
|
||||
app.get('/whoami', function(req, res, next) {
|
||||
if (req.headers.referer === 'whoami') {
|
||||
next({username: req.remote_user.name});
|
||||
} else {
|
||||
next('route');
|
||||
}
|
||||
});
|
||||
app.get('/-/whoami', function(req, res, next) {
|
||||
next({username: req.remote_user.name});
|
||||
});
|
||||
|
||||
// TODO: anonymous user?
|
||||
app.get('/:package/:version?', can('access'), function(req, res, next) {
|
||||
storage.get_package(req.params.package, {req: req}, function(err, info) {
|
||||
if (err) return next(err);
|
||||
info = Utils.filter_tarball_urls(info, req, config);
|
||||
|
||||
let version = req.params.version;
|
||||
if (!version) return next(info);
|
||||
|
||||
let t = Utils.get_version(info, version);
|
||||
if (t != null) return next(t);
|
||||
|
||||
if (info['dist-tags'] != null) {
|
||||
if (info['dist-tags'][version] != null) {
|
||||
version = info['dist-tags'][version];
|
||||
t = Utils.get_version(info, version);
|
||||
if (t != null) return next(t);
|
||||
}
|
||||
}
|
||||
|
||||
return next( Error[404]('version not found: ' + req.params.version) );
|
||||
});
|
||||
});
|
||||
|
||||
app.get('/:package/-/:filename', can('access'), function(req, res, next) {
|
||||
let stream = storage.get_tarball(req.params.package, req.params.filename);
|
||||
stream.on('content-length', function(v) {
|
||||
res.header('Content-Length', v);
|
||||
});
|
||||
stream.on('error', function(err) {
|
||||
return res.report_error(err);
|
||||
});
|
||||
res.header('Content-Type', 'application/octet-stream');
|
||||
stream.pipe(res);
|
||||
});
|
||||
|
||||
// searching packages
|
||||
app.get('/-/all(\/since)?', function(req, res, next) {
|
||||
let received_end = false;
|
||||
let response_finished = false;
|
||||
let processing_pkgs = 0;
|
||||
let firstPackage = true;
|
||||
|
||||
res.status(200);
|
||||
|
||||
/*
|
||||
    * Official NPM registry (registry.npmjs.org) no longer returns the whole database,
|
||||
    * it only returns packages matching the keyword in `referer: search pkg-name`,
|
||||
    * and the NPM client queries the server on every search.
|
||||
*
|
||||
    * The magic number 99999 is what the NPM registry sends. Modifying it may cause strange
|
||||
* behaviour in the future.
|
||||
*
|
||||
    * BTW: NPM will not return results if the user-agent does not contain the string 'npm',
|
||||
* See: method 'request' in up-storage.js
|
||||
*
|
||||
    * If there is no local cache, NPM will request /-/all, then get a response with
|
||||
    * _updated: 99999; the 'Date' response header is Mon, 10 Oct 1983 00:12:48 GMT,
|
||||
    * which makes NPM always query the server.
|
||||
*
|
||||
    * The data structure also differs: when requesting /-/all the response is an object, but
|
||||
    * when requesting /-/all/since it is an array.
|
||||
*/
|
||||
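    // Illustrative response shapes only (package names and fields are made up):
    //   GET /-/all        -> {"_updated":99999,"pkg-a":{"name":"pkg-a", ...},"pkg-b":{...}}
    //   GET /-/all/since  -> [{"name":"pkg-a", ...},
    //                         {"name":"pkg-b", ...}]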
const respShouldBeArray = req.path.endsWith('/since');
|
||||
res.set('Date', 'Mon, 10 Oct 1983 00:12:48 GMT');
|
||||
const check_finish = function() {
|
||||
if (!received_end) {
|
||||
return;
|
||||
}
|
||||
if (processing_pkgs) {
|
||||
return;
|
||||
}
|
||||
if (response_finished) {
|
||||
return;
|
||||
}
|
||||
response_finished = true;
|
||||
if (respShouldBeArray) {
|
||||
res.end(']\n');
|
||||
} else {
|
||||
res.end('}\n');
|
||||
}
|
||||
};
|
||||
|
||||
if (respShouldBeArray) {
|
||||
res.write('[');
|
||||
} else {
|
||||
res.write('{"_updated":' + 99999);
|
||||
}
|
||||
|
||||
let stream = storage.search(req.query.startkey || 0, {req: req});
|
||||
|
||||
stream.on('data', function each(pkg) {
|
||||
processing_pkgs++;
|
||||
|
||||
auth.allow_access(pkg.name, req.remote_user, function(err, allowed) {
|
||||
processing_pkgs--;
|
||||
|
||||
if (err) {
|
||||
if (err.status && String(err.status).match(/^4\d\d$/)) {
|
||||
// auth plugin returns 4xx user error,
|
||||
// that's equivalent of !allowed basically
|
||||
allowed = false;
|
||||
} else {
|
||||
stream.abort(err);
|
||||
}
|
||||
}
|
||||
|
||||
if (allowed) {
|
||||
if (respShouldBeArray) {
|
||||
res.write(`${firstPackage ? '' : ','}${JSON.stringify(pkg)}\n`);
|
||||
if (firstPackage) {
|
||||
firstPackage = false;
|
||||
}
|
||||
} else {
|
||||
res.write(',\n' + JSON.stringify(pkg.name) + ':' + JSON.stringify(pkg));
|
||||
}
|
||||
}
|
||||
|
||||
check_finish();
|
||||
});
|
||||
});
|
||||
|
||||
stream.on('error', function(_err) {
|
||||
res.socket.destroy();
|
||||
});
|
||||
|
||||
stream.on('end', function() {
|
||||
received_end = true;
|
||||
check_finish();
|
||||
});
|
||||
});
|
||||
|
||||
  // placeholder 'cause npm requires authentication to publish
|
||||
// we do not do any real authentication yet
|
||||
app.post('/_session', Cookies.express(), function(req, res, next) {
|
||||
res.cookies.set('AuthSession', String(Math.random()), {
|
||||
// npmjs.org sets 10h expire
|
||||
expires: new Date(Date.now() + 10*60*60*1000),
|
||||
});
|
||||
next({ok: true, name: 'somebody', roles: []});
|
||||
});
|
||||
|
||||
app.get('/-/user/:org_couchdb_user', function(req, res, next) {
|
||||
res.status(200);
|
||||
next({
|
||||
ok: 'you are authenticated as "' + req.remote_user.name + '"',
|
||||
});
|
||||
});
|
||||
|
||||
app.put('/-/user/:org_couchdb_user/:_rev?/:revision?', function(req, res, next) {
|
||||
let token = (req.body.name && req.body.password)
|
||||
? auth.aes_encrypt(req.body.name + ':' + req.body.password).toString('base64')
|
||||
: undefined;
|
||||
if (req.remote_user.name != null) {
|
||||
res.status(201);
|
||||
return next({
|
||||
ok: 'you are authenticated as \'' + req.remote_user.name + '\'',
|
||||
// token: auth.issue_token(req.remote_user),
|
||||
token: token,
|
||||
});
|
||||
} else {
|
||||
auth.add_user(req.body.name, req.body.password, function(err, user) {
|
||||
if (err) {
|
||||
if (err.status >= 400 && err.status < 500) {
|
||||
// With npm registering is the same as logging in,
|
||||
          // and npm accepts only a 409 error.
|
||||
// So, changing status code here.
|
||||
return next( Error[409](err.message) );
|
||||
}
|
||||
return next(err);
|
||||
}
|
||||
|
||||
req.remote_user = user;
|
||||
res.status(201);
|
||||
return next({
|
||||
ok: 'user \'' + req.body.name + '\' created',
|
||||
// token: auth.issue_token(req.remote_user),
|
||||
token: token,
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
app.delete('/-/user/token/*', function(req, res, next) {
|
||||
res.status(200);
|
||||
next({
|
||||
ok: 'Logged out',
|
||||
});
|
||||
});
|
||||
|
||||
const tag_package_version = function(req, res, next) {
|
||||
if (typeof(req.body) !== 'string') {
|
||||
return next('route');
|
||||
}
|
||||
|
||||
let tags = {};
|
||||
tags[req.params.tag] = req.body;
|
||||
storage.merge_tags(req.params.package, tags, function(err) {
|
||||
if (err) return next(err);
|
||||
res.status(201);
|
||||
return next({ok: 'package tagged'});
|
||||
});
|
||||
};
|
||||
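  // Illustrative only (package, tag and port are made up): running
  //   npm dist-tag add my-pkg@1.2.3 beta --registry http://localhost:4873/
  // would end up here as a PUT (or POST) to /-/package/my-pkg/dist-tags/beta with the
  // plain-string body "1.2.3", which merge_tags() records as {beta: '1.2.3'}.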
|
||||
// tagging a package
|
||||
app.put('/:package/:tag',
|
||||
can('publish'), media('application/json'), tag_package_version);
|
||||
|
||||
app.post('/-/package/:package/dist-tags/:tag',
|
||||
can('publish'), media('application/json'), tag_package_version);
|
||||
|
||||
app.put('/-/package/:package/dist-tags/:tag',
|
||||
can('publish'), media('application/json'), tag_package_version);
|
||||
|
||||
app.delete('/-/package/:package/dist-tags/:tag', can('publish'), function(req, res, next) {
|
||||
let tags = {};
|
||||
tags[req.params.tag] = null;
|
||||
storage.merge_tags(req.params.package, tags, function(err) {
|
||||
if (err) return next(err);
|
||||
res.status(201);
|
||||
return next({ok: 'tag removed'});
|
||||
});
|
||||
});
|
||||
|
||||
app.get('/-/package/:package/dist-tags', can('access'), function(req, res, next) {
|
||||
storage.get_package(req.params.package, {req: req}, function(err, info) {
|
||||
if (err) return next(err);
|
||||
|
||||
next(info['dist-tags']);
|
||||
});
|
||||
});
|
||||
|
||||
app.post('/-/package/:package/dist-tags', can('publish'), media('application/json'), expect_json,
|
||||
function(req, res, next) {
|
||||
storage.merge_tags(req.params.package, req.body, function(err) {
|
||||
if (err) return next(err);
|
||||
res.status(201);
|
||||
return next({ok: 'tags updated'});
|
||||
});
|
||||
});
|
||||
|
||||
app.put('/-/package/:package/dist-tags', can('publish'), media('application/json'), expect_json,
|
||||
function(req, res, next) {
|
||||
storage.replace_tags(req.params.package, req.body, function(err) {
|
||||
if (err) return next(err);
|
||||
res.status(201);
|
||||
return next({ok: 'tags updated'});
|
||||
});
|
||||
});
|
||||
|
||||
app.delete('/-/package/:package/dist-tags', can('publish'), media('application/json'),
|
||||
function(req, res, next) {
|
||||
storage.replace_tags(req.params.package, {}, function(err) {
|
||||
if (err) return next(err);
|
||||
res.status(201);
|
||||
return next({ok: 'tags removed'});
|
||||
});
|
||||
});
|
||||
|
||||
// publishing a package
|
||||
app.put('/:package/:_rev?/:revision?', can('publish'), media('application/json'), expect_json, function(req, res, next) {
|
||||
let name = req.params.package;
|
||||
let metadata;
|
||||
const create_tarball = function(filename, data, cb) {
|
||||
let stream = storage.add_tarball(name, filename);
|
||||
stream.on('error', function(err) {
|
||||
cb(err);
|
||||
});
|
||||
stream.on('success', function() {
|
||||
cb();
|
||||
});
|
||||
|
||||
// this is dumb and memory-consuming, but what choices do we have?
|
||||
stream.end(new Buffer(data.data, 'base64'));
|
||||
stream.done();
|
||||
};
|
||||
|
||||
const create_version = function(version, data, cb) {
|
||||
storage.add_version(name, version, data, null, cb);
|
||||
};
|
||||
|
||||
const add_tags = function(tags, cb) {
|
||||
storage.merge_tags(name, tags, cb);
|
||||
};
|
||||
|
||||
const after_change = function(err, ok_message) {
|
||||
// old npm behaviour
|
||||
if (metadata._attachments == null) {
|
||||
if (err) return next(err);
|
||||
res.status(201);
|
||||
return next({ok: ok_message, success: true});
|
||||
}
|
||||
|
||||
// npm-registry-client 0.3+ embeds tarball into the json upload
|
||||
// https://github.com/isaacs/npm-registry-client/commit/e9fbeb8b67f249394f735c74ef11fe4720d46ca0
|
||||
// issue https://github.com/rlidwka/sinopia/issues/31, dealing with it here:
|
||||
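      // Illustrative shape of such an upload (names and values are made up):
      //   { "name": "foo", "dist-tags": { "latest": "1.0.0" },
      //     "versions": { "1.0.0": { "name": "foo", "version": "1.0.0", ... } },
      //     "_attachments": { "foo-1.0.0.tgz": { "content_type": "application/octet-stream",
      //                                          "data": "<base64 tarball>", "length": 1234 } } }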
|
||||
if (typeof(metadata._attachments) !== 'object'
|
||||
|| Object.keys(metadata._attachments).length !== 1
|
||||
|| typeof(metadata.versions) !== 'object'
|
||||
|| Object.keys(metadata.versions).length !== 1) {
|
||||
// npm is doing something strange again
|
||||
// if this happens in normal circumstances, report it as a bug
|
||||
return next( Error[400]('unsupported registry call') );
|
||||
}
|
||||
|
||||
if (err && err.status != 409) {
|
||||
return next(err);
|
||||
}
|
||||
|
||||
// at this point document is either created or existed before
|
||||
const t1 = Object.keys(metadata._attachments)[0];
|
||||
create_tarball(Path.basename(t1), metadata._attachments[t1], function(err) {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
|
||||
const t2 = Object.keys(metadata.versions)[0];
|
||||
metadata.versions[t2].readme = metadata.readme != null ? String(metadata.readme) : '';
|
||||
create_version(t2, metadata.versions[t2], function(err) {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
|
||||
add_tags(metadata['dist-tags'], function(err) {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
notify(metadata, config);
|
||||
res.status(201);
|
||||
return next({ok: ok_message, success: true});
|
||||
});
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
if (Object.keys(req.body).length == 1 && Utils.is_object(req.body.users)) {
|
||||
// 501 status is more meaningful, but npm doesn't show error message for 5xx
|
||||
return next( Error[404]('npm star|unstar calls are not implemented') );
|
||||
}
|
||||
|
||||
try {
|
||||
metadata = Utils.validate_metadata(req.body, name);
|
||||
} catch(err) {
|
||||
return next( Error[422]('bad incoming package data') );
|
||||
}
|
||||
|
||||
if (req.params._rev) {
|
||||
storage.change_package(name, metadata, req.params.revision, function(err) {
|
||||
after_change(err, 'package changed');
|
||||
});
|
||||
} else {
|
||||
storage.addPackage(name, metadata, function(err) {
|
||||
after_change(err, 'created new package');
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// unpublishing an entire package
|
||||
app.delete('/:package/-rev/*', can('publish'), function(req, res, next) {
|
||||
storage.remove_package(req.params.package, function(err) {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
res.status(201);
|
||||
return next({ok: 'package removed'});
|
||||
});
|
||||
});
|
||||
|
||||
// removing a tarball
|
||||
app.delete('/:package/-/:filename/-rev/:revision', can('publish'), function(req, res, next) {
|
||||
storage.remove_tarball(req.params.package, req.params.filename, req.params.revision, function(err) {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
res.status(201);
|
||||
return next({ok: 'tarball removed'});
|
||||
});
|
||||
});
|
||||
|
||||
// uploading package tarball
|
||||
app.put('/:package/-/:filename/*', can('publish'), media('application/octet-stream'), function(req, res, next) {
|
||||
const name = req.params.package;
|
||||
const stream = storage.add_tarball(name, req.params.filename);
|
||||
req.pipe(stream);
|
||||
|
||||
// checking if end event came before closing
|
||||
let complete = false;
|
||||
req.on('end', function() {
|
||||
complete = true;
|
||||
stream.done();
|
||||
});
|
||||
req.on('close', function() {
|
||||
if (!complete) {
|
||||
stream.abort();
|
||||
}
|
||||
});
|
||||
|
||||
stream.on('error', function(err) {
|
||||
return res.report_error(err);
|
||||
});
|
||||
stream.on('success', function() {
|
||||
res.status(201);
|
||||
return next({
|
||||
ok: 'tarball uploaded successfully',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// adding a version
|
||||
app.put('/:package/:version/-tag/:tag', can('publish'), media('application/json'), expect_json, function(req, res, next) {
|
||||
let name = req.params.package;
|
||||
let version = req.params.version;
|
||||
let tag = req.params.tag;
|
||||
|
||||
storage.add_version(name, version, req.body, tag, function(err) {
|
||||
if (err) return next(err);
|
||||
res.status(201);
|
||||
return next({ok: 'package published'});
|
||||
});
|
||||
});
|
||||
|
||||
// npm ping
|
||||
app.get('/-/ping', function(req, res, next) {
|
||||
next({});
|
||||
});
|
||||
|
||||
return app;
|
||||
};
|
||||
|
||||
168  lib/index-web.js  (new file)
@@ -0,0 +1,168 @@
|
||||
'use strict';
|
||||
|
||||
let async = require('async');
|
||||
let bodyParser = require('body-parser');
|
||||
let Cookies = require('cookies');
|
||||
let express = require('express');
|
||||
let fs = require('fs');
|
||||
let Handlebars = require('handlebars');
|
||||
let renderReadme = require('render-readme');
|
||||
let Search = require('./search');
|
||||
let Middleware = require('./middleware');
|
||||
let Utils = require('./utils');
|
||||
let match = Middleware.match;
|
||||
let validate_name = Middleware.validate_name;
|
||||
let validate_pkg = Middleware.validate_package;
|
||||
|
||||
module.exports = function(config, auth, storage) {
|
||||
/* eslint new-cap:off */
|
||||
let app = express.Router();
|
||||
let can = Middleware.allow(auth);
|
||||
|
||||
// validate all of these params as a package name
|
||||
// this might be too harsh, so ask if it causes trouble
|
||||
app.param('package', validate_pkg);
|
||||
app.param('filename', validate_name);
|
||||
app.param('version', validate_name);
|
||||
app.param('anything', match(/.*/));
|
||||
|
||||
app.use(Cookies.express());
|
||||
app.use(bodyParser.urlencoded({extended: false}));
|
||||
app.use(auth.cookie_middleware());
|
||||
app.use(function(req, res, next) {
|
||||
// disable loading in frames (clickjacking, etc.)
|
||||
res.header('X-Frame-Options', 'deny');
|
||||
next();
|
||||
});
|
||||
|
||||
Search.configureStorage(storage);
|
||||
|
||||
Handlebars.registerPartial('entry', fs.readFileSync(require.resolve('./GUI/entry.hbs'), 'utf8'));
|
||||
let template;
|
||||
if (config.web && config.web.template) {
|
||||
template = Handlebars.compile(fs.readFileSync(config.web.template, 'utf8'));
|
||||
} else {
|
||||
template = Handlebars.compile(fs.readFileSync(require.resolve('./GUI/index.hbs'), 'utf8'));
|
||||
}
|
||||
app.get('/', function(req, res, next) {
|
||||
let proto = req.get('X-Forwarded-Proto') || req.protocol;
|
||||
let base = Utils.combineBaseUrl(proto, req.get('host'), config.url_prefix);
|
||||
res.setHeader('Content-Type', 'text/html');
|
||||
|
||||
storage.get_local(function(err, packages) {
|
||||
if (err) throw err; // that function shouldn't produce any
|
||||
async.filterSeries(packages, function(pkg, cb) {
|
||||
auth.allow_access(pkg.name, req.remote_user, function(err, allowed) {
|
||||
setImmediate(function() {
|
||||
if (err) {
|
||||
cb(null, false);
|
||||
} else {
|
||||
cb(err, allowed);
|
||||
}
|
||||
});
|
||||
});
|
||||
}, function(err, packages) {
|
||||
if (err) throw err;
|
||||
packages.sort(function(p1, p2) {
|
||||
if (p1.name < p2.name) {
|
||||
return -1;
|
||||
} else {
|
||||
return 1;
|
||||
}
|
||||
});
|
||||
|
||||
next(template({
|
||||
name: config.web && config.web.title ? config.web.title : 'Verdaccio',
|
||||
tagline: config.web && config.web.tagline ? config.web.tagline : '',
|
||||
packages: packages,
|
||||
baseUrl: base,
|
||||
username: req.remote_user.name,
|
||||
}));
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// Static
|
||||
app.get('/-/static/:filename', function(req, res, next) {
|
||||
let file = __dirname + '/static/' + req.params.filename;
|
||||
res.sendFile(file, function(err) {
|
||||
if (!err) return;
|
||||
if (err.status === 404) {
|
||||
next();
|
||||
} else {
|
||||
next(err);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
app.get('/-/logo', function(req, res, next) {
|
||||
res.sendFile( config.web && config.web.logo
|
||||
? config.web.logo
|
||||
: __dirname + '/static/logo-sm.png' );
|
||||
});
|
||||
|
||||
app.post('/-/login', function(req, res, next) {
|
||||
auth.authenticate(req.body.user, req.body.pass, function(err, user) {
|
||||
if (!err) {
|
||||
req.remote_user = user;
|
||||
// res.cookies.set('token', auth.issue_token(req.remote_user))
|
||||
|
||||
let str = req.body.user + ':' + req.body.pass;
|
||||
res.cookies.set('token', auth.aes_encrypt(str).toString('base64'));
|
||||
}
|
||||
|
||||
let base = config.url_prefix
|
||||
? config.url_prefix.replace(/\/$/, '')
|
||||
: req.protocol + '://' + req.get('host');
|
||||
res.redirect(base);
|
||||
});
|
||||
});
|
||||
|
||||
app.post('/-/logout', function(req, res, next) {
|
||||
let base = config.url_prefix
|
||||
? config.url_prefix.replace(/\/$/, '')
|
||||
: req.protocol + '://' + req.get('host');
|
||||
res.cookies.set('token', '');
|
||||
res.redirect(base);
|
||||
});
|
||||
|
||||
// Search
|
||||
app.get('/-/search/:anything', function(req, res, next) {
|
||||
const results = Search.query(req.params.anything);
|
||||
const packages = [];
|
||||
|
||||
const getData = function(i) {
|
||||
storage.get_package(results[i].ref, function(err, entry) {
|
||||
if (!err && entry) {
|
||||
auth.allow_access(entry.name, req.remote_user, function(err, allowed) { // TODO: This may cause performance issue?
|
||||
if (err || !allowed) return;
|
||||
|
||||
packages.push(entry.versions[entry['dist-tags'].latest]);
|
||||
});
|
||||
}
|
||||
|
||||
if (i >= results.length - 1) {
|
||||
next(packages);
|
||||
} else {
|
||||
getData(i + 1);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
if (results.length) {
|
||||
getData(0);
|
||||
} else {
|
||||
next([]);
|
||||
}
|
||||
});
|
||||
|
||||
app.get('/-/readme(/@:scope?)?/:package/:version?', can('access'), function(req, res, next) {
|
||||
let packageName = req.params.package;
|
||||
if (req.params.scope) packageName = '@'+ req.params.scope + '/' + packageName;
|
||||
storage.get_package(packageName, {req: req}, function(err, info) {
|
||||
if (err) return next(err);
|
||||
next( renderReadme(info.readme || 'ERROR: No README data found!') );
|
||||
});
|
||||
});
|
||||
return app;
|
||||
};
|
||||
394  lib/index.js
@@ -1,311 +1,113 @@
|
||||
var express = require('express')
|
||||
, cookies = require('cookies')
|
||||
, utils = require('./utils')
|
||||
, Storage = require('./storage')
|
||||
, Config = require('./config')
|
||||
, UError = require('./error').UserError
|
||||
, Middleware = require('./middleware')
|
||||
, Logger = require('./logger')
|
||||
, Cats = require('./status-cats')
|
||||
, basic_auth = Middleware.basic_auth
|
||||
, validate_name = Middleware.validate_name
|
||||
, media = Middleware.media
|
||||
, expect_json = Middleware.expect_json
|
||||
'use strict';
|
||||
|
||||
const express = require('express');
|
||||
const Error = require('http-errors');
|
||||
const compression = require('compression');
|
||||
const Auth = require('./auth');
|
||||
const Logger = require('./logger');
|
||||
const Config = require('./config');
|
||||
const Middleware = require('./middleware');
|
||||
const Cats = require('./status-cats');
|
||||
const Storage = require('./storage');
|
||||
|
||||
module.exports = function(config_hash) {
|
||||
var config = new Config(config_hash)
|
||||
, storage = new Storage(config)
|
||||
Logger.setup(config_hash.logs);
|
||||
|
||||
var can = function(action) {
|
||||
return function(req, res, next) {
|
||||
if (config['allow_'+action](req.params.package, req.remoteUser)) {
|
||||
next()
|
||||
} else {
|
||||
if (!req.remoteUser) {
|
||||
next(new UError({
|
||||
status: 403,
|
||||
msg: "can't "+action+" restricted package without auth, did you forget 'npm set always-auth true'?",
|
||||
}))
|
||||
} else {
|
||||
next(new UError({
|
||||
status: 403,
|
||||
msg: 'user '+req.remoteUser+' not allowed to '+action+' it'
|
||||
}))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
const config = new Config(config_hash);
|
||||
const storage = new Storage(config);
|
||||
const auth = new Auth(config);
|
||||
const app = express();
|
||||
|
||||
var app = express()
|
||||
// run in production mode by default, just in case
|
||||
// it shouldn't make any difference anyway
|
||||
app.set('env', process.env.NODE_ENV || 'production');
|
||||
|
||||
function error_reporting_middleware(req, res, next) {
|
||||
var calls = 0
|
||||
res.report_error = res.report_error || function(err) {
|
||||
calls++
|
||||
if (err.status && err.status >= 400 && err.status < 600) {
|
||||
if (calls == 1) {
|
||||
res.status(err.status)
|
||||
res.send({error: err.msg || err.message || 'unknown error'})
|
||||
}
|
||||
} else {
|
||||
Logger.logger.error({err: err}, 'unexpected error: @{!err.message}\n@{err.stack}')
|
||||
if (!res.status || !res.send) {
|
||||
Logger.logger.error('this is an error in express.js, please report this')
|
||||
res.destroy()
|
||||
}
|
||||
if (calls == 1) {
|
||||
res.status(500)
|
||||
res.send({error: 'internal server error'})
|
||||
}
|
||||
}
|
||||
}
|
||||
next()
|
||||
}
|
||||
const error_reporting_middleware = function(req, res, next) {
|
||||
res.report_error = res.report_error || function(err) {
|
||||
if (err.status && err.status >= 400 && err.status < 600) {
|
||||
if (!res.headersSent) {
|
||||
res.status(err.status);
|
||||
next({error: err.message || 'unknown error'});
|
||||
}
|
||||
} else {
|
||||
Logger.logger.error( {err: err}
|
||||
, 'unexpected error: @{!err.message}\n@{err.stack}');
|
||||
if (!res.status || !res.send) {
|
||||
Logger.logger.error('this is an error in express.js, please report this');
|
||||
res.destroy();
|
||||
} else if (!res.headersSent) {
|
||||
res.status(500);
|
||||
next({error: 'internal server error'});
|
||||
} else {
|
||||
// socket should be already closed
|
||||
}
|
||||
}
|
||||
};
|
||||
next();
|
||||
};
|
||||
|
||||
app.use(error_reporting_middleware)
|
||||
app.use(Middleware.log_and_etagify)
|
||||
app.use(function(req, res, next) {
|
||||
res.setHeader('X-Powered-By', 'Sinopia')
|
||||
next()
|
||||
})
|
||||
app.use(Cats.middleware)
|
||||
app.use(basic_auth(function(user, pass) {
|
||||
return config.authenticate(user, pass)
|
||||
}))
|
||||
app.use(express.json({strict: false}))
|
||||
app.use(Middleware.log);
|
||||
app.use(error_reporting_middleware);
|
||||
app.use(function(req, res, next) {
|
||||
res.setHeader('X-Powered-By', config.user_agent);
|
||||
next();
|
||||
});
|
||||
app.use(Cats.middleware);
|
||||
app.use(compression());
|
||||
|
||||
// TODO: npm DO NOT support compression :(
|
||||
app.use(express.compress())
|
||||
app.get('/favicon.ico', function(req, res, next) {
|
||||
req.url = '/-/static/favicon.png';
|
||||
next();
|
||||
});
|
||||
|
||||
app.param('package', validate_name)
|
||||
app.param('filename', validate_name)
|
||||
// hook for tests only
|
||||
if (config._debug) {
|
||||
app.get('/-/_debug', function(req, res, next) {
|
||||
let do_gc = typeof(global.gc) !== 'undefined';
|
||||
if (do_gc) {
|
||||
global.gc();
|
||||
}
|
||||
next({
|
||||
pid: process.pid,
|
||||
main: process.mainModule.filename,
|
||||
conf: config.self_path,
|
||||
mem: process.memoryUsage(),
|
||||
gc: do_gc,
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/* app.get('/', function(req, res) {
|
||||
res.send({
|
||||
error: 'unimplemented'
|
||||
})
|
||||
})*/
|
||||
app.use(require('./index-api')(config, auth, storage));
|
||||
|
||||
/* app.get('/-/all', function(req, res) {
|
||||
var https = require('https')
|
||||
var JSONStream = require('JSONStream')
|
||||
var request = require('request')({
|
||||
url: 'https://registry.npmjs.org/-/all',
|
||||
ca: require('./npmsslkeys'),
|
||||
})
|
||||
.pipe(JSONStream.parse('*'))
|
||||
.on('data', function(d) {
|
||||
console.log(d)
|
||||
})
|
||||
})*/
|
||||
if (config.web && config.web.enable === false) {
|
||||
app.get('/', function(req, res, next) {
|
||||
next( Error[404]('web interface is disabled in the config file') );
|
||||
});
|
||||
} else {
|
||||
app.use(require('./index-web')(config, auth, storage));
|
||||
}
|
||||
|
||||
// TODO: anonymous user?
|
||||
app.get('/:package/:version?', can('access'), function(req, res, next) {
|
||||
storage.get_package(req.params.package, function(err, info) {
|
||||
if (err) return next(err)
|
||||
info = utils.filter_tarball_urls(info, req, config)
|
||||
app.get('/*', function(req, res, next) {
|
||||
next( Error[404]('file not found') );
|
||||
});
|
||||
|
||||
var version = req.params.version
|
||||
if (!version) {
|
||||
return res.send(info)
|
||||
}
|
||||
app.use(function(err, req, res, next) {
|
||||
if (Object.prototype.toString.call(err) !== '[object Error]') {
|
||||
return next(err);
|
||||
}
|
||||
if (err.code === 'ECONNABORT' && res.statusCode === 304) {
|
||||
return next();
|
||||
}
|
||||
if (typeof(res.report_error) !== 'function') {
|
||||
// in case of very early error this middleware may not be loaded before error is generated
|
||||
// fixing that
|
||||
error_reporting_middleware(req, res, function() {});
|
||||
}
|
||||
res.report_error(err);
|
||||
});
|
||||
|
||||
if (info.versions[version] != null) {
|
||||
return res.send(info.versions[version])
|
||||
}
|
||||
app.use(Middleware.final);
|
||||
|
||||
if (info['dist-tags'] != null) {
|
||||
if (info['dist-tags'][version] != null) {
|
||||
version = info['dist-tags'][version]
|
||||
if (info.versions[version] != null) {
|
||||
return res.send(info.versions[version])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return next(new UError({
|
||||
status: 404,
|
||||
msg: 'version not found: ' + req.params.version
|
||||
}))
|
||||
})
|
||||
})
|
||||
|
||||
app.get('/:package/-/:filename', can('access'), function(req, res, next) {
|
||||
var stream = storage.get_tarball(req.params.package, req.params.filename)
|
||||
stream.on('error', function(err) {
|
||||
return res.report_error(err)
|
||||
})
|
||||
res.header('content-type', 'application/octet-stream')
|
||||
stream.pipe(res)
|
||||
})
|
||||
|
||||
//app.get('/*', function(req, res) {
|
||||
// proxy.request(req, res)
|
||||
//})
|
||||
|
||||
  // placeholder 'cause npm requires authentication to publish
|
||||
// we do not do any real authentication yet
|
||||
app.post('/_session', cookies.express(), function(req, res) {
|
||||
res.cookies.set('AuthSession', String(Math.random()), {
|
||||
// npmjs.org sets 10h expire
|
||||
expires: new Date(Date.now() + 10*60*60*1000)
|
||||
})
|
||||
res.send({"ok":true,"name":"somebody","roles":[]})
|
||||
})
|
||||
|
||||
app.get('/-/user/:argument', function(req, res, next) {
|
||||
// can't put 'org.couchdb.user' in route address for some reason
|
||||
if (req.params.argument.split(':')[0] !== 'org.couchdb.user') return next('route')
|
||||
res.status(200)
|
||||
return res.send({
|
||||
ok: 'you are authenticated as "' + req.user + '"',
|
||||
})
|
||||
})
|
||||
|
||||
app.put('/-/user/:argument', function(req, res, next) {
|
||||
// can't put 'org.couchdb.user' in route address for some reason
|
||||
if (req.params.argument.split(':')[0] !== 'org.couchdb.user') return next('route')
|
||||
res.status(409)
|
||||
return res.send({
|
||||
error: 'registration is not implemented',
|
||||
})
|
||||
})
|
||||
|
||||
app.put('/-/user/:argument/-rev/*', function(req, res, next) {
|
||||
// can't put 'org.couchdb.user' in route address for some reason
|
||||
if (req.params.argument.split(':')[0] !== 'org.couchdb.user') return next('route')
|
||||
|
||||
if (req.remoteUser == null) {
|
||||
res.status(403)
|
||||
return res.send({
|
||||
error: 'bad username/password, access denied',
|
||||
})
|
||||
}
|
||||
|
||||
res.status(201)
|
||||
return res.send({
|
||||
ok: 'you are authenticated as "' + req.remoteUser + '"',
|
||||
})
|
||||
})
|
||||
|
||||
// publishing a package
|
||||
app.put('/:package/:_rev?/:revision?', can('publish'), media('application/json'), expect_json, function(req, res, next) {
|
||||
if (req.params._rev != null && req.params._rev != '-rev') return next('route')
|
||||
var name = req.params.package
|
||||
|
||||
if (Object.keys(req.body).length == 1 && utils.is_object(req.body.users)) {
|
||||
return next(new UError({
|
||||
// 501 status is more meaningful, but npm doesn't show error message for 5xx
|
||||
status: 404,
|
||||
msg: 'npm star|unstar calls are not implemented',
|
||||
}))
|
||||
}
|
||||
|
||||
try {
|
||||
var metadata = utils.validate_metadata(req.body, name)
|
||||
} catch(err) {
|
||||
return next(new UError({
|
||||
status: 422,
|
||||
msg: 'bad incoming package data',
|
||||
}))
|
||||
}
|
||||
|
||||
if (req.params._rev) {
|
||||
storage.change_package(name, metadata, req.params.revision, function(err) {
|
||||
if (err) return next(err)
|
||||
res.status(201)
|
||||
return res.send({
|
||||
ok: 'package changed'
|
||||
})
|
||||
})
|
||||
} else {
|
||||
storage.add_package(name, metadata, function(err) {
|
||||
if (err) return next(err)
|
||||
res.status(201)
|
||||
return res.send({
|
||||
ok: 'created new package'
|
||||
})
|
||||
})
|
||||
}
|
||||
})
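For context on the `users` check near the top of the publish route: `npm star` PUTs a body whose only key is `users`, which is exactly what that branch rejects with the 404. A hedged sketch of that shape (the payload below is invented for illustration, not copied from npm):

// hypothetical body an `npm star` request sends to PUT /:package;
// the single `users` key is what the route above matches and rejects
var starBody = {
  users: {
    'some-user': true,
  },
}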
|
||||
|
||||
// unpublishing an entire package
|
||||
app.delete('/:package/-rev/*', can('publish'), function(req, res, next) {
|
||||
storage.remove_package(req.params.package, function(err) {
|
||||
if (err) return next(err)
|
||||
res.status(201)
|
||||
return res.send({
|
||||
ok: 'package removed'
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// removing a tarball
|
||||
app.delete('/:package/-/:filename/-rev/:revision', can('publish'), function(req, res, next) {
|
||||
storage.remove_tarball(req.params.package, req.params.filename, req.params.revision, function(err) {
|
||||
if (err) return next(err)
|
||||
res.status(201)
|
||||
return res.send({
|
||||
ok: 'tarball removed'
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// uploading package tarball
|
||||
app.put('/:package/-/:filename/*', can('publish'), media('application/octet-stream'), function(req, res, next) {
|
||||
var name = req.params.package
|
||||
|
||||
var stream = storage.add_tarball(name, req.params.filename)
|
||||
req.pipe(stream)
|
||||
|
||||
// checking if end event came before closing
|
||||
var complete = false
|
||||
req.on('end', function() {
|
||||
complete = true
|
||||
stream.done()
|
||||
})
|
||||
req.on('close', function() {
|
||||
if (!complete) {
|
||||
stream.abort()
|
||||
}
|
||||
})
|
||||
|
||||
stream.on('error', function(err) {
|
||||
return res.report_error(err)
|
||||
})
|
||||
stream.on('success', function() {
|
||||
res.status(201)
|
||||
return res.send({
|
||||
ok: 'tarball uploaded successfully'
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// adding a version
|
||||
app.put('/:package/:version/-tag/:tag', can('publish'), media('application/json'), expect_json, function(req, res, next) {
|
||||
var name = req.params.package
|
||||
, version = req.params.version
|
||||
, tag = req.params.tag
|
||||
|
||||
storage.add_version(name, version, req.body, tag, function(err) {
|
||||
if (err) return next(err)
|
||||
res.status(201)
|
||||
return res.send({
|
||||
ok: 'package published'
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
app.use(app.router)
|
||||
app.use(function(err, req, res, next) {
|
||||
if (typeof(res.report_error) !== 'function') {
|
||||
// in case of very early error this middleware may not be loaded before error is generated
|
||||
// fixing that
|
||||
error_reporting_middleware(req, res, function(){})
|
||||
}
|
||||
res.report_error(err)
|
||||
})
|
||||
|
||||
return app
|
||||
}
|
||||
return app;
|
||||
};
|
||||
|
||||
|
||||
253
lib/local-fs.js
@@ -1,253 +0,0 @@
|
||||
var fs = require('fs')
|
||||
, fsExt = require('fs-ext')
|
||||
, Path = require('path')
|
||||
, mystreams = require('./streams')
|
||||
, Logger = require('./logger')
|
||||
, FSError = require('./error').FSError
|
||||
|
||||
function make_directories(dest, cb) {
|
||||
var dir = Path.dirname(dest)
|
||||
if (dir === '.' || dir === '..') return cb()
|
||||
fs.mkdir(dir, function(err) {
|
||||
if (err && err.code === 'ENOENT') {
|
||||
make_directories(dir, function() {
|
||||
fs.mkdir(dir, cb)
|
||||
})
|
||||
} else {
|
||||
cb()
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
function write(dest, data, cb) {
|
||||
var safe_write = function(cb) {
|
||||
var tmpname = dest + '.tmp' + String(Math.random()).substr(2)
|
||||
fs.writeFile(tmpname, data, function(err) {
|
||||
if (err) return cb(err)
|
||||
return fs.rename(tmpname, dest, cb)
|
||||
})
|
||||
}
|
||||
|
||||
safe_write(function(err) {
|
||||
if (err && err.code === 'ENOENT') {
|
||||
make_directories(dest, function() {
|
||||
safe_write(cb)
|
||||
})
|
||||
} else {
|
||||
cb(err)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
function write_stream(name) {
|
||||
var stream = new mystreams.UploadTarballStream()
|
||||
|
||||
var _ended = 0
|
||||
stream.on('end', function() {
|
||||
_ended = 1
|
||||
})
|
||||
|
||||
fs.exists(name, function(exists) {
|
||||
if (exists) return stream.emit('error', new FSError('EEXISTS'))
|
||||
|
||||
var tmpname = name + '.tmp-'+String(Math.random()).replace(/^0\./, '')
|
||||
, file = fs.createWriteStream(tmpname)
|
||||
, opened = false
|
||||
stream.pipe(file)
|
||||
|
||||
stream.done = function() {
|
||||
function onend() {
|
||||
file.on('close', function() {
|
||||
fs.rename(tmpname, name, function(err) {
|
||||
if (err) stream.emit('error', err)
|
||||
stream.emit('success')
|
||||
})
|
||||
})
|
||||
file.destroySoon()
|
||||
}
|
||||
if (_ended) {
|
||||
onend()
|
||||
} else {
|
||||
stream.on('end', onend)
|
||||
}
|
||||
}
|
||||
stream.abort = function() {
|
||||
if (opened) {
|
||||
opened = false
|
||||
file.on('close', function() {
|
||||
fs.unlink(tmpname, function(){})
|
||||
})
|
||||
}
|
||||
file.destroySoon()
|
||||
}
|
||||
file.on('open', function() {
|
||||
opened = true
|
||||
// re-emitting open because it's handled in storage.js
|
||||
stream.emit('open')
|
||||
})
|
||||
file.on('error', function(err) {
|
||||
stream.emit('error', err)
|
||||
})
|
||||
})
|
||||
return stream
|
||||
}
|
||||
|
||||
function read_stream(name, stream, callback) {
|
||||
return fs.createReadStream(name)
|
||||
}
|
||||
|
||||
function create(name, contents, callback) {
|
||||
fs.exists(name, function(exists) {
|
||||
if (exists) return callback(new FSError('EEXISTS'))
|
||||
write(name, contents, callback)
|
||||
})
|
||||
}
|
||||
|
||||
function update(name, contents, callback) {
|
||||
fs.exists(name, function(exists) {
|
||||
if (!exists) return callback(new FSError('ENOENT'))
|
||||
write(name, contents, callback)
|
||||
})
|
||||
}
|
||||
|
||||
function read(name, callback) {
|
||||
fs.readFile(name, callback)
|
||||
}
|
||||
|
||||
// open and flock with exponential backoff
|
||||
function open_flock(name, opmod, flmod, tries, backoff, cb) {
|
||||
fs.open(name, opmod, function(err, fd) {
|
||||
if (err) return cb(err, fd)
|
||||
|
||||
fsExt.flock(fd, flmod, function(err) {
|
||||
if (err) {
|
||||
if (!tries) {
|
||||
fs.close(fd, function() {
|
||||
cb(err)
|
||||
})
|
||||
} else {
|
||||
fs.close(fd, function() {
|
||||
setTimeout(function() {
|
||||
open_flock(name, opmod, flmod, tries-1, backoff*2, cb)
|
||||
}, backoff)
|
||||
})
|
||||
}
|
||||
} else {
|
||||
cb(null, fd)
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
// this function neither unlocks file nor closes it
|
||||
// it'll have to be done manually later
|
||||
function lock_and_read(name, callback) {
|
||||
open_flock(name, 'r', 'exnb', 4, 10, function(err, fd) {
|
||||
if (err) return callback(err, fd)
|
||||
|
||||
fs.fstat(fd, function(err, st) {
|
||||
if (err) return callback(err, fd)
|
||||
|
||||
var buffer = new Buffer(st.size)
|
||||
fs.read(fd, buffer, 0, st.size, null, function(err, bytesRead, buffer) {
|
||||
if (bytesRead != st.size) return callback(new Error('st.size != bytesRead'), fd)
|
||||
|
||||
callback(null, fd, buffer)
|
||||
})
|
||||
})
|
||||
})
|
||||
}
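Because lock_and_read, per the comment above, neither unlocks nor closes the file, the caller has to release it. A minimal usage sketch under that assumption (the path is invented, and `fs` is the module required at the top of this file):

// hypothetical caller: read the file under an exclusive lock, then close the
// descriptor, which also releases the flock taken by open_flock
lock_and_read('./storage/foo/package.json', function(err, fd, buffer) {
  if (err) return console.error('lock failed:', err.message)
  console.log('read ' + buffer.length + ' bytes while holding the lock')
  fs.close(fd, function() {})
})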
|
||||
|
||||
function Storage(path) {
|
||||
this.path = path
|
||||
this.logger = Logger.logger.child({sub: 'fs'})
|
||||
try {
|
||||
fs.mkdirSync(path)
|
||||
this.logger.warn({path: path}, 'created new packages directory: @{path}')
|
||||
} catch(err) {
|
||||
if (err.code !== 'EEXIST') throw new Error(err)
|
||||
}
|
||||
}
|
||||
|
||||
Storage.prototype.read = function(name, cb) {
|
||||
read(this.path + '/' + name, cb)
|
||||
}
|
||||
|
||||
Storage.prototype.read_json = function(name, cb) {
|
||||
read(this.path + '/' + name, function(err, res) {
|
||||
if (err) return cb(err)
|
||||
|
||||
var args = []
|
||||
try {
|
||||
args = [null, JSON.parse(res.toString('utf8'))]
|
||||
} catch(err) {
|
||||
args = [err]
|
||||
}
|
||||
cb.apply(null, args)
|
||||
})
|
||||
}
|
||||
|
||||
Storage.prototype.lock_and_read = function(name, cb) {
|
||||
lock_and_read(this.path + '/' + name, cb)
|
||||
}
|
||||
|
||||
Storage.prototype.lock_and_read_json = function(name, cb) {
|
||||
lock_and_read(this.path + '/' + name, function(err, fd, res) {
|
||||
if (err) return cb(err, fd)
|
||||
|
||||
var args = []
|
||||
try {
|
||||
args = [null, fd, JSON.parse(res.toString('utf8'))]
|
||||
} catch(err) {
|
||||
args = [err, fd]
|
||||
}
|
||||
cb.apply(null, args)
|
||||
})
|
||||
}
|
||||
|
||||
Storage.prototype.path_to = function(file) {
|
||||
return this.path + '/' + file
|
||||
}
|
||||
|
||||
Storage.prototype.create = function(name, value, cb) {
|
||||
create(this.path + '/' + name, value, cb)
|
||||
}
|
||||
|
||||
Storage.prototype.create_json = function(name, value, cb) {
|
||||
create(this.path + '/' + name, JSON.stringify(value, null, '\t'), cb)
|
||||
}
|
||||
|
||||
Storage.prototype.update = function(name, value, cb) {
|
||||
update(this.path + '/' + name, value, cb)
|
||||
}
|
||||
|
||||
Storage.prototype.update_json = function(name, value, cb) {
|
||||
update(this.path + '/' + name, JSON.stringify(value, null, '\t'), cb)
|
||||
}
|
||||
|
||||
Storage.prototype.write = function(name, value, cb) {
|
||||
write(this.path + '/' + name, value, cb)
|
||||
}
|
||||
|
||||
Storage.prototype.write_json = function(name, value, cb) {
|
||||
write(this.path + '/' + name, JSON.stringify(value, null, '\t'), cb)
|
||||
}
|
||||
|
||||
Storage.prototype.write_stream = function(name, value, cb) {
|
||||
return write_stream(this.path + '/' + name, value, cb)
|
||||
}
|
||||
|
||||
Storage.prototype.read_stream = function(name, cb) {
|
||||
return read_stream(this.path + '/' + name, cb)
|
||||
}
|
||||
|
||||
Storage.prototype.unlink = function(name, cb) {
|
||||
fs.unlink(this.path + '/' + name, cb)
|
||||
}
|
||||
|
||||
Storage.prototype.rmdir = function(name, cb) {
|
||||
fs.rmdir(this.path + '/' + name, cb)
|
||||
}
|
||||
|
||||
module.exports = Storage
|
||||
|
||||
@@ -1,479 +0,0 @@
|
||||
var fs = require('fs')
|
||||
, semver = require('semver')
|
||||
, Path = require('path')
|
||||
, crypto = require('crypto')
|
||||
, fs_storage = require('./local-fs')
|
||||
, UError = require('./error').UserError
|
||||
, utils = require('./utils')
|
||||
, mystreams = require('./streams')
|
||||
, Logger = require('./logger')
|
||||
, info_file = 'package.json'
|
||||
|
||||
//
|
||||
// Implements Storage interface
|
||||
// (same for storage.js, local-storage.js, up-storage.js)
|
||||
//
|
||||
function Storage(config) {
|
||||
if (!(this instanceof Storage)) return new Storage(config)
|
||||
this.config = config
|
||||
var path = Path.resolve(Path.dirname(this.config.self_path), this.config.storage)
|
||||
this.storage = new fs_storage(path)
|
||||
this.logger = Logger.logger.child({sub: 'fs'})
|
||||
return this
|
||||
}
|
||||
|
||||
// returns the minimal package file
|
||||
function get_boilerplate(name) {
|
||||
return {
|
||||
// standard things
|
||||
name: name,
|
||||
versions: {},
|
||||
'dist-tags': {},
|
||||
|
||||
// our own object
|
||||
'_distfiles': {},
|
||||
'_attachments': {},
|
||||
'_uplinks': {},
|
||||
}
|
||||
}
|
||||
|
||||
Storage.prototype._internal_error = function(err, file, msg) {
|
||||
this.logger.error( {err: err, file: this.storage.path_to(file)}
|
||||
, msg + ' @{file}: @{!err.message}'
|
||||
)
|
||||
return new UError({
|
||||
status: 500,
|
||||
msg: 'internal server error'
|
||||
})
|
||||
}
|
||||
|
||||
Storage.prototype.add_package = function(name, metadata, callback) {
|
||||
this.storage.create_json(name + '/' + info_file, get_boilerplate(name), function(err) {
|
||||
if (err && err.code === 'EEXISTS') {
|
||||
return callback(new UError({
|
||||
status: 409,
|
||||
msg: 'this package is already present'
|
||||
}))
|
||||
}
|
||||
callback()
|
||||
})
|
||||
}
|
||||
|
||||
Storage.prototype.remove_package = function(name, callback) {
|
||||
var self = this
|
||||
self.logger.info({name: name}, 'unpublishing @{name} (all)')
|
||||
self.storage.read_json(name + '/' + info_file, function(err, data) {
|
||||
if (err) {
|
||||
if (err.code === 'ENOENT') {
|
||||
return callback(new UError({
|
||||
status: 404,
|
||||
msg: 'no such package available',
|
||||
}))
|
||||
} else {
|
||||
return callback(err)
|
||||
}
|
||||
}
|
||||
self._normalize_package(data)
|
||||
|
||||
self.storage.unlink(name + '/' + info_file, function(err) {
|
||||
if (err) return callback(err)
|
||||
|
||||
var files = Object.keys(data._attachments)
|
||||
|
||||
function unlinkNext(cb) {
|
||||
if (files.length === 0) return cb()
|
||||
|
||||
var file = files.shift()
|
||||
self.storage.unlink(name + '/' + file, function() {
|
||||
unlinkNext(cb)
|
||||
})
|
||||
}
|
||||
|
||||
unlinkNext(function() {
|
||||
// try to unlink the directory, but ignore errors because it can fail
|
||||
self.storage.rmdir(name, function(err) {
|
||||
callback()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
Storage.prototype._read_create_package = function(name, callback) {
|
||||
var self = this
|
||||
, file = name + '/' + info_file
|
||||
self.storage.read_json(file, function(err, data) {
|
||||
// TODO: race condition
|
||||
if (err) {
|
||||
if (err.code === 'ENOENT') {
|
||||
// if package doesn't exist, we create it here
|
||||
data = get_boilerplate(name)
|
||||
} else {
|
||||
return callback(self._internal_error(err, file, 'error reading'))
|
||||
}
|
||||
}
|
||||
self._normalize_package(data)
|
||||
callback(null, data)
|
||||
})
|
||||
}
|
||||
|
||||
// synchronize remote package info with the local one
|
||||
// TODO: readfile called twice
|
||||
Storage.prototype.update_versions = function(name, newdata, callback) {
|
||||
var self = this
|
||||
self._read_create_package(name, function(err, data) {
|
||||
if (err) return callback(err)
|
||||
|
||||
var change = false
|
||||
for (var ver in newdata.versions) {
|
||||
if (data.versions[ver] == null) {
|
||||
var verdata = newdata.versions[ver]
|
||||
|
||||
// why does anyone need to keep that in database?
|
||||
delete verdata.readme
|
||||
|
||||
change = true
|
||||
data.versions[ver] = verdata
|
||||
|
||||
if (verdata.dist && verdata.dist.tarball) {
|
||||
var url = utils.parse_tarball_url(
|
||||
verdata.dist.__sinopia_orig_tarball || verdata.dist.tarball
|
||||
)
|
||||
|
||||
// we do NOT overwrite any existing records
|
||||
if (url != null && data._distfiles[url.filename] == null) {
|
||||
data._distfiles[url.filename] = {
|
||||
url: verdata.dist.__sinopia_orig_tarball || verdata.dist.tarball,
|
||||
sha: verdata.dist.shasum,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for (var tag in newdata['dist-tags']) {
|
||||
if (!Array.isArray(data['dist-tags'][tag]) || data['dist-tags'][tag].length != newdata['dist-tags'][tag].length) {
|
||||
// backward compat
|
||||
var need_change = true
|
||||
} else {
|
||||
for (var i=0; i<data['dist-tags'][tag].length; i++) {
|
||||
if (data['dist-tags'][tag][i] != newdata['dist-tags'][tag][i]) {
|
||||
var need_change = true
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (need_change) {
|
||||
change = true
|
||||
data['dist-tags'][tag] = newdata['dist-tags'][tag]
|
||||
}
|
||||
}
|
||||
for (var up in newdata._uplinks) {
|
||||
var need_change =
|
||||
!utils.is_object(data._uplinks[up]) || (newdata._uplinks[up].etag !== data._uplinks[up].etag)
|
||||
|
||||
if (need_change) {
|
||||
change = true
|
||||
data._uplinks[up] = newdata._uplinks[up]
|
||||
}
|
||||
}
|
||||
|
||||
if (change) {
|
||||
self.logger.debug('updating package info')
|
||||
self._write_package(name, data, callback)
|
||||
} else {
|
||||
callback()
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
Storage.prototype.add_version = function(name, version, metadata, tag, callback) {
|
||||
var self = this
|
||||
self.update_package(name, function updater(data, cb) {
|
||||
// why does anyone need to keep that in database?
|
||||
delete metadata.readme
|
||||
|
||||
if (data.versions[version] != null) {
|
||||
return cb(new UError({
|
||||
status: 409,
|
||||
msg: 'this version already present'
|
||||
}))
|
||||
}
|
||||
|
||||
// if the uploaded tarball has a different shasum, it's very likely that we have some kind of error
|
||||
if (utils.is_object(metadata.dist) && typeof(metadata.dist.tarball) === 'string') {
|
||||
var tarball = metadata.dist.tarball.replace(/.*\//, '')
|
||||
if (utils.is_object(data._attachments[tarball])) {
|
||||
if (data._attachments[tarball].shasum != null && metadata.dist.shasum != null) {
|
||||
if (data._attachments[tarball].shasum != metadata.dist.shasum) {
|
||||
return cb(new UError({
|
||||
status: 400,
|
||||
msg: 'shasum error, ' + data._attachments[tarball].shasum + ' != ' + metadata.dist.shasum,
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
data._attachments[tarball].version = version
|
||||
}
|
||||
}
|
||||
|
||||
data.versions[version] = metadata
|
||||
data['dist-tags'][tag] = version
|
||||
cb()
|
||||
}, callback)
|
||||
}
|
||||
|
||||
// currently supports unpublishing only
|
||||
Storage.prototype.change_package = function(name, metadata, revision, callback) {
|
||||
var self = this
|
||||
|
||||
if (!utils.is_object(metadata.versions) || !utils.is_object(metadata['dist-tags'])) {
|
||||
return callback(new UError({
|
||||
status: 422,
|
||||
msg: 'bad data',
|
||||
}))
|
||||
}
|
||||
|
||||
self.update_package(name, function updater(data, cb) {
|
||||
for (var ver in data.versions) {
|
||||
if (metadata.versions[ver] == null) {
|
||||
self.logger.info({name: name, version: ver}, 'unpublishing @{name}@@{version}')
|
||||
delete data.versions[ver]
|
||||
|
||||
for (var file in data._attachments) {
|
||||
if (data._attachments[file].version === ver) {
|
||||
delete data._attachments[file].version
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
data['dist-tags'] = metadata['dist-tags']
|
||||
cb()
|
||||
}, function(err) {
|
||||
if (err) return callback(err)
|
||||
callback()
|
||||
})
|
||||
}
|
||||
|
||||
Storage.prototype.remove_tarball = function(name, filename, revision, callback) {
|
||||
var self = this
|
||||
|
||||
self.update_package(name, function updater(data, cb) {
|
||||
if (data._attachments[filename]) {
|
||||
delete data._attachments[filename]
|
||||
cb()
|
||||
} else {
|
||||
cb(new UError({
|
||||
status: 404,
|
||||
msg: 'no such file available',
|
||||
}))
|
||||
}
|
||||
}, function(err) {
|
||||
if (err) return callback(err)
|
||||
self.storage.unlink(name + '/' + filename, callback)
|
||||
})
|
||||
}
|
||||
|
||||
Storage.prototype.add_tarball = function(name, filename) {
|
||||
var stream = new mystreams.UploadTarballStream()
|
||||
, _transform = stream._transform
|
||||
, length = 0
|
||||
, shasum = crypto.createHash('sha1')
|
||||
|
||||
stream._transform = function(data) {
|
||||
shasum.update(data)
|
||||
length += data.length
|
||||
_transform.apply(stream, arguments)
|
||||
}
|
||||
|
||||
var self = this
|
||||
if (name === info_file || name === '__proto__') {
|
||||
stream.emit('error', new UError({
|
||||
status: 403,
|
||||
msg: 'can\'t use this filename'
|
||||
}))
|
||||
}
|
||||
|
||||
var wstream = this.storage.write_stream(name + '/' + filename)
|
||||
|
||||
wstream.on('error', function(err) {
|
||||
if (err.code === 'EEXISTS') {
|
||||
stream.emit('error', new UError({
|
||||
status: 409,
|
||||
msg: 'this tarball is already present'
|
||||
}))
|
||||
} else if (err.code === 'ENOENT') {
|
||||
// check if package exists to throw an appropriate message
|
||||
self.get_package(name, function(_err, res) {
|
||||
if (_err) {
|
||||
stream.emit('error', _err)
|
||||
} else {
|
||||
stream.emit('error', err)
|
||||
}
|
||||
})
|
||||
} else {
|
||||
stream.emit('error', err)
|
||||
}
|
||||
})
|
||||
|
||||
wstream.on('open', function() {
|
||||
// re-emitting open because it's handled in storage.js
|
||||
stream.emit('open')
|
||||
})
|
||||
wstream.on('success', function() {
|
||||
self.update_package(name, function updater(data, cb) {
|
||||
data._attachments[filename] = {
|
||||
shasum: shasum.digest('hex'),
|
||||
}
|
||||
cb()
|
||||
}, function(err) {
|
||||
if (err) {
|
||||
stream.emit('error', err)
|
||||
} else {
|
||||
stream.emit('success')
|
||||
}
|
||||
})
|
||||
})
|
||||
stream.abort = function() {
|
||||
wstream.abort()
|
||||
}
|
||||
stream.done = function() {
|
||||
if (!length) {
|
||||
stream.emit('error', new UError({
|
||||
status: 422,
|
||||
msg: 'refusing to accept zero-length file'
|
||||
}))
|
||||
wstream.abort()
|
||||
} else {
|
||||
wstream.done()
|
||||
}
|
||||
}
|
||||
stream.pipe(wstream)
|
||||
|
||||
return stream
|
||||
}
|
||||
|
||||
Storage.prototype.get_tarball = function(name, filename, callback) {
|
||||
var stream = new mystreams.ReadTarballStream()
|
||||
stream.abort = function() {
|
||||
rstream.close()
|
||||
}
|
||||
|
||||
var rstream = this.storage.read_stream(name + '/' + filename)
|
||||
rstream.on('error', function(err) {
|
||||
if (err && err.code === 'ENOENT') {
|
||||
stream.emit('error', new UError({
|
||||
status: 404,
|
||||
msg: 'no such file available',
|
||||
}))
|
||||
} else {
|
||||
stream.emit('error', err)
|
||||
}
|
||||
})
|
||||
rstream.on('open', function() {
|
||||
// re-emitting open because it's handled in storage.js
|
||||
stream.emit('open')
|
||||
rstream.pipe(stream)
|
||||
})
|
||||
return stream
|
||||
}
|
||||
|
||||
Storage.prototype.get_package = function(name, callback) {
|
||||
var self = this
|
||||
, file = name + '/' + info_file
|
||||
|
||||
self.storage.read_json(file, function(err, result) {
|
||||
if (err) {
|
||||
if (err.code === 'ENOENT') {
|
||||
return callback(new UError({
|
||||
status: 404,
|
||||
msg: 'no such package available'
|
||||
}))
|
||||
} else {
|
||||
return callback(self._internal_error(err, file, 'error reading'))
|
||||
}
|
||||
}
|
||||
self._normalize_package(result)
|
||||
callback(err, result)
|
||||
})
|
||||
}
|
||||
|
||||
//
|
||||
// This function allows the package to be updated in a thread-safe way
|
||||
//
|
||||
// Arguments:
|
||||
// - name - package name
|
||||
// - updateFn - function(package, cb) - update function
|
||||
// - callback - callback that gets invoked after it's all updated
|
||||
//
|
||||
// Algorithm:
|
||||
// 1. lock package.json for writing
|
||||
// 2. read package.json
|
||||
// 3. updateFn(pkg, cb), and wait for cb
|
||||
// 4. write package.json.tmp
|
||||
// 5. move package.json.tmp to package.json
|
||||
// 6. callback(err?)
|
||||
//
|
||||
Storage.prototype.update_package = function(name, updateFn, _callback) {
|
||||
var self = this
|
||||
, file = name + '/' + info_file
|
||||
self.storage.lock_and_read_json(file, function(err, fd, json) {
|
||||
self.logger.debug({file: file}, 'locking @{file}')
|
||||
|
||||
function callback() {
|
||||
self.logger.debug({file: file}, 'unlocking @{file}')
|
||||
var _args = arguments
|
||||
if (fd) {
|
||||
fs.close(fd, function(err) {
|
||||
if (err) return _callback(err)
|
||||
_callback.apply(null, _args)
|
||||
})
|
||||
} else {
|
||||
_callback.apply(null, _args)
|
||||
}
|
||||
}
|
||||
|
||||
if (err) {
|
||||
if (err.code === 'EAGAIN') {
|
||||
return callback(new UError({
|
||||
status: 503,
|
||||
msg: 'resource temporarily unavailable'
|
||||
}))
|
||||
} else if (err.code === 'ENOENT') {
|
||||
return callback(new UError({
|
||||
status: 404,
|
||||
msg: 'no such package available',
|
||||
}))
|
||||
} else {
|
||||
return callback(err)
|
||||
}
|
||||
}
|
||||
|
||||
self._normalize_package(json)
|
||||
updateFn(json, function(err) {
|
||||
if (err) return callback(err)
|
||||
|
||||
self._write_package(name, json, callback)
|
||||
})
|
||||
})
|
||||
}
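To make the lock/read/update/write cycle described above concrete, here is a minimal sketch of a caller; the package name, tag, and `storage` instance are illustrative, not taken from the diff:

// hypothetical caller: add a dist-tag while holding the package.json lock;
// updateFn mutates the locked JSON and cb() lets _write_package persist it
storage.update_package('my-package', function updater(data, cb) {
  data['dist-tags']['beta'] = '1.2.3'
  cb() // calling cb(err) instead would abort without writing
}, function(err) {
  if (err) return console.error('update failed:', err.message)
  console.log('package.json rewritten with a bumped _rev')
})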
|
||||
|
||||
Storage.prototype._normalize_package = function(pkg) {
|
||||
;['versions', 'dist-tags', '_distfiles', '_attachments', '_uplinks'].forEach(function(key) {
|
||||
if (!utils.is_object(pkg[key])) pkg[key] = {}
|
||||
})
|
||||
if (typeof(pkg._rev) !== 'string') pkg._rev = '0-0000000000000000'
|
||||
}
|
||||
|
||||
Storage.prototype._write_package = function(name, json, callback) {
|
||||
|
||||
// calculate revision a la couchdb
|
||||
if (typeof(json._rev) !== 'string') json._rev = '0-0000000000000000'
|
||||
var rev = json._rev.split('-')
|
||||
json._rev = ((+rev[0] || 0) + 1) + '-' + crypto.pseudoRandomBytes(8).toString('hex')
|
||||
|
||||
this.storage.write_json(name + '/' + info_file, json, callback)
|
||||
}
|
||||
|
||||
module.exports = Storage
|
||||
|
||||
221
lib/logger.js
@@ -1,78 +1,99 @@
|
||||
var Logger = require('bunyan')
|
||||
, Stream = require('stream')
|
||||
, utils = require('./utils')
|
||||
'use strict';
|
||||
|
||||
const Logger = require('bunyan');
|
||||
const Error = require('http-errors');
|
||||
const Stream = require('stream');
|
||||
const chalk = require('chalk');
|
||||
const Utils = require('./utils');
|
||||
const pkgJSON = require('../package.json');
|
||||
|
||||
/**
|
||||
* Match the level based on the bunyan severity scale
* @param {*} x severity level
* @return {String} severity level
|
||||
*/
|
||||
function getlvl(x) {
|
||||
if (x < 15) {
|
||||
return 'trace'
|
||||
} else if (x < 25) {
|
||||
return 'debug'
|
||||
} else if (x < 35) {
|
||||
return 'info'
|
||||
} else if (x == 35) {
|
||||
return 'http'
|
||||
} else if (x < 45) {
|
||||
return 'warn'
|
||||
} else if (x < 55) {
|
||||
return 'error'
|
||||
} else {
|
||||
return 'fatal'
|
||||
switch(true) {
|
||||
case x < 15 : return 'trace';
|
||||
case x < 25 : return 'debug';
|
||||
case x < 35 : return 'info';
|
||||
case x == 35 : return 'http';
|
||||
case x < 45 : return 'warn';
|
||||
case x < 55 : return 'error';
|
||||
default : return 'fatal';
|
||||
}
|
||||
}
|
||||
|
||||
module.exports.setup = function(logs) {
|
||||
var streams = []
|
||||
if (!logs) logs = [{ type: 'stdout', format: 'pretty', level: 'http' }]
|
||||
/**
|
||||
* Set up the Bunyan logger
|
||||
* @param {*} logs list of log configuration
|
||||
*/
|
||||
function setup(logs) {
|
||||
let streams = [];
|
||||
if (logs == null) {
|
||||
logs = [{type: 'stdout', format: 'pretty', level: 'http'}];
|
||||
}
|
||||
|
||||
logs.forEach(function(target) {
|
||||
var stream = new Stream()
|
||||
stream.writable = true
|
||||
// create a stream for each log configuration
|
||||
const stream = new Stream();
|
||||
stream.writable = true;
|
||||
|
||||
if (target.type === 'stdout' || target.type === 'stderr') {
|
||||
// destination stream
|
||||
var dest = target.type === 'stdout' ? process.stdout : process.stderr
|
||||
const dest = target.type === 'stdout' ? process.stdout : process.stderr;
|
||||
|
||||
if (target.format === 'pretty') {
|
||||
// making fake stream for pretty-printing
|
||||
stream.write = function(obj) {
|
||||
dest.write(print(obj.level, obj.msg, obj, dest.isTTY) + "\n")
|
||||
}
|
||||
dest.write(print(obj.level, obj.msg, obj, dest.isTTY) + '\n');
|
||||
};
|
||||
} else if (target.format === 'pretty-timestamped') {
|
||||
// making fake stream for pretty-printing
|
||||
stream.write = function(obj) {
|
||||
dest.write(obj.time.toISOString() + print(obj.level, obj.msg, obj, dest.isTTY) + '\n');
|
||||
};
|
||||
} else {
|
||||
stream.write = function(obj) {
|
||||
dest.write(JSON.stringify(obj, Logger.safeCycles()) + "\n")
|
||||
}
|
||||
dest.write(JSON.stringify(obj, Logger.safeCycles()) + '\n');
|
||||
};
|
||||
}
|
||||
} else if (target.type === 'file') {
|
||||
var dest = require('fs').createWriteStream(target.path, {flags: 'a', encoding: 'utf8'})
|
||||
dest.on('error', function (err) {
|
||||
Logger.emit('error', err)
|
||||
})
|
||||
const dest = require('fs').createWriteStream(target.path, {flags: 'a', encoding: 'utf8'});
|
||||
dest.on('error', function(err) {
|
||||
Logger.emit('error', err);
|
||||
});
|
||||
stream.write = function(obj) {
|
||||
dest.write(JSON.stringify(obj, Logger.safeCycles()) + "\n")
|
||||
}
|
||||
if (target.format === 'pretty') {
|
||||
dest.write(print(obj.level, obj.msg, obj, false) + '\n');
|
||||
} else {
|
||||
dest.write(JSON.stringify(obj, Logger.safeCycles()) + '\n');
|
||||
}
|
||||
};
|
||||
} else {
|
||||
throw new Error('wrong target type for a log')
|
||||
throw Error('wrong target type for a log');
|
||||
}
|
||||
|
||||
if (target.level === 'http') target.level = 35
|
||||
if (target.level === 'http') target.level = 35;
|
||||
streams.push({
|
||||
type: "raw",
|
||||
type: 'raw',
|
||||
level: target.level || 35,
|
||||
stream: stream,
|
||||
})
|
||||
})
|
||||
});
|
||||
});
|
||||
|
||||
var logger = new Logger({
|
||||
name: 'sinopia',
|
||||
// bunyan default configuration
|
||||
const logger = new Logger({
|
||||
name: pkgJSON.name,
|
||||
streams: streams,
|
||||
serializers: {
|
||||
err: Logger.stdSerializers.err,
|
||||
req: Logger.stdSerializers.req,
|
||||
res: Logger.stdSerializers.res,
|
||||
},
|
||||
})
|
||||
});
|
||||
|
||||
module.exports.logger = logger
|
||||
module.exports.logger = logger;
|
||||
}
|
||||
|
||||
// adopted from socket.io
|
||||
@@ -80,76 +101,96 @@ module.exports.setup = function(logs) {
|
||||
// so it might look weird
|
||||
|
||||
// level to color
|
||||
var levels = {
|
||||
fatal: 31,
|
||||
error: 31,
|
||||
warn: 33,
|
||||
http: 35,
|
||||
info: 36,
|
||||
debug: 90,
|
||||
trace: 90,
|
||||
}
|
||||
|
||||
var max = 0
|
||||
for (var l in levels) {
|
||||
max = Math.max(max, l.length)
|
||||
const levels = {
|
||||
fatal: chalk.red,
|
||||
error: chalk.red,
|
||||
warn: chalk.yellow,
|
||||
http: chalk.magenta,
|
||||
info: chalk.cyan,
|
||||
debug: chalk.black,
|
||||
trace: chalk.white,
|
||||
};
|
||||
|
||||
let max = 0;
|
||||
for (let l in levels) {
|
||||
if (Object.prototype.hasOwnProperty.call(levels, l)) {
|
||||
max = Math.max(max, l.length);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Pad a string with whitespace up to the length of the longest level name
* @param {*} str the string to pad (typically a level name)
* @return {String}
|
||||
*/
|
||||
function pad(str) {
|
||||
if (str.length < max) return str + new Array(max - str.length + 1).join(' ')
|
||||
return str
|
||||
if (str.length < max) {
|
||||
return str + ' '.repeat(max - str.length);
|
||||
}
|
||||
return str;
|
||||
}
|
||||
|
||||
var subsystems = [{
|
||||
in: '\033[32m<--\033[39m',
|
||||
out: '\033[33m-->\033[39m',
|
||||
fs: '\033[90m-=-\033[39m',
|
||||
default: '\033[34m---\033[39m',
|
||||
}, {
|
||||
in: '<--',
|
||||
out: '-->',
|
||||
fs: '-=-',
|
||||
default: '---',
|
||||
}]
|
||||
|
||||
/**
|
||||
* Apply colors to a string based on level parameters.
|
||||
* @param {*} type
|
||||
* @param {*} msg
|
||||
* @param {*} obj
|
||||
* @param {*} colors
|
||||
* @return {String}
|
||||
*/
|
||||
function print(type, msg, obj, colors) {
|
||||
if (typeof type === 'number') type = getlvl(type)
|
||||
var finalmsg = msg.replace(/@{(!?[$A-Za-z_][$0-9A-Za-z\._]*)}/g, function(_, name) {
|
||||
var str = obj, is_error
|
||||
if (typeof type === 'number') {
|
||||
type = getlvl(type);
|
||||
}
|
||||
let finalmsg = msg.replace(/@{(!?[$A-Za-z_][$0-9A-Za-z\._]*)}/g, function(_, name) {
|
||||
let str = obj;
|
||||
let is_error;
|
||||
if (name[0] === '!') {
|
||||
name = name.substr(1)
|
||||
is_error = true
|
||||
name = name.substr(1);
|
||||
is_error = true;
|
||||
}
|
||||
|
||||
var _ref = name.split('.')
|
||||
for (var _i = 0; _i < _ref.length; _i++) {
|
||||
var id = _ref[_i]
|
||||
if (utils.is_object(str) || Array.isArray(str)) {
|
||||
str = str[id]
|
||||
let _ref = name.split('.');
|
||||
for (let _i = 0; _i < _ref.length; _i++) {
|
||||
let id = _ref[_i];
|
||||
if (Utils.is_object(str) || Array.isArray(str)) {
|
||||
str = str[id];
|
||||
} else {
|
||||
str = undefined
|
||||
str = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
if (typeof(str) === 'string') {
|
||||
if (!colors) {
|
||||
return str
|
||||
if (!colors || str.includes('\n')) {
|
||||
return str;
|
||||
} else if (is_error) {
|
||||
return '\033[31m' + str + '\033[39m'
|
||||
return chalk.red(str);
|
||||
} else {
|
||||
return '\033[32m' + str + '\033[39m'
|
||||
return chalk.green(str);
|
||||
}
|
||||
} else {
|
||||
return require('util').inspect(str, void 0, void 0, colors)
|
||||
return require('util').inspect(str, null, null, colors);
|
||||
}
|
||||
})
|
||||
var sub = subsystems[+!colors][obj.sub] || subsystems[+!colors].default
|
||||
// ^^--- black magic... kidding, just "colors ? 0 : 1"
|
||||
});
|
||||
|
||||
const subsystems = [{
|
||||
in: chalk.green('<--'),
|
||||
out: chalk.yellow('-->'),
|
||||
fs: chalk.black('-=-'),
|
||||
default: chalk.blue('---'),
|
||||
}, {
|
||||
in: '<--',
|
||||
out: '-->',
|
||||
fs: '-=-',
|
||||
default: '---',
|
||||
}];
|
||||
|
||||
const sub = subsystems[colors ? 0 : 1][obj.sub] || subsystems[+!colors].default;
|
||||
if (colors) {
|
||||
return " \033[" + levels[type] + "m" + (pad(type)) + "\033[39m " + sub + " " + finalmsg
|
||||
return ` ${levels[type]((pad(type)))}${chalk.white(`${sub} ${finalmsg}`)}`;
|
||||
} else {
|
||||
return " " + (pad(type)) + " " + sub + " " + finalmsg
|
||||
return ` ${(pad(type))}${sub} ${finalmsg}`;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports.setup = setup;
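The `@{...}` placeholders that print() resolves come from the object passed to each log call, with a leading `!` rendering the value as an error. A hedged example mirroring how the rest of the codebase calls it (the file path and message are invented):

// illustrative only: a child logger used the same way the storage code does
const logger = require('./logger').logger.child({sub: 'fs'});

logger.warn({file: './storage/foo/package.json', err: new Error('EACCES')},
            'cannot write @{file}: @{!err.message}');
// with the 'pretty' format this prints roughly:
//   warn  -=- cannot write ./storage/foo/package.json: EACCES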
|
||||
|
||||
@@ -1,169 +1,215 @@
|
||||
var crypto = require('crypto')
|
||||
, utils = require('./utils')
|
||||
, UError = require('./error').UserError
|
||||
, Logger = require('./logger')
|
||||
/* eslint prefer-rest-params: "off" */
|
||||
|
||||
'use strict';
|
||||
|
||||
const crypto = require('crypto');
|
||||
const Error = require('http-errors');
|
||||
const utils = require('./utils');
|
||||
const Logger = require('./logger');
|
||||
|
||||
module.exports.match = function match(regexp) {
|
||||
return function(req, res, next, value, name) {
|
||||
if (regexp.exec(value)) {
|
||||
next();
|
||||
} else {
|
||||
next('route');
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
module.exports.validate_name = function validate_name(req, res, next, value, name) {
|
||||
if (utils.validate_name(req.params.package)) {
|
||||
req.params.package = String(req.params.package)
|
||||
next()
|
||||
} else {
|
||||
next(new UError({
|
||||
status: 403,
|
||||
msg: 'invalid package name',
|
||||
}))
|
||||
}
|
||||
}
|
||||
if (value.charAt(0) === '-') {
|
||||
// special case in couchdb usually
|
||||
next('route');
|
||||
} else if (utils.validate_name(value)) {
|
||||
next();
|
||||
} else {
|
||||
next( Error[403]('invalid ' + name) );
|
||||
}
|
||||
};
|
||||
|
||||
module.exports.validate_package = function validate_package(req, res, next, value, name) {
|
||||
if (value.charAt(0) === '-') {
|
||||
// special case in couchdb usually
|
||||
next('route');
|
||||
} else if (utils.validate_package(value)) {
|
||||
next();
|
||||
} else {
|
||||
next( Error[403]('invalid ' + name) );
|
||||
}
|
||||
};
|
||||
|
||||
module.exports.media = function media(expect) {
|
||||
return function(req, res, next) {
|
||||
if (req.headers['content-type'] !== expect) {
|
||||
next(new UError({
|
||||
status: 415,
|
||||
msg: 'wrong content-type, expect: '+expect+', got: '+req.headers['content-type'],
|
||||
}))
|
||||
} else {
|
||||
next()
|
||||
}
|
||||
}
|
||||
}
|
||||
return function(req, res, next) {
|
||||
if (req.headers['content-type'] !== expect) {
|
||||
next( Error[415]('wrong content-type, expect: ' + expect
|
||||
+ ', got: '+req.headers['content-type']) );
|
||||
} else {
|
||||
next();
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
module.exports.expect_json = function expect_json(req, res, next) {
|
||||
if (!utils.is_object(req.body)) {
|
||||
return next({
|
||||
status: 400,
|
||||
msg: 'can\'t parse incoming json',
|
||||
})
|
||||
}
|
||||
next()
|
||||
}
|
||||
if (!utils.is_object(req.body)) {
|
||||
return next( Error[400]('can\'t parse incoming json') );
|
||||
}
|
||||
next();
|
||||
};
|
||||
|
||||
module.exports.basic_auth = function basic_auth(callback) {
|
||||
return function(req, res, _next) {
|
||||
function next(err) {
|
||||
// uncomment this to reject users with bad auth headers
|
||||
//return _next.apply(null, arguments)
|
||||
|
||||
// swallow error, user remains unauthorized
|
||||
return _next()
|
||||
}
|
||||
module.exports.anti_loop = function(config) {
|
||||
return function(req, res, next) {
|
||||
if (req.headers.via != null) {
|
||||
let arr = req.headers.via.split(',');
|
||||
|
||||
var authorization = req.headers.authorization
|
||||
for (let i=0; i<arr.length; i++) {
|
||||
let m = arr[i].match(/\s*(\S+)\s+(\S+)/);
|
||||
if (m && m[2] === config.server_id) {
|
||||
return next( Error[508]('loop detected') );
|
||||
}
|
||||
}
|
||||
}
|
||||
next();
|
||||
};
|
||||
};
|
||||
|
||||
if (req.remoteUser != null) return next()
|
||||
if (authorization == null) return next()
|
||||
|
||||
var parts = authorization.split(' ')
|
||||
|
||||
if (parts.length !== 2) return next({
|
||||
status: 400,
|
||||
msg: 'bad authorization header',
|
||||
})
|
||||
|
||||
var scheme = parts[0]
|
||||
, credentials = new Buffer(parts[1], 'base64').toString()
|
||||
, index = credentials.indexOf(':')
|
||||
|
||||
if ('Basic' != scheme || index < 0) return next({
|
||||
status: 400,
|
||||
msg: 'bad authorization header',
|
||||
})
|
||||
|
||||
var user = credentials.slice(0, index)
|
||||
, pass = credentials.slice(index + 1)
|
||||
|
||||
if (callback(user, pass)) {
|
||||
req.remoteUser = user
|
||||
next()
|
||||
} else {
|
||||
next({
|
||||
status: 403,
|
||||
msg: 'bad username/password, access denied',
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// express doesn't do etags with requests <= 1024b
|
||||
// we use md5 here, it works well on 1k+ bytes, but sucks with fewer data
|
||||
// could improve performance using crc32 after benchmarks
|
||||
/**
|
||||
* Express doesn't do etags with requests <= 1024b
|
||||
* we use md5 here; it works well on 1k+ bytes, but poorly on smaller payloads
|
||||
* could improve performance using crc32 after benchmarks.
|
||||
* @param {Object} data
|
||||
* @return {String}
|
||||
*/
|
||||
function md5sum(data) {
|
||||
return crypto.createHash('md5').update(data).digest('hex')
|
||||
return crypto.createHash('md5').update(data).digest('hex');
|
||||
}
|
||||
|
||||
module.exports.log_and_etagify = function(req, res, next) {
|
||||
// logger
|
||||
req.log = Logger.logger.child({sub: 'in'})
|
||||
module.exports.allow = function(auth) {
|
||||
return function(action) {
|
||||
return function(req, res, next) {
|
||||
req.pause();
|
||||
auth['allow_'+action](req.params.package, req.remote_user, function(error, allowed) {
|
||||
req.resume();
|
||||
if (error) {
|
||||
next(error);
|
||||
} else if (allowed) {
|
||||
next();
|
||||
} else {
|
||||
// last plugin (that's our built-in one) returns either
|
||||
// cb(err) or cb(null, true), so this should never happen
|
||||
throw Error('bug in the auth plugin system');
|
||||
}
|
||||
});
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
var _auth = req.headers.authorization
|
||||
if (_auth) req.headers.authorization = '<Classified>'
|
||||
req.log.info({req: req, ip: req.ip}, '@{ip} requested \'@{req.method} @{req.url}\'')
|
||||
if (_auth) req.headers.authorization = _auth
|
||||
module.exports.final = function(body, req, res, next) {
|
||||
if (res.statusCode === 401 && !res.getHeader('WWW-Authenticate')) {
|
||||
// they say it's required for 401, so...
|
||||
res.header('WWW-Authenticate', 'Basic, Bearer');
|
||||
}
|
||||
|
||||
var bytesin = 0
|
||||
req.on('data', function(chunk){ bytesin += chunk.length })
|
||||
try {
|
||||
if (typeof(body) === 'string' || typeof(body) === 'object') {
|
||||
if (!res.getHeader('Content-type')) {
|
||||
res.header('Content-type', 'application/json');
|
||||
}
|
||||
|
||||
var _send = res.send
|
||||
res.send = function(body) {
|
||||
if (typeof(body) === 'string' || typeof(body) === 'object') {
|
||||
res.header('Content-type', 'application/json')
|
||||
if (typeof(body) === 'object' && body != null) {
|
||||
if (typeof(body.error) === 'string') {
|
||||
res._verdaccio_error = body.error;
|
||||
}
|
||||
body = JSON.stringify(body, undefined, ' ') + '\n';
|
||||
}
|
||||
|
||||
if (typeof(body) === 'object' && body != null) {
|
||||
if (body.error) {
|
||||
res._sinopia_error = body.error
|
||||
}
|
||||
body = JSON.stringify(body, undefined, '\t')
|
||||
}
|
||||
// don't send etags with errors
|
||||
if (!res.statusCode || (res.statusCode >= 200 && res.statusCode < 300)) {
|
||||
res.header('ETag', '"' + md5sum(body) + '"');
|
||||
}
|
||||
} else {
|
||||
// send(null), send(204), etc.
|
||||
}
|
||||
} catch(err) {
|
||||
// if verdaccio sends headers first, and then calls res.send()
|
||||
// as an error handler, we can't report error properly,
|
||||
// and should just close socket
|
||||
if (err.message.match(/set headers after they are sent/)) {
|
||||
if (res.socket != null) res.socket.destroy();
|
||||
return;
|
||||
} else {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
// don't send etags with errors
|
||||
if (!res.statusCode || (res.statusCode >= 200 && res.statusCode < 300)) {
|
||||
res.header('ETag', '"' + md5sum(body) + '"')
|
||||
}
|
||||
} else {
|
||||
// send(null), send(204), etc.
|
||||
}
|
||||
res.send(body);
|
||||
};
|
||||
|
||||
res.send = _send
|
||||
res.send(body)
|
||||
}
|
||||
module.exports.log = function(req, res, next) {
|
||||
// logger
|
||||
req.log = Logger.logger.child({sub: 'in'});
|
||||
|
||||
var bytesout = 0
|
||||
, _write = res.write
|
||||
res.write = function(buf) {
|
||||
bytesout += buf.length
|
||||
_write.apply(res, arguments)
|
||||
}
|
||||
let _auth = req.headers.authorization;
|
||||
if (_auth != null) req.headers.authorization = '<Classified>';
|
||||
let _cookie = req.headers.cookie;
|
||||
if (_cookie != null) req.headers.cookie = '<Classified>';
|
||||
|
||||
function log() {
|
||||
var msg = '@{status}, user: @{user}, req: \'@{request.method} @{request.url}\''
|
||||
if (res._sinopia_error) {
|
||||
msg += ', error: @{!error}'
|
||||
} else {
|
||||
msg += ', bytes: @{bytes.in}/@{bytes.out}'
|
||||
}
|
||||
req.log.warn({
|
||||
request: {method: req.method, url: req.url},
|
||||
level: 35, // http
|
||||
user: req.user,
|
||||
status: res.statusCode,
|
||||
error: res._sinopia_error,
|
||||
bytes: {
|
||||
in: bytesin,
|
||||
out: bytesout,
|
||||
}
|
||||
}, msg)
|
||||
}
|
||||
req.url = req.originalUrl;
|
||||
req.log.info( {req: req, ip: req.ip}
|
||||
, '@{ip} requested \'@{req.method} @{req.url}\'' );
|
||||
req.originalUrl = req.url;
|
||||
|
||||
req.on('close', function() {
|
||||
log(true)
|
||||
})
|
||||
if (_auth != null) req.headers.authorization = _auth;
|
||||
if (_cookie != null) req.headers.cookie = _cookie;
|
||||
|
||||
var _end = res.end
|
||||
res.end = function(buf) {
|
||||
if (buf) bytesout += buf.length
|
||||
_end.apply(res, arguments)
|
||||
log()
|
||||
}
|
||||
next()
|
||||
}
|
||||
let bytesin = 0;
|
||||
req.on('data', function(chunk) {
|
||||
bytesin += chunk.length;
|
||||
});
|
||||
|
||||
let bytesout = 0;
|
||||
let _write = res.write;
|
||||
res.write = function(buf) {
|
||||
bytesout += buf.length;
|
||||
_write.apply(res, arguments);
|
||||
};
|
||||
|
||||
const log = function() {
|
||||
let forwardedFor = req.headers['x-forwarded-for'];
|
||||
let remoteAddress = req.connection.remoteAddress;
|
||||
let remoteIP = forwardedFor ? `${forwardedFor} via ${remoteAddress}` : remoteAddress;
|
||||
let message = '@{status}, user: @{user}(@{remoteIP}), req: \'@{request.method} @{request.url}\'';
|
||||
if (res._verdaccio_error) {
|
||||
message += ', error: @{!error}';
|
||||
} else {
|
||||
message += ', bytes: @{bytes.in}/@{bytes.out}';
|
||||
}
|
||||
|
||||
req.url = req.originalUrl;
|
||||
req.log.warn({
|
||||
request: {method: req.method, url: req.url},
|
||||
level: 35, // http
|
||||
user: req.remote_user && req.remote_user.name,
|
||||
remoteIP,
|
||||
status: res.statusCode,
|
||||
error: res._verdaccio_error,
|
||||
bytes: {
|
||||
in: bytesin,
|
||||
out: bytesout,
|
||||
},
|
||||
}, message);
|
||||
req.originalUrl = req.url;
|
||||
};
|
||||
|
||||
req.on('close', function() {
|
||||
log(true);
|
||||
});
|
||||
|
||||
let _end = res.end;
|
||||
res.end = function(buf) {
|
||||
if (buf) bytesout += buf.length;
|
||||
_end.apply(res, arguments);
|
||||
log();
|
||||
};
|
||||
next();
|
||||
};
|
||||
|
||||
81
lib/notify.js
Normal file
@@ -0,0 +1,81 @@
|
||||
'use strict';
|
||||
|
||||
const Handlebars = require('handlebars');
|
||||
const request = require('request');
|
||||
const _ = require('lodash');
|
||||
const logger = require('./logger');
|
||||
|
||||
const handleNotify = function(metadata, notifyEntry) {
|
||||
let regex;
|
||||
if (metadata.name && notifyEntry.packagePattern) {
|
||||
// FUTURE: commented out due to https://github.com/verdaccio/verdaccio/pull/108#issuecomment-312421052
|
||||
// regex = new RegExp(notifyEntry.packagePattern, notifyEntry.packagePatternFlags || '');
|
||||
regex = new RegExp(notifyEntry.packagePattern);
|
||||
if (!regex.test(metadata.name)) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
const template = Handlebars.compile(notifyEntry.content);
|
||||
const content = template( metadata );
|
||||
|
||||
const options = {
|
||||
body: content,
|
||||
};
|
||||
|
||||
// provides fallback support: it accepts either an Object {} or an Array of {}
|
||||
if (notifyEntry.headers && _.isArray(notifyEntry.headers)) {
|
||||
const header = {};
|
||||
notifyEntry.headers.map(function(item) {
|
||||
if (Object.is(item, item)) {
|
||||
for (const key in item) {
|
||||
if (item.hasOwnProperty(key)) {
|
||||
header[key] = item[key];
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
options.headers = header;
|
||||
} else if (Object.is(notifyEntry.headers, notifyEntry.headers)) {
|
||||
options.headers = notifyEntry.headers;
|
||||
}
|
||||
|
||||
options.method = notifyEntry.method;
|
||||
|
||||
if ( notifyEntry.endpoint ) {
|
||||
options.url = notifyEntry.endpoint;
|
||||
}
|
||||
|
||||
return new Promise(( resolve, reject) => {
|
||||
request(options, function(err, response, body) {
|
||||
if (err || response.statusCode >= 400) {
|
||||
const errorMessage = _.isNil(err) ? response.statusMessage : err;
|
||||
logger.logger.error({err: errorMessage}, ' notify error: @{err.message}' );
|
||||
reject(errorMessage);
|
||||
} else {
|
||||
logger.logger.info({content: content}, 'A notification has been shipped: @{content}');
|
||||
if (body) {
|
||||
logger.logger.debug({body: body}, ' body: @{body}' );
|
||||
}
|
||||
resolve(_.isNil(body) === false ? body : null);
|
||||
}
|
||||
});
|
||||
});
|
||||
};
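Since the headers handling above accepts either a plain object or an array of single-key objects, here is a hedged sketch of both notify shapes (the endpoint, token, and template are invented for illustration):

// object form: headers are passed through to `request` as-is
const notifyWithObjectHeaders = {
  method: 'POST',
  endpoint: 'https://hooks.example.com/new-package',
  content: '{"text": "package {{name}} published"}',
  headers: {'Content-Type': 'application/json'},
};

// array form: handleNotify folds the entries into a single headers object
const notifyWithArrayHeaders = Object.assign({}, notifyWithObjectHeaders, {
  headers: [{'Content-Type': 'application/json'}, {'X-Token': 'secret'}],
});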
|
||||
|
||||
const notify = function(metadata, config) {
|
||||
if (config.notify) {
|
||||
if (config.notify.content) {
|
||||
return handleNotify(metadata, config.notify);
|
||||
} else {
|
||||
// multiple notifications endpoints PR #108
|
||||
for (const key in config.notify) {
|
||||
if (config.notify.hasOwnProperty(key)) {
|
||||
return handleNotify(metadata, config.notify[key]);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
module.exports.notify = notify;
|
||||
@@ -1,62 +0,0 @@
|
||||
//
|
||||
// Get this thingy from `npmconf` package if it ever changes...
|
||||
//
|
||||
|
||||
module.exports = // the npm CA certificate.
|
||||
[ "-----BEGIN CERTIFICATE-----\n"+
|
||||
"MIIChzCCAfACCQDauvz/KHp8ejANBgkqhkiG9w0BAQUFADCBhzELMAkGA1UEBhMC\n"+
|
||||
"VVMxCzAJBgNVBAgTAkNBMRAwDgYDVQQHEwdPYWtsYW5kMQwwCgYDVQQKEwNucG0x\n"+
|
||||
"IjAgBgNVBAsTGW5wbSBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxDjAMBgNVBAMTBW5w\n"+
|
||||
"bUNBMRcwFQYJKoZIhvcNAQkBFghpQGl6cy5tZTAeFw0xMTA5MDUwMTQ3MTdaFw0y\n"+
|
||||
"MTA5MDIwMTQ3MTdaMIGHMQswCQYDVQQGEwJVUzELMAkGA1UECBMCQ0ExEDAOBgNV\n"+
|
||||
"BAcTB09ha2xhbmQxDDAKBgNVBAoTA25wbTEiMCAGA1UECxMZbnBtIENlcnRpZmlj\n"+
|
||||
"YXRlIEF1dGhvcml0eTEOMAwGA1UEAxMFbnBtQ0ExFzAVBgkqhkiG9w0BCQEWCGlA\n"+
|
||||
"aXpzLm1lMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDLI4tIqPpRW+ACw9GE\n"+
|
||||
"OgBlJZwK5f8nnKCLK629Pv5yJpQKs3DENExAyOgDcyaF0HD0zk8zTp+ZsLaNdKOz\n"+
|
||||
"Gn2U181KGprGKAXP6DU6ByOJDWmTlY6+Ad1laYT0m64fERSpHw/hjD3D+iX4aMOl\n"+
|
||||
"y0HdbT5m1ZGh6SJz3ZqxavhHLQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAC4ySDbC\n"+
|
||||
"l7W1WpLmtLGEQ/yuMLUf6Jy/vr+CRp4h+UzL+IQpCv8FfxsYE7dhf/bmWTEupBkv\n"+
|
||||
"yNL18lipt2jSvR3v6oAHAReotvdjqhxddpe5Holns6EQd1/xEZ7sB1YhQKJtvUrl\n"+
|
||||
"ZNufy1Jf1r0ldEGeA+0ISck7s+xSh9rQD2Op\n"+
|
||||
"-----END CERTIFICATE-----\n",
|
||||
|
||||
// "GlobalSign Root CA"
|
||||
"-----BEGIN CERTIFICATE-----\n"+
|
||||
"MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkGA1UEBhMCQkUx\n"+
|
||||
"GTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jvb3QgQ0ExGzAZBgNVBAMTEkds\n"+
|
||||
"b2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAwMDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNV\n"+
|
||||
"BAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYD\n"+
|
||||
"VQQDExJHbG9iYWxTaWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDa\n"+
|
||||
"DuaZjc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavpxy0Sy6sc\n"+
|
||||
"THAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp1Wrjsok6Vjk4bwY8iGlb\n"+
|
||||
"Kk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdGsnUOhugZitVtbNV4FpWi6cgKOOvyJBNP\n"+
|
||||
"c1STE4U6G7weNLWLBYy5d4ux2x8gkasJU26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrX\n"+
|
||||
"gzT/LCrBbBlDSgeF59N89iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV\n"+
|
||||
"HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0BAQUF\n"+
|
||||
"AAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOzyj1hTdNGCbM+w6Dj\n"+
|
||||
"Y1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE38NflNUVyRRBnMRddWQVDf9VMOyG\n"+
|
||||
"j/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymPAbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhH\n"+
|
||||
"hm4qxFYxldBniYUr+WymXUadDKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveC\n"+
|
||||
"X4XSQRjbgbMEHMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==\n"+
|
||||
"-----END CERTIFICATE-----\n",
|
||||
|
||||
// "GlobalSign Root CA - R2"
|
||||
"-----BEGIN CERTIFICATE-----\n"+
|
||||
"MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4GA1UECxMXR2xv\n"+
|
||||
"YmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2Jh\n"+
|
||||
"bFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxT\n"+
|
||||
"aWduIFJvb3QgQ0EgLSBSMjETMBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2ln\n"+
|
||||
"bjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6\n"+
|
||||
"ErPLv4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8eoLrvozp\n"+
|
||||
"s6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklqtTleiDTsvHgMCJiEbKjN\n"+
|
||||
"S7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzdC9XZzPnqJworc5HGnRusyMvo4KD0L5CL\n"+
|
||||
"TfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pazq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6C\n"+
|
||||
"ygPCm48CAwEAAaOBnDCBmTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E\n"+
|
||||
"FgQUm+IHV2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5nbG9i\n"+
|
||||
"YWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG3lm0mi3f3BmGLjAN\n"+
|
||||
"BgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4GsJ0/WwbgcQ3izDJr86iw8bmEbTUsp\n"+
|
||||
"9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu\n"+
|
||||
"01yiPqFbQfXf5WRDLenVOavSot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG7\n"+
|
||||
"9G+dwfCMNYxdAfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7\n"+
|
||||
"TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg==\n"+
|
||||
"-----END CERTIFICATE-----\n" ]
|
||||
75
lib/plugin-loader.js
Normal file
@@ -0,0 +1,75 @@
|
||||
'use strict';
|
||||
|
||||
const Path = require('path');
|
||||
|
||||
/**
|
||||
* Requires a module.
|
||||
* @param {*} path the module's path
|
||||
* @return {Object}
|
||||
*/
|
||||
function try_load(path) {
|
||||
try {
|
||||
return require(path);
|
||||
} catch(err) {
|
||||
if (err.code === 'MODULE_NOT_FOUND') {
|
||||
return null;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Load a plugin following the rules
|
||||
* - First try to load from the internal plugins directory (which will disappear sooner or later).
* - A second attempt from node_modules, in case there are multiple matches, as for instance verdaccio-ldap
* and sinopia-ldap. Plugins with the verdaccio prefix take preference.
|
||||
* @param {*} config a reference of the configuration settings
|
||||
* @param {*} plugin_configs
|
||||
* @param {*} params a set of params to initialise the plugin
|
||||
* @param {*} sanity_check callback that check the shape that should fulfill the plugin
|
||||
* @return {Array} list of plugins
|
||||
*/
|
||||
function load_plugins(config, plugin_configs, params, sanity_check) {
|
||||
let plugins = Object.keys(plugin_configs || {}).map(function(p) {
|
||||
let plugin;
|
||||
|
||||
// try local plugins first
|
||||
plugin = try_load(Path.resolve(__dirname + '/plugins', p));
|
||||
|
||||
// npm package
|
||||
if (plugin === null && p.match(/^[^\.\/]/)) {
|
||||
plugin = try_load(`verdaccio-${p}`);
|
||||
// compatibility for old sinopia plugins
|
||||
if (!plugin) {
|
||||
plugin = try_load(`sinopia-${p}`);
|
||||
}
|
||||
}
|
||||
|
||||
if (plugin === null) {
|
||||
plugin = try_load(p);
|
||||
}
|
||||
|
||||
// relative to config path
|
||||
if (plugin === null && p.match(/^\.\.?($|\/)/)) {
|
||||
plugin = try_load(Path.resolve(Path.dirname(config.self_path), p));
|
||||
}
|
||||
|
||||
if (plugin === null) {
|
||||
throw Error('"' + p + '" plugin not found\ntry "npm install verdaccio-' + p + '"');
|
||||
}
|
||||
|
||||
if (typeof(plugin) !== 'function')
|
||||
throw Error('"' + p + '" doesn\'t look like a valid plugin');
|
||||
|
||||
plugin = plugin(plugin_configs[p], params);
|
||||
|
||||
if (plugin === null || !sanity_check(plugin))
|
||||
throw Error('"' + p + '" doesn\'t look like a valid plugin');
|
||||
|
||||
return plugin;
|
||||
});
|
||||
|
||||
return plugins;
|
||||
}
|
||||
|
||||
exports.load_plugins = load_plugins;
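A hedged sketch of how the lookup order documented above plays out for an auth plugin named `htpasswd`; the call site, `config` object, and sanity check below are illustrative, not taken from this changeset:

const loadPlugins = require('./plugin-loader').load_plugins;

// `config` and `logger` are assumed to be in scope (parsed YAML config, app logger).
// Given `auth: { htpasswd: {...} }` in the config, load_plugins tries, in order:
//   1. ./lib/plugins/htpasswd        (bundled plugin)
//   2. verdaccio-htpasswd            (npm package, preferred prefix)
//   3. sinopia-htpasswd              (legacy prefix)
//   4. htpasswd                      (bare module name)
//   5. a path relative to the config file, when the name starts with ./ or ../
const plugins = loadPlugins(config, config.auth, {config: config, logger: logger},
  function(p) { return typeof p.authenticate === 'function'; });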
|
||||
59
lib/plugins/htpasswd/crypt3.js
Normal file
@@ -0,0 +1,59 @@
|
||||
/* eslint require-jsdoc: off */
|
||||
|
||||
'use strict';
|
||||
|
||||
/** Node.js Crypt(3) Library
|
||||
|
||||
Inspired by (and intended to be compatible with) sendanor/crypt3
|
||||
|
||||
see https://github.com/sendanor/node-crypt3
|
||||
|
||||
The key difference is the removal of the dependency on the unix crypt(3) function
|
||||
which is not platform independent, and requires compilation. Instead, a pure
|
||||
javascript version is used.
|
||||
|
||||
*/
|
||||
|
||||
const crypt = require('unix-crypt-td-js');
|
||||
const crypto = require('crypto');
|
||||
|
||||
function createSalt(type) {
|
||||
type = type || 'sha512';
|
||||
|
||||
switch (type) {
|
||||
|
||||
case 'md5':
|
||||
return '$1$' + crypto.randomBytes(10).toString('base64');
|
||||
|
||||
case 'blowfish':
|
||||
return '$2a$' + crypto.randomBytes(10).toString('base64');
|
||||
|
||||
case 'sha256':
|
||||
return '$5$' + crypto.randomBytes(10).toString('base64');
|
||||
|
||||
case 'sha512':
|
||||
return '$6$' + crypto.randomBytes(10).toString('base64');
|
||||
|
||||
default:
|
||||
throw new TypeError('Unknown salt type at crypt3.createSalt: ' + type);
|
||||
}
|
||||
}
|
||||
|
||||
function crypt3(key, salt) {
|
||||
salt = salt || createSalt();
|
||||
return crypt(key, salt);
|
||||
}
|
||||
|
||||
/** Crypt(3) password and data encryption.
|
||||
* @param {string} key user's typed password
|
||||
* @param {string} salt Optional salt, for example SHA-512 use "$6$salt$".
|
||||
* @returns {string} A generated hash in format $id$salt$encrypted
|
||||
* @see https://en.wikipedia.org/wiki/Crypt_(C)
|
||||
*/
|
||||
module.exports = crypt3;
|
||||
|
||||
/** Create salt
|
||||
* @param {string} type The type of salt: md5, blowfish (only some linux distros), sha256 or sha512. Default is sha512.
|
||||
* @returns {string} Generated salt string
|
||||
*/
|
||||
module.exports.createSalt = createSalt;
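/**
 * Illustrative usage only (the password and resulting hash are hypothetical):
 *
 *   const crypt3 = require('./crypt3');
 *   const hash = crypt3('hunter2');              // sha512 salt generated by default, e.g. '$6$...'
 *   crypt3('hunter2', hash) === hash;            // true: re-hashing with the stored hash as salt verifies it
 *   crypt3('hunter2', crypt3.createSalt('md5')); // md5 variant, salt prefixed with '$1$'
 */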
|
||||
137
lib/plugins/htpasswd/index.js
Normal file
137
lib/plugins/htpasswd/index.js
Normal file
@@ -0,0 +1,137 @@
|
||||
/* eslint require-jsdoc: off */
|
||||
|
||||
'use strict';
|
||||
|
||||
let fs = require('fs');
|
||||
let Path = require('path');
|
||||
let utils = require('./utils');
|
||||
|
||||
module.exports = HTPasswd;
|
||||
|
||||
function HTPasswd(config, stuff) {
|
||||
let self = Object.create(HTPasswd.prototype);
|
||||
self._users = {};
|
||||
|
||||
// config for this module
|
||||
self._config = config;
|
||||
|
||||
// verdaccio logger
|
||||
self._logger = stuff.logger;
|
||||
|
||||
// verdaccio main config object
|
||||
self._verdaccio_config = stuff.config;
|
||||
|
||||
// all this "verdaccio_config" stuff is for b/w compatibility only
|
||||
self._maxusers = self._config.max_users;
|
||||
if (!self._maxusers) self._maxusers = self._verdaccio_config.max_users;
|
||||
// set maxusers to Infinity if not specified
|
||||
if (!self._maxusers) self._maxusers = Infinity;
|
||||
|
||||
self._last_time = null;
|
||||
let file = self._config.file;
|
||||
if (!file) file = self._verdaccio_config.users_file;
|
||||
if (!file) throw new Error('should specify "file" in config');
|
||||
self._path = Path.resolve(Path.dirname(self._verdaccio_config.self_path), file);
|
||||
return self;
|
||||
}
|
||||
|
||||
HTPasswd.prototype.authenticate = function(user, password, cb) {
|
||||
let self = this;
|
||||
self._reload(function(err) {
|
||||
if (err) return cb(err.code === 'ENOENT' ? null : err);
|
||||
if (!self._users[user]) return cb(null, false);
|
||||
if (!utils.verify_password(user, password, self._users[user])) return cb(null, false);
|
||||
|
||||
// authentication succeeded!
|
||||
// return all usergroups this user has access to;
|
||||
// (this particular package has no concept of usergroups, so just return user herself)
|
||||
return cb(null, [user]);
|
||||
});
|
||||
};
|
||||
|
||||
// hopefully race-condition-free way to add users:
|
||||
// 1. lock file for writing (other processes can still read)
|
||||
// 2. reload .htpasswd
|
||||
// 3. write new data into .htpasswd.tmp
|
||||
// 4. move .htpasswd.tmp to .htpasswd
|
||||
// 5. reload .htpasswd
|
||||
// 6. unlock file
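//
// A minimal sketch of how the auth layer is expected to drive this plugin
// (the config values and callbacks are illustrative, not verdaccio's actual call site):
//
//   const auth = HTPasswd({file: './htpasswd'}, {config: verdaccioConfig, logger: logger});
//   auth.adduser('alice', 's3cret', function(err, ok) { /* ok === true on success */ });
//   auth.authenticate('alice', 's3cret', function(err, groups) { /* groups === ['alice'] */ });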
|
||||
HTPasswd.prototype.adduser = function(user, password, real_cb) {
|
||||
let self = this;
|
||||
|
||||
function sanity_check() {
|
||||
let err = null;
|
||||
if (self._users[user]) {
|
||||
err = Error('this user already exists');
|
||||
} else if (Object.keys(self._users).length >= self._maxusers) {
|
||||
err = Error('maximum amount of users reached');
|
||||
}
|
||||
if (err) err.status = 403;
|
||||
return err;
|
||||
}
|
||||
|
||||
// preliminary checks, just to ensure that file won't be reloaded if it's not needed
|
||||
let s_err = sanity_check();
|
||||
if (s_err) return real_cb(s_err, false);
|
||||
|
||||
utils.lock_and_read(self._path, function(err, res) {
|
||||
let locked = false;
|
||||
|
||||
// callback that cleans up lock first
|
||||
function cb(err) {
|
||||
if (locked) {
|
||||
utils.unlock_file(self._path, function() {
|
||||
// ignore any error from the unlock
|
||||
real_cb(err, !err);
|
||||
});
|
||||
} else {
|
||||
real_cb(err, !err);
|
||||
}
|
||||
}
|
||||
|
||||
if (!err) {
|
||||
locked = true;
|
||||
}
|
||||
|
||||
// ignore ENOENT errors, we'll just create .htpasswd in that case
|
||||
if (err && err.code !== 'ENOENT') return cb(err);
|
||||
|
||||
let body = (res || '').toString('utf8');
|
||||
self._users = utils.parse_htpasswd(body);
|
||||
|
||||
// real checks, to prevent race conditions
|
||||
let s_err = sanity_check();
|
||||
if (s_err) return cb(s_err);
|
||||
|
||||
try {
|
||||
body = utils.add_user_to_htpasswd(body, user, password);
|
||||
} catch (err) {
|
||||
return cb(err);
|
||||
}
|
||||
fs.writeFile(self._path, body, function(err) {
|
||||
if (err) return cb(err);
|
||||
self._reload(function() {
|
||||
cb(null, true);
|
||||
});
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
HTPasswd.prototype._reload = function(_callback) {
|
||||
let self = this;
|
||||
|
||||
fs.stat(self._path, function(err, stats) {
|
||||
if (err) return _callback(err);
|
||||
|
||||
if (self._last_time === stats.mtime) return _callback();
|
||||
self._last_time = stats.mtime;
|
||||
|
||||
fs.readFile(self._path, 'utf8', function(err, buffer) {
|
||||
if (err) return _callback(err);
|
||||
|
||||
self._users = utils.parse_htpasswd(buffer);
|
||||
|
||||
_callback();
|
||||
});
|
||||
});
|
||||
};
|
||||
72
lib/plugins/htpasswd/utils.js
Normal file
72
lib/plugins/htpasswd/utils.js
Normal file
@@ -0,0 +1,72 @@
|
||||
/* eslint require-jsdoc: off */
|
||||
|
||||
'use strict';
|
||||
|
||||
let crypto = require('crypto');
|
||||
let crypt3 = require('./crypt3');
|
||||
let md5 = require('apache-md5');
|
||||
let locker = require('../../file-locking');
|
||||
|
||||
// this function neither unlocks the file nor closes it;
|
||||
// it'll have to be done manually later
|
||||
function lock_and_read(name, cb) {
|
||||
locker.readFile(name, {lock: true}, function(err, res) {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
return cb(null, res);
|
||||
});
|
||||
}
|
||||
|
||||
// close and unlock file
|
||||
function unlock_file(name, cb) {
|
||||
locker.unlockFile(name, cb);
|
||||
}
|
||||
|
||||
function parse_htpasswd(input) {
|
||||
let result = {};
|
||||
input.split('\n').forEach(function(line) {
|
||||
let args = line.split(':', 3);
|
||||
if (args.length > 1) result[args[0]] = args[1];
|
||||
});
|
||||
return result;
|
||||
}
|
||||
|
||||
function verify_password(user, passwd, hash) {
|
||||
if (hash.indexOf('{PLAIN}') === 0) {
|
||||
return passwd === hash.substr(7);
|
||||
} else if (hash.indexOf('{SHA}') === 0) {
|
||||
return crypto.createHash('sha1').update(passwd, 'binary').digest('base64') === hash.substr(5);
|
||||
} else {
|
||||
return (
|
||||
// for backwards compatibility, first check md5 then check crypt3
|
||||
md5(passwd, hash) === hash ||
|
||||
crypt3(passwd, hash) === hash
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
function add_user_to_htpasswd(body, user, passwd) {
|
||||
if (user !== encodeURIComponent(user)) {
|
||||
let err = Error('username should not contain non-uri-safe characters');
|
||||
err.status = 409;
|
||||
throw err;
|
||||
}
|
||||
|
||||
if (crypt3) {
|
||||
passwd = crypt3(passwd);
|
||||
} else {
|
||||
passwd = '{SHA}' + crypto.createHash('sha1').update(passwd, 'binary').digest('base64');
|
||||
}
|
||||
let comment = 'autocreated ' + (new Date()).toJSON();
|
||||
|
||||
let newline = user + ':' + passwd + ':' + comment + '\n';
|
||||
if (body.length && body[body.length-1] !== '\n') newline = '\n' + newline;
|
||||
return body + newline;
|
||||
}
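// For reference, a line appended by add_user_to_htpasswd looks roughly like this
// (the hash and timestamp are illustrative):
//
//   alice:$6$<salt>$<crypt3 digest>:autocreated 2017-01-01T00:00:00.000Z
//
// verify_password later accepts {PLAIN}..., {SHA}... or an md5/crypt3 hash in that second field.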
|
||||
|
||||
module.exports.parse_htpasswd = parse_htpasswd;
|
||||
module.exports.verify_password = verify_password;
|
||||
module.exports.add_user_to_htpasswd = add_user_to_htpasswd;
|
||||
module.exports.lock_and_read = lock_and_read;
|
||||
module.exports.unlock_file = unlock_file;
|
||||
83
lib/search.js
Normal file
83
lib/search.js
Normal file
@@ -0,0 +1,83 @@
|
||||
/* eslint no-invalid-this: "off" */
|
||||
|
||||
'use strict';
|
||||
|
||||
const lunr = require('lunr');
|
||||
|
||||
/**
|
||||
* Handle the search Indexer.
|
||||
*/
|
||||
class Search {
|
||||
|
||||
/**
|
||||
* Constructor.
|
||||
*/
|
||||
constructor() {
|
||||
this.index = lunr(function() {
|
||||
this.field('name', {boost: 10});
|
||||
this.field('description', {boost: 4});
|
||||
this.field('author', {boost: 6});
|
||||
this.field('readme');
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs a query to the indexer.
|
||||
* If the keyword is "*", it returns all local elements;
|
||||
* otherwise it performs a search.
|
||||
* @param {*} q the keyword
|
||||
* @return {Array} list of results.
|
||||
*/
|
||||
query(q) {
|
||||
return q === '*'
|
||||
? this.storage.localStorage.localList.get().map( function( pkg ) {
|
||||
return {ref: pkg, score: 1};
|
||||
}) : this.index.search(q);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a new element to the index.
|
||||
* @param {*} pkg the package
|
||||
*/
|
||||
add(pkg) {
|
||||
this.index.add({
|
||||
id: pkg.name,
|
||||
name: pkg.name,
|
||||
description: pkg.description,
|
||||
author: pkg._npmUser ? pkg._npmUser.name : '???',
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove an element from the index.
|
||||
* @param {*} name the id of the element
|
||||
*/
|
||||
remove(name) {
|
||||
this.index.remove({id: name});
|
||||
}
|
||||
|
||||
/**
|
||||
* Force a reindex.
|
||||
*/
|
||||
reindex() {
|
||||
let self = this;
|
||||
this.storage.get_local(function(err, packages) {
|
||||
if (err) throw err; // that function shouldn't produce any errors
|
||||
let i = packages.length;
|
||||
while (i--) {
|
||||
self.add(packages[i]);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Set up the {Storage}
|
||||
* @param {*} storage A storage reference.
|
||||
*/
|
||||
configureStorage(storage) {
|
||||
this.storage = storage;
|
||||
this.reindex();
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = new Search();
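/**
 * Illustrative usage of the singleton (the storage object and package below are hypothetical):
 *
 *   const Search = require('./search');
 *   Search.configureStorage(storage);            // triggers an initial reindex()
 *   Search.add({name: 'my-pkg', description: 'demo package', _npmUser: {name: 'alice'}});
 *   Search.query('demo');                        // lunr results, e.g. [{ref: 'my-pkg', score: ...}]
 *   Search.query('*');                           // every package known to local storage, score 1
 */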
|
||||
BIN
lib/static/ajax.gif
Normal file
BIN
lib/static/ajax.gif
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 1.9 KiB |
BIN
lib/static/favicon.ico
Normal file
BIN
lib/static/favicon.ico
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 6.9 KiB |
BIN
lib/static/favicon.png
Normal file
BIN
lib/static/favicon.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 315 B |
BIN
lib/static/fontello.eot
Normal file
BIN
lib/static/fontello.eot
Normal file
Binary file not shown.
15
lib/static/fontello.svg
Normal file
15
lib/static/fontello.svg
Normal file
@@ -0,0 +1,15 @@
|
||||
<?xml version="1.0" standalone="no"?>
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg xmlns="http://www.w3.org/2000/svg">
|
||||
<metadata>Copyright (C) 2014 by original authors @ fontello.com</metadata>
|
||||
<defs>
|
||||
<font id="fontello" horiz-adv-x="1000" >
|
||||
<font-face font-family="fontello" font-weight="400" font-stretch="normal" units-per-em="1000" ascent="850" descent="-150" />
|
||||
<missing-glyph horiz-adv-x="1000" />
|
||||
<glyph glyph-name="search" unicode="" d="m643 386q0 103-74 176t-176 74-177-74-73-176 73-177 177-73 176 73 74 177z m286-465q0-29-22-50t-50-21q-30 0-50 21l-191 191q-100-69-223-69-80 0-153 31t-125 84-84 125-31 153 31 152 84 126 125 84 153 31 152-31 126-84 84-126 31-152q0-123-69-223l191-191q21-21 21-51z" horiz-adv-x="928.6" />
|
||||
<glyph glyph-name="cancel" unicode="" d="m724 112q0-22-15-38l-76-76q-16-15-38-15t-38 15l-164 165-164-165q-16-15-38-15t-38 15l-76 76q-16 16-16 38t16 38l164 164-164 164q-16 16-16 38t16 38l76 76q16 16 38 16t38-16l164-164 164 164q16 16 38 16t38-16l76-76q15-15 15-38t-15-38l-164-164 164-164q15-15 15-38z" horiz-adv-x="785.7" />
|
||||
<glyph glyph-name="right-open" unicode="" d="m613 386q0-29-20-51l-364-363q-21-21-50-21t-51 21l-42 42q-21 21-21 50 0 30 21 51l271 271-271 270q-21 22-21 51 0 30 21 50l42 42q20 21 51 21t50-21l364-363q20-21 20-50z" horiz-adv-x="642.9" />
|
||||
<glyph glyph-name="angle-right" unicode="" d="m332 314q0-7-6-13l-260-260q-5-5-12-5t-13 5l-28 28q-6 6-6 13t6 13l219 219-219 220q-6 5-6 12t6 13l28 28q5 6 13 6t12-6l260-260q6-5 6-13z" horiz-adv-x="357.1" />
|
||||
</font>
|
||||
</defs>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.6 KiB |
BIN
lib/static/fontello.ttf
Normal file
BIN
lib/static/fontello.ttf
Normal file
Binary file not shown.
BIN
lib/static/fontello.woff
Normal file
BIN
lib/static/fontello.woff
Normal file
Binary file not shown.
4
lib/static/jquery.min.js
vendored
Normal file
4
lib/static/jquery.min.js
vendored
Normal file
File diff suppressed because one or more lines are too long
BIN
lib/static/logo-sm.png
Normal file
BIN
lib/static/logo-sm.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 3.4 KiB |
BIN
lib/static/logo.png
Normal file
BIN
lib/static/logo.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 25 KiB |
7413
lib/static/main.css
Normal file
7413
lib/static/main.css
Normal file
File diff suppressed because it is too large
Load Diff
1849
lib/static/main.js
Normal file
1849
lib/static/main.js
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,74 +1,77 @@
|
||||
/* eslint prefer-rest-params: "off" */
|
||||
|
||||
'use strict';
|
||||
|
||||
// see https://secure.flickr.com/photos/girliemac/sets/72157628409467125
|
||||
|
||||
var images = {
|
||||
100: 'aVvDhR', // '6512768893', // 100 - Continue
|
||||
101: 'aXXExP', // '6540479029', // 101 - Switching Protocols
|
||||
200: 'aVuVsF', // '6512628175', // 200 - OK
|
||||
201: 'aXWm1Z', // '6540221577', // 201 - Created
|
||||
202: 'aXXEyF', // '6540479079', // 202 - Accepted
|
||||
204: 'aYyJ7B', // '6547319943', // 204 - No Content
|
||||
206: 'aVEnUP', // '6514473163', // 206 - Partial Content
|
||||
207: 'aVEnRD', // '6514472979', // 207 - Multi-Status
|
||||
300: 'aW7mac', // '6519540181', // 300 - Multiple Choices
|
||||
301: 'aW7mb4', // '6519540231', // 301 - Moved Permanently
|
||||
302: 'aV6jKp', // '6508023829', // 302 - Found
|
||||
303: 'aVxtaK', // '6513125065', // 303 - See Other
|
||||
304: 'aXY3dH', // '6540551929', // 304 - Not Modified
|
||||
305: 'aXX5LK', // '6540365403', // 305 - Use Proxy
|
||||
307: 'aVwQnk', // '6513001269', // 307 - Temporary Redirect
|
||||
400: 'aXYDeT', // '6540669737', // 400 - Bad Request
|
||||
401: 'aV6jwe', // '6508023065', // 401 - Unauthorized
|
||||
402: 'aVwQoe', // '6513001321', // 402 - Payment Required
|
||||
403: 'aV6jFK', // '6508023617', // 403 - Forbidden
|
||||
404: 'aV6juR', // '6508022985', // 404 - Not Found
|
||||
405: 'aV6jE8', // '6508023523', // 405 - Method Not Allowed
|
||||
406: 'aV6jxa', // '6508023119', // 406 - Not Acceptable
|
||||
408: 'aV6jyc', // '6508023179', // 408 - Request Timeout
|
||||
409: 'aV6jzz', // '6508023259', // 409 - Conflict
|
||||
410: 'aVES2H', // '6514567755', // 410 - Gone
|
||||
411: 'aXYVpT', // '6540724141', // 411 - Length Required
|
||||
413: 'aV6jHZ', // '6508023747', // 413 - Request Entity Too Large
|
||||
414: 'aV6jBa', // '6508023351', // 414 - Request-URI Too Long
|
||||
416: 'aVxQvr', // '6513196851', // 416 - Requested Range Not Satisfiable
|
||||
417: 'aV6jGP', // '6508023679', // 417 - Expectation Failed
|
||||
418: 'aV6J7c', // '6508102407', // 418 - I'm a teapot
|
||||
422: 'aVEnTt', // '6514473085', // 422 - Unprocessable Entity
|
||||
423: 'aVEyVZ', // '6514510235', // 423 - Locked
|
||||
424: 'aVEWZ6', // '6514584423', // 424 - Failed Dependency
|
||||
425: 'aXYdzH', // '6540586787', // 425 - Unordered Collection
|
||||
426: 'aVdo4M', // '6509400771', // 426 - Upgrade Required
|
||||
429: 'aVdo8F', // '6509400997', // 429 - Too Many Requests
|
||||
431: 'aVdo3n', // '6509400689', // 431 - Request Header Fields Too Large
|
||||
444: 'aVdo1P', // '6509400599', // 444 - No Response
|
||||
450: 'aVxtbK', // '6513125123', // 450 - Blocked by Windows Parental Controls
|
||||
451: 'eTiGQd', // '9113233540', // 451 - Unavailable for Legal Reasons
|
||||
500: 'aVdo6e', // '6509400855', // 500 - Internal Server Error
|
||||
502: 'aV6jCv', // '6508023429', // 502 - Bad Gateway
|
||||
503: 'aXYvop', // '6540643319', // 503 - Service Unavailable
|
||||
506: 'aXYvnH', // '6540643279', // 506 - Variant Also Negotiates
|
||||
507: 'aVdnZa', // '6509400503', // 507 - Insufficient Storage
|
||||
508: 'aVdnYa', // '6509400445', // 508 - Loop Detected
|
||||
509: 'aXXg1V', // '6540399865', // 509 - Bandwidth Limit Exceeded
|
||||
599: 'aVdo7v', // '6509400929', // 599 - Network connect timeout error
|
||||
}
|
||||
const images = {
|
||||
100: 'aVvDhR', // '6512768893', // 100 - Continue
|
||||
101: 'aXXExP', // '6540479029', // 101 - Switching Protocols
|
||||
200: 'aVuVsF', // '6512628175', // 200 - OK
|
||||
201: 'aXWm1Z', // '6540221577', // 201 - Created
|
||||
202: 'aXXEyF', // '6540479079', // 202 - Accepted
|
||||
204: 'aYyJ7B', // '6547319943', // 204 - No Content
|
||||
206: 'aVEnUP', // '6514473163', // 206 - Partial Content
|
||||
207: 'aVEnRD', // '6514472979', // 207 - Multi-Status
|
||||
300: 'aW7mac', // '6519540181', // 300 - Multiple Choices
|
||||
301: 'aW7mb4', // '6519540231', // 301 - Moved Permanently
|
||||
302: 'aV6jKp', // '6508023829', // 302 - Found
|
||||
303: 'aVxtaK', // '6513125065', // 303 - See Other
|
||||
304: 'aXY3dH', // '6540551929', // 304 - Not Modified
|
||||
305: 'aXX5LK', // '6540365403', // 305 - Use Proxy
|
||||
307: 'aVwQnk', // '6513001269', // 307 - Temporary Redirect
|
||||
400: 'aXYDeT', // '6540669737', // 400 - Bad Request
|
||||
401: 'aV6jwe', // '6508023065', // 401 - Unauthorized
|
||||
402: 'aVwQoe', // '6513001321', // 402 - Payment Required
|
||||
403: 'aV6jFK', // '6508023617', // 403 - Forbidden
|
||||
404: 'aV6juR', // '6508022985', // 404 - Not Found
|
||||
405: 'aV6jE8', // '6508023523', // 405 - Method Not Allowed
|
||||
406: 'aV6jxa', // '6508023119', // 406 - Not Acceptable
|
||||
408: 'aV6jyc', // '6508023179', // 408 - Request Timeout
|
||||
409: 'aV6jzz', // '6508023259', // 409 - Conflict
|
||||
410: 'aVES2H', // '6514567755', // 410 - Gone
|
||||
411: 'aXYVpT', // '6540724141', // 411 - Length Required
|
||||
413: 'aV6jHZ', // '6508023747', // 413 - Request Entity Too Large
|
||||
414: 'aV6jBa', // '6508023351', // 414 - Request-URI Too Long
|
||||
416: 'aVxQvr', // '6513196851', // 416 - Requested Range Not Satisfiable
|
||||
417: 'aV6jGP', // '6508023679', // 417 - Expectation Failed
|
||||
418: 'aV6J7c', // '6508102407', // 418 - I'm a teapot
|
||||
422: 'aVEnTt', // '6514473085', // 422 - Unprocessable Entity
|
||||
423: 'aVEyVZ', // '6514510235', // 423 - Locked
|
||||
424: 'aVEWZ6', // '6514584423', // 424 - Failed Dependency
|
||||
425: 'aXYdzH', // '6540586787', // 425 - Unordered Collection
|
||||
426: 'aVdo4M', // '6509400771', // 426 - Upgrade Required
|
||||
429: 'aVdo8F', // '6509400997', // 429 - Too Many Requests
|
||||
431: 'aVdo3n', // '6509400689', // 431 - Request Header Fields Too Large
|
||||
444: 'aVdo1P', // '6509400599', // 444 - No Response
|
||||
450: 'aVxtbK', // '6513125123', // 450 - Blocked by Windows Parental Controls
|
||||
451: 'eTiGQd', // '9113233540', // 451 - Unavailable for Legal Reasons
|
||||
500: 'aVdo6e', // '6509400855', // 500 - Internal Server Error
|
||||
502: 'aV6jCv', // '6508023429', // 502 - Bad Gateway
|
||||
503: 'aXYvop', // '6540643319', // 503 - Service Unavailable
|
||||
506: 'aXYvnH', // '6540643279', // 506 - Variant Also Negotiates
|
||||
507: 'aVdnZa', // '6509400503', // 507 - Insufficient Storage
|
||||
508: 'aVdnYa', // '6509400445', // 508 - Loop Detected
|
||||
509: 'aXXg1V', // '6540399865', // 509 - Bandwidth Limit Exceeded
|
||||
599: 'aVdo7v', // '6509400929', // 599 - Network connect timeout error
|
||||
};
|
||||
|
||||
module.exports.get_image = function(status) {
|
||||
if (status in images) {
|
||||
return 'http://flic.kr/p/'+images[status]
|
||||
//return 'https://secure.flickr.com/photos/girliemac/'+images[status]+'/in/set-72157628409467125/lightbox/'
|
||||
}
|
||||
}
|
||||
if (status in images) {
|
||||
return 'http://flic.kr/p/' + images[status];
|
||||
// return 'https://secure.flickr.com/photos/girliemac/'+images[status]+'/in/set-72157628409467125/lightbox/'
|
||||
}
|
||||
};
|
||||
|
||||
module.exports.middleware = function(req, res, next) {
|
||||
var _writeHead = res.writeHead
|
||||
res.writeHead = function(status) {
|
||||
if (status in images) {
|
||||
res.setHeader("X-Status-Cat", module.exports.get_image(status))
|
||||
}
|
||||
_writeHead.apply(res, arguments)
|
||||
}
|
||||
let _writeHead = res.writeHead;
|
||||
res.writeHead = function(status) {
|
||||
if (status in images) {
|
||||
res.setHeader('X-Status-Cat', module.exports.get_image(status));
|
||||
}
|
||||
_writeHead.apply(res, arguments);
|
||||
};
|
||||
|
||||
next()
|
||||
}
|
||||
next();
|
||||
};
|
||||
|
||||
|
||||
1148
lib/storage.js
1148
lib/storage.js
File diff suppressed because it is too large
Load Diff
72
lib/storage/local/local-data.js
Normal file
72
lib/storage/local/local-data.js
Normal file
@@ -0,0 +1,72 @@
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const Path = require('path');
|
||||
|
||||
/**
|
||||
* Handle local database.
|
||||
* FUTURE: must be a plugin.
|
||||
*/
|
||||
class LocalData {
|
||||
|
||||
/**
|
||||
* Load and parse the local JSON database.
|
||||
* @param {*} path the database path
|
||||
*/
|
||||
constructor(path) {
|
||||
this.path = path;
|
||||
try {
|
||||
this.data = JSON.parse(fs.readFileSync(this.path, 'utf8'));
|
||||
} catch(_) {
|
||||
this.data = {list: []};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a new element.
|
||||
* @param {*} name
|
||||
*/
|
||||
add(name) {
|
||||
if (this.data.list.indexOf(name) === -1) {
|
||||
this.data.list.push(name);
|
||||
this.sync();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove an element from the database.
|
||||
* @param {*} name
|
||||
*/
|
||||
remove(name) {
|
||||
const i = this.data.list.indexOf(name);
|
||||
if (i !== -1) {
|
||||
this.data.list.splice(i, 1);
|
||||
}
|
||||
this.sync();
|
||||
}
|
||||
|
||||
/**
|
||||
* Return all database elements.
|
||||
* @return {Array}
|
||||
*/
|
||||
get() {
|
||||
return this.data.list;
|
||||
}
|
||||
|
||||
/**
|
||||
* Synchronize the database, creating it if it does not exist.
|
||||
*/
|
||||
sync() {
|
||||
// Uses sync to prevent ugly race condition
|
||||
try {
|
||||
require('mkdirp').sync(Path.dirname(this.path));
|
||||
} catch(err) {
|
||||
// perhaps a logger instance?
|
||||
/* eslint no-empty:off */
|
||||
}
|
||||
fs.writeFileSync(this.path, JSON.stringify(this.data));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
module.exports = LocalData;
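/**
 * A minimal sketch of how the database is used (the path is an assumption):
 *
 *   const LocalData = require('./local-data');
 *   const db = new LocalData('/path/to/storage/.sinopia-db.json');
 *   db.add('my-package');        // persisted immediately via sync()
 *   db.get();                    // ['my-package']
 *   db.remove('my-package');
 */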
|
||||
253
lib/storage/local/local-fs.js
Normal file
253
lib/storage/local/local-fs.js
Normal file
@@ -0,0 +1,253 @@
|
||||
/* eslint prefer-spread: "off" */
|
||||
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const createError = require('http-errors');
|
||||
const mkdirp = require('mkdirp');
|
||||
const MyStream = require('../streams');
|
||||
const locker = require('../../file-locking');
|
||||
const fileExist = 'EEXISTS';
|
||||
const noSuchFile = 'ENOENT';
|
||||
|
||||
const fSError = function(code) {
|
||||
const err = createError(code);
|
||||
err.code = code;
|
||||
return err;
|
||||
};
|
||||
|
||||
const readFile = function(name) {
|
||||
return new Promise((resolve, reject) => {
|
||||
fs.readFile(name, (err, data) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
resolve(data);
|
||||
}
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
const tempFile = function(str) {
|
||||
return `${str}.tmp${String(Math.random()).substr(2)}`;
|
||||
};
|
||||
|
||||
const renameTmp = function(src, dst, _cb) {
|
||||
const cb = function(err) {
|
||||
if (err) {
|
||||
fs.unlink(src, function() {});
|
||||
}
|
||||
_cb(err);
|
||||
};
|
||||
|
||||
if (process.platform !== 'win32') {
|
||||
return fs.rename(src, dst, cb);
|
||||
}
|
||||
|
||||
// windows can't remove opened file,
|
||||
// but it seems to be able to rename it
|
||||
const tmp = tempFile(dst);
|
||||
fs.rename(dst, tmp, function(err) {
|
||||
fs.rename(src, dst, cb);
|
||||
if (!err) {
|
||||
fs.unlink(tmp, () => {});
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
const writeFile = function(dest, data, cb) {
|
||||
const createTempFile = function(cb) {
|
||||
const tempFilePath = tempFile(dest);
|
||||
fs.writeFile(tempFilePath, data, function(err) {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
renameTmp(tempFilePath, dest, cb);
|
||||
});
|
||||
};
|
||||
|
||||
createTempFile(function(err) {
|
||||
if (err && err.code === noSuchFile) {
|
||||
mkdirp(path.dirname(dest), function(err) {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
createTempFile(cb);
|
||||
});
|
||||
} else {
|
||||
cb(err);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
const createWriteStream = function(name) {
|
||||
const uploadStream = new MyStream.UploadTarball();
|
||||
let _ended = 0;
|
||||
uploadStream.on('end', function() {
|
||||
_ended = 1;
|
||||
});
|
||||
|
||||
fs.exists(name, function(exists) {
|
||||
if (exists) {
|
||||
return uploadStream.emit('error', fSError(fileExist));
|
||||
}
|
||||
|
||||
const temporalName = `${name}.tmp-${String(Math.random()).replace(/^0\./, '')}`;
|
||||
const file = fs.createWriteStream(temporalName);
|
||||
let opened = false;
|
||||
uploadStream.pipe(file);
|
||||
|
||||
uploadStream.done = function() {
|
||||
const onend = function() {
|
||||
file.on('close', function() {
|
||||
renameTmp(temporalName, name, function(err) {
|
||||
if (err) {
|
||||
uploadStream.emit('error', err);
|
||||
} else {
|
||||
uploadStream.emit('success');
|
||||
}
|
||||
});
|
||||
});
|
||||
file.destroySoon();
|
||||
};
|
||||
if (_ended) {
|
||||
onend();
|
||||
} else {
|
||||
uploadStream.on('end', onend);
|
||||
}
|
||||
};
|
||||
uploadStream.abort = function() {
|
||||
if (opened) {
|
||||
opened = false;
|
||||
file.on('close', function() {
|
||||
fs.unlink(temporalName, function() {});
|
||||
});
|
||||
}
|
||||
file.destroySoon();
|
||||
};
|
||||
file.on('open', function() {
|
||||
opened = true;
|
||||
// re-emitting open because it's handled in storage.js
|
||||
uploadStream.emit('open');
|
||||
});
|
||||
file.on('error', function(err) {
|
||||
uploadStream.emit('error', err);
|
||||
});
|
||||
});
|
||||
return uploadStream;
|
||||
};
|
||||
|
||||
const createReadStream = function(name, readTarballStream, callback) {
|
||||
let readStream = fs.createReadStream(name);
|
||||
readStream.on('error', function(err) {
|
||||
readTarballStream.emit('error', err);
|
||||
});
|
||||
readStream.on('open', function(fd) {
|
||||
fs.fstat(fd, function(err, stats) {
|
||||
if (err) return readTarballStream.emit('error', err);
|
||||
readTarballStream.emit('content-length', stats.size);
|
||||
readTarballStream.emit('open');
|
||||
readStream.pipe(readTarballStream);
|
||||
});
|
||||
});
|
||||
|
||||
readTarballStream = new MyStream.ReadTarball();
|
||||
readTarballStream.abort = function() {
|
||||
readStream.close();
|
||||
};
|
||||
return readTarballStream;
|
||||
};
|
||||
|
||||
const createFile = function(name, contents, callback) {
|
||||
fs.exists(name, function(exists) {
|
||||
if (exists) {
|
||||
return callback( fSError(fileExist) );
|
||||
}
|
||||
writeFile(name, contents, callback);
|
||||
});
|
||||
};
|
||||
|
||||
const updateFile = function(name, contents, callback) {
|
||||
fs.exists(name, function(exists) {
|
||||
if (!exists) {
|
||||
return callback( fSError(noSuchFile) );
|
||||
}
|
||||
writeFile(name, contents, callback);
|
||||
});
|
||||
};
|
||||
|
||||
const readJSON = function(name, cb) {
|
||||
readFile(name).then(function(res) {
|
||||
let args = [];
|
||||
try {
|
||||
args = [null, JSON.parse(res.toString('utf8'))];
|
||||
} catch(err) {
|
||||
args = [err];
|
||||
}
|
||||
cb.apply(null, args);
|
||||
}, function(err) {
|
||||
return cb(err);
|
||||
});
|
||||
};
|
||||
|
||||
const lock_and_read = function(name, cb) {
|
||||
locker.readFile(name, {lock: true}, function(err, res) {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
return cb(null, res);
|
||||
});
|
||||
};
|
||||
|
||||
const lockAndReadJSON = function(name, cb) {
|
||||
locker.readFile(name, {lock: true, parse: true}, function(err, res) {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
return cb(null, res);
|
||||
});
|
||||
};
|
||||
|
||||
const unlock_file = function(name, cb) {
|
||||
locker.unlockFile(name, cb);
|
||||
};
|
||||
|
||||
const createJSON = function(name, value, cb) {
|
||||
createFile(name, JSON.stringify(value, null, '\t'), cb);
|
||||
};
|
||||
|
||||
|
||||
const updateJSON = function(name, value, cb) {
|
||||
updateFile(name, JSON.stringify(value, null, '\t'), cb);
|
||||
};
|
||||
|
||||
|
||||
const writeJSON = function(name, value, cb) {
|
||||
writeFile(name, JSON.stringify(value, null, '\t'), cb);
|
||||
};
|
||||
|
||||
// fs
|
||||
module.exports.unlink = fs.unlink;
|
||||
module.exports.rmdir = fs.rmdir;
|
||||
|
||||
// streams
|
||||
module.exports.createWriteStream = createWriteStream;
|
||||
module.exports.createReadStream = createReadStream;
|
||||
|
||||
// io
|
||||
module.exports.read = readFile;
|
||||
module.exports.write = writeFile;
|
||||
module.exports.update = updateFile;
|
||||
module.exports.create = createFile;
|
||||
|
||||
// json
|
||||
module.exports.readJSON = readJSON;
|
||||
module.exports.lockAndReadJSON = lockAndReadJSON;
|
||||
module.exports.writeJSON = writeJSON;
|
||||
module.exports.updateJSON = updateJSON;
|
||||
module.exports.createJSON = createJSON;
|
||||
|
||||
// lock
|
||||
module.exports.unlock_file = unlock_file;
|
||||
module.exports.lock_and_read = lock_and_read;
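// Illustrative round-trip with the JSON helpers (the file name is hypothetical):
//
//   const localFs = require('./local-fs');
//   localFs.createJSON('my-pkg/package.json', {name: 'my-pkg'}, function(err) {
//     if (err) throw err;                        // err.code === 'EEXISTS' if the file already exists
//     localFs.readJSON('my-pkg/package.json', function(err, data) {
//       console.log(data.name);                  // 'my-pkg'
//     });
//   });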
|
||||
880
lib/storage/local/local-storage.js
Normal file
880
lib/storage/local/local-storage.js
Normal file
@@ -0,0 +1,880 @@
|
||||
/* eslint prefer-rest-params: "off" */
|
||||
/* eslint prefer-spread: "off" */
|
||||
'use strict';
|
||||
|
||||
const assert = require('assert');
|
||||
const Crypto = require('crypto');
|
||||
const fs = require('fs');
|
||||
const Path = require('path');
|
||||
const Stream = require('stream');
|
||||
const URL = require('url');
|
||||
const async = require('async');
|
||||
const createError = require('http-errors');
|
||||
const _ = require('lodash');
|
||||
|
||||
const fsStorage = require('./local-fs');
|
||||
const LocalData = require('./local-data');
|
||||
const Logger = require('../../logger');
|
||||
const customStream = require('../streams');
|
||||
const Utils = require('../../utils');
|
||||
|
||||
const pkgFileName = 'package.json';
|
||||
const fileExist = 'EEXISTS';
|
||||
const noSuchFile = 'ENOENT';
|
||||
const resourceNotAvailable = 'EAGAIN';
|
||||
|
||||
const generatePackageTemplate = function(name) {
|
||||
return {
|
||||
// standard things
|
||||
'name': name,
|
||||
'versions': {},
|
||||
'dist-tags': {},
|
||||
'time': {},
|
||||
|
||||
// our own object
|
||||
'_distfiles': {},
|
||||
'_attachments': {},
|
||||
'_uplinks': {},
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Implements Storage interface (same for storage.js, local-storage.js, up-storage.js).
|
||||
*/
|
||||
class LocalStorage {
|
||||
/**
|
||||
* Constructor
|
||||
* @param {Object} config config list of properties
|
||||
*/
|
||||
constructor(config) {
|
||||
this.config = config;
|
||||
this.localList = new LocalData(this._buildStoragePath(this.config));
|
||||
this.logger = Logger.logger.child({sub: 'fs'});
|
||||
}
|
||||
|
||||
/**
|
||||
* Build the local database path.
|
||||
* @param {Object} config
|
||||
* @return {string|String|*}
|
||||
* @private
|
||||
*/
|
||||
_buildStoragePath(config) {
|
||||
// FUTURE: the database might be parameterizable from config.yaml
|
||||
return Path.join(Path.resolve(Path.dirname(config.self_path || ''),
|
||||
config.storage,
|
||||
'.sinopia-db.json'
|
||||
));
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Add a package.
|
||||
* @param {*} name
|
||||
* @param {*} info
|
||||
* @param {*} callback
|
||||
* @return {Function}
|
||||
*/
|
||||
addPackage(name, info, callback) {
|
||||
const storage = this.storage(name);
|
||||
|
||||
if (!storage) {
|
||||
return callback( createError(404, 'this package cannot be added'));
|
||||
}
|
||||
|
||||
storage.createJSON(pkgFileName, generatePackageTemplate(name), function(err) {
|
||||
if (err && err.code === fileExist) {
|
||||
return callback( createError(409, 'this package is already present'));
|
||||
}
|
||||
const latest = info['dist-tags'].latest;
|
||||
if (latest && info.versions[latest]) {
|
||||
return callback(null, info.versions[latest]);
|
||||
}
|
||||
return callback();
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove package.
|
||||
* @param {*} name
|
||||
* @param {*} callback
|
||||
* @return {Function}
|
||||
*/
|
||||
removePackage(name, callback) {
|
||||
this.logger.info( {name: name}, 'unpublishing @{name} (all)');
|
||||
|
||||
let storage = this.storage(name);
|
||||
if (!storage) {
|
||||
return callback( createError(404, 'no such package available'));
|
||||
}
|
||||
|
||||
storage.readJSON(pkgFileName, (err, data) => {
|
||||
if (err) {
|
||||
if (err.code === noSuchFile) {
|
||||
return callback( createError(404, 'no such package available'));
|
||||
} else {
|
||||
return callback(err);
|
||||
}
|
||||
}
|
||||
this._normalizePackage(data);
|
||||
|
||||
storage.unlink(pkgFileName, function(err) {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
const files = Object.keys(data._attachments);
|
||||
|
||||
const unlinkNext = function(cb) {
|
||||
if (files.length === 0) {
|
||||
return cb();
|
||||
}
|
||||
|
||||
let file = files.shift();
|
||||
storage.unlink(file, function() {
|
||||
unlinkNext(cb);
|
||||
});
|
||||
};
|
||||
|
||||
unlinkNext(function() {
|
||||
// try to unlink the directory, but ignore errors because it can fail
|
||||
storage.rmdir('.', function(err) {
|
||||
callback(err);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
this.localList.remove(name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Synchronize remote package info with the local one
|
||||
* @param {*} name
|
||||
* @param {*} packageInfo
|
||||
* @param {*} callback
|
||||
*/
|
||||
updateVersions(name, packageInfo, callback) {
|
||||
this._readCreatePackage(name, (err, packageLocalJson) => {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
let change = false;
|
||||
for (let versionId in packageInfo.versions) {
|
||||
if (_.isNil(packageLocalJson.versions[versionId])) {
|
||||
const version = packageInfo.versions[versionId];
|
||||
|
||||
// we don't keep readmes for package versions,
|
||||
// only one readme per package
|
||||
delete version.readme;
|
||||
|
||||
change = true;
|
||||
packageLocalJson.versions[versionId] = version;
|
||||
|
||||
if (version.dist && version.dist.tarball) {
|
||||
let filename = URL.parse(version.dist.tarball).pathname.replace(/^.*\//, '');
|
||||
// we do NOT overwrite any existing records
|
||||
if (_.isNil(packageLocalJson._distfiles[filename])) {
|
||||
let hash = packageLocalJson._distfiles[filename] = {
|
||||
url: version.dist.tarball,
|
||||
sha: version.dist.shasum,
|
||||
};
|
||||
// if (verdata[Symbol('_verdaccio_uplink')]) {
|
||||
if (version._verdaccio_uplink) {
|
||||
// if we got this information from a known registry,
|
||||
// use the same protocol for the tarball
|
||||
//
|
||||
// see https://github.com/rlidwka/sinopia/issues/166
|
||||
const tarballUrl = URL.parse(hash.url);
|
||||
const uplinkUrl = URL.parse(this.config.uplinks[version._verdaccio_uplink].url);
|
||||
if (uplinkUrl.host === tarballUrl.host) {
|
||||
tarballUrl.protocol = uplinkUrl.protocol;
|
||||
hash.registry = version._verdaccio_uplink;
|
||||
hash.url = URL.format(tarballUrl);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for (let tag in packageInfo['dist-tags']) {
|
||||
if (!packageLocalJson['dist-tags'][tag] || packageLocalJson['dist-tags'][tag] !== packageInfo['dist-tags'][tag]) {
|
||||
change = true;
|
||||
packageLocalJson['dist-tags'][tag] = packageInfo['dist-tags'][tag];
|
||||
}
|
||||
}
|
||||
for (let up in packageInfo._uplinks) {
|
||||
if (Object.prototype.hasOwnProperty.call(packageInfo._uplinks, up)) {
|
||||
const need_change = !Utils.is_object(packageLocalJson._uplinks[up])
|
||||
|| packageInfo._uplinks[up].etag !== packageLocalJson._uplinks[up].etag
|
||||
|| packageInfo._uplinks[up].fetched !== packageLocalJson._uplinks[up].fetched;
|
||||
|
||||
if (need_change) {
|
||||
change = true;
|
||||
packageLocalJson._uplinks[up] = packageInfo._uplinks[up];
|
||||
}
|
||||
}
|
||||
}
|
||||
if (packageInfo.readme !== packageLocalJson.readme) {
|
||||
packageLocalJson.readme = packageInfo.readme;
|
||||
change = true;
|
||||
}
|
||||
|
||||
if ('time' in packageInfo) {
|
||||
packageLocalJson.time = packageInfo.time;
|
||||
change = true;
|
||||
}
|
||||
|
||||
if (change) {
|
||||
this.logger.debug('updating package info');
|
||||
this._writePackage(name, packageLocalJson, function(err) {
|
||||
callback(err, packageLocalJson);
|
||||
});
|
||||
} else {
|
||||
callback(null, packageLocalJson);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a new version to a previously created local package.
|
||||
* @param {*} name
|
||||
* @param {*} version
|
||||
* @param {*} metadata
|
||||
* @param {*} tag
|
||||
* @param {*} callback
|
||||
*/
|
||||
addVersion(name, version, metadata, tag, callback) {
|
||||
this._updatePackage(name, (data, cb) => {
|
||||
// keep only one readme per package
|
||||
data.readme = metadata.readme;
|
||||
delete metadata.readme;
|
||||
|
||||
if (data.versions[version] != null) {
|
||||
return cb( createError[409]('this version already present') );
|
||||
}
|
||||
|
||||
// if the uploaded tarball has a different shasum, it's very likely that we have some kind of error
|
||||
if (Utils.is_object(metadata.dist) && typeof(metadata.dist.tarball) === 'string') {
|
||||
let tarball = metadata.dist.tarball.replace(/.*\//, '');
|
||||
if (Utils.is_object(data._attachments[tarball])) {
|
||||
if (data._attachments[tarball].shasum != null && metadata.dist.shasum != null) {
|
||||
if (data._attachments[tarball].shasum != metadata.dist.shasum) {
|
||||
return cb( createError[400]('shasum error, '
|
||||
+ data._attachments[tarball].shasum
|
||||
+ ' != ' + metadata.dist.shasum) );
|
||||
}
|
||||
}
|
||||
let currentDate = new Date().toISOString();
|
||||
data.time['modified'] = currentDate;
|
||||
if (('created' in data.time) === false) {
|
||||
data.time.created = currentDate;
|
||||
}
|
||||
data.time[version] = currentDate;
|
||||
data._attachments[tarball].version = version;
|
||||
}
|
||||
}
|
||||
|
||||
data.versions[version] = metadata;
|
||||
Utils.tag_version(data, version, tag);
|
||||
this.localList.add(name);
|
||||
cb();
|
||||
}, callback);
|
||||
}
|
||||
|
||||
/**
|
||||
* Merge a new list of tags for a local package with the existing one.
|
||||
* @param {*} name
|
||||
* @param {*} tags
|
||||
* @param {*} callback
|
||||
*/
|
||||
mergeTags(name, tags, callback) {
|
||||
this._updatePackage(name, function updater(data, cb) {
|
||||
for (let t in tags) {
|
||||
if (tags[t] === null) {
|
||||
delete data['dist-tags'][t];
|
||||
continue;
|
||||
}
|
||||
// be careful here with == (cast)
|
||||
if (_.isNil(data.versions[tags[t]])) {
|
||||
return cb( createError[404]('this version doesn\'t exist') );
|
||||
}
|
||||
|
||||
Utils.tag_version(data, tags[t], t);
|
||||
}
|
||||
cb();
|
||||
}, callback);
|
||||
}
|
||||
|
||||
/**
|
||||
* Replace the complete list of tags for a local package.
|
||||
* @param {*} name
|
||||
* @param {*} tags
|
||||
* @param {*} callback
|
||||
*/
|
||||
replaceTags(name, tags, callback) {
|
||||
this._updatePackage(name, function updater(data, cb) {
|
||||
data['dist-tags'] = {};
|
||||
|
||||
for (let t in tags) {
|
||||
if (_.isNull(tags[t])) {
|
||||
delete data['dist-tags'][t];
|
||||
continue;
|
||||
}
|
||||
|
||||
if (_.isNil(data.versions[tags[t]])) {
|
||||
return cb( createError[404]('this version doesn\'t exist') );
|
||||
}
|
||||
|
||||
Utils.tag_version(data, tags[t], t);
|
||||
}
|
||||
cb();
|
||||
}, callback);
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the package metadata, tags and attachments (tarballs).
|
||||
* Note: Currently supports unpublishing only.
|
||||
* @param {*} name
|
||||
* @param {*} metadata
|
||||
* @param {*} revision
|
||||
* @param {*} callback
|
||||
* @return {Function}
|
||||
*/
|
||||
changePackage(name, metadata, revision, callback) {
|
||||
if (!Utils.is_object(metadata.versions) || !Utils.is_object(metadata['dist-tags'])) {
|
||||
return callback( createError[422]('bad data') );
|
||||
}
|
||||
|
||||
this._updatePackage(name, (data, cb) => {
|
||||
for (let ver in data.versions) {
|
||||
if (_.isNil(metadata.versions[ver])) {
|
||||
this.logger.info( {name: name, version: ver},
|
||||
'unpublishing @{name}@@{version}');
|
||||
delete data.versions[ver];
|
||||
for (let file in data._attachments) {
|
||||
if (data._attachments[file].version === ver) {
|
||||
delete data._attachments[file].version;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
data['dist-tags'] = metadata['dist-tags'];
|
||||
cb();
|
||||
}, function(err) {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
callback();
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove a tarball.
|
||||
* @param {*} name
|
||||
* @param {*} filename
|
||||
* @param {*} revision
|
||||
* @param {*} callback
|
||||
*/
|
||||
removeTarball(name, filename, revision, callback) {
|
||||
assert(Utils.validate_name(filename));
|
||||
|
||||
this._updatePackage(name, (data, cb) => {
|
||||
if (data._attachments[filename]) {
|
||||
delete data._attachments[filename];
|
||||
cb();
|
||||
} else {
|
||||
cb(createError[404]('no such file available'));
|
||||
}
|
||||
}, (err) => {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
let storage = this.storage(name);
|
||||
if (storage) {
|
||||
storage.unlink(filename, callback);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a tarball.
|
||||
* @param {String} name
|
||||
* @param {String} filename
|
||||
* @return {Stream}
|
||||
*/
|
||||
addTarball(name, filename) {
|
||||
assert(Utils.validate_name(filename));
|
||||
|
||||
let length = 0;
|
||||
const shaOneHash = Crypto.createHash('sha1');
|
||||
const uploadStream = new customStream.UploadTarball();
|
||||
const _transform = uploadStream._transform;
|
||||
const storage = this.storage(name);
|
||||
uploadStream.abort = function() {};
|
||||
uploadStream.done = function() {};
|
||||
|
||||
uploadStream._transform = function(data) {
|
||||
shaOneHash.update(data);
|
||||
// measure the length for validation reasons
|
||||
length += data.length;
|
||||
_transform.apply(uploadStream, arguments);
|
||||
};
|
||||
|
||||
if (name === pkgFileName || name === '__proto__') {
|
||||
process.nextTick(function() {
|
||||
uploadStream.emit('error', createError[403]('can\'t use this filename'));
|
||||
});
|
||||
return uploadStream;
|
||||
}
|
||||
|
||||
if (!storage) {
|
||||
process.nextTick(function() {
|
||||
uploadStream.emit('error', createError[404]('can\'t upload this package'));
|
||||
});
|
||||
return uploadStream;
|
||||
}
|
||||
|
||||
const writeStream = storage.createWriteStream(filename);
|
||||
|
||||
writeStream.on('error', (err) => {
|
||||
if (err.code === fileExist) {
|
||||
uploadStream.emit('error', createError[409]('this tarball is already present'));
|
||||
} else if (err.code === noSuchFile) {
|
||||
// check if package exists to throw an appropriate message
|
||||
this.getPackage(name, function(_err, res) {
|
||||
if (_err) {
|
||||
uploadStream.emit('error', _err);
|
||||
} else {
|
||||
uploadStream.emit('error', err);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
uploadStream.emit('error', err);
|
||||
}
|
||||
});
|
||||
|
||||
writeStream.on('open', function() {
|
||||
// re-emitting open because it's handled in storage.js
|
||||
uploadStream.emit('open');
|
||||
});
|
||||
|
||||
writeStream.on('success', () => {
|
||||
this._updatePackage(name, function updater(data, cb) {
|
||||
data._attachments[filename] = {
|
||||
shasum: shaOneHash.digest('hex'),
|
||||
};
|
||||
cb();
|
||||
}, function(err) {
|
||||
if (err) {
|
||||
uploadStream.emit('error', err);
|
||||
} else {
|
||||
uploadStream.emit('success');
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
uploadStream.abort = function() {
|
||||
writeStream.abort();
|
||||
};
|
||||
|
||||
uploadStream.done = function() {
|
||||
if (!length) {
|
||||
uploadStream.emit('error', createError[422]('refusing to accept zero-length file'));
|
||||
writeStream.abort();
|
||||
} else {
|
||||
writeStream.done();
|
||||
}
|
||||
};
|
||||
|
||||
uploadStream.pipe(writeStream);
|
||||
|
||||
return uploadStream;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a tarball.
|
||||
* @param {*} name
|
||||
* @param {*} filename
|
||||
* @param {*} callback
|
||||
* @return {Function}
|
||||
*/
|
||||
getTarball(name, filename, callback) {
|
||||
assert(Utils.validate_name(filename));
|
||||
const stream = new customStream.ReadTarball();
|
||||
stream.abort = function() {
|
||||
if (rstream) {
|
||||
rstream.abort();
|
||||
}
|
||||
};
|
||||
|
||||
let storage = this.storage(name);
|
||||
if (!storage) {
|
||||
process.nextTick(function() {
|
||||
stream.emit('error', createError[404]('no such file available'));
|
||||
});
|
||||
return stream;
|
||||
}
|
||||
/* eslint no-var: "off" */
|
||||
var rstream = storage.createReadStream(filename);
|
||||
rstream.on('error', function(err) {
|
||||
if (err && err.code === noSuchFile) {
|
||||
stream.emit('error', createError(404, 'no such file available'));
|
||||
} else {
|
||||
stream.emit('error', err);
|
||||
}
|
||||
});
|
||||
rstream.on('content-length', function(v) {
|
||||
stream.emit('content-length', v);
|
||||
});
|
||||
rstream.on('open', function() {
|
||||
// re-emitting open because it's handled in storage.js
|
||||
stream.emit('open');
|
||||
rstream.pipe(stream);
|
||||
});
|
||||
return stream;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve a package by name.
|
||||
* @param {*} name
|
||||
* @param {*} options
|
||||
* @param {*} callback
|
||||
* @return {Function}
|
||||
*/
|
||||
getPackage(name, options, callback) {
|
||||
if (_.isFunction(options)) {
|
||||
callback = options || {};
|
||||
}
|
||||
|
||||
let storage = this.storage(name);
|
||||
if (!storage) {
|
||||
return callback( createError[404]('no such package available') );
|
||||
}
|
||||
|
||||
storage.readJSON(pkgFileName, (err, result) => {
|
||||
if (err) {
|
||||
if (err.code === noSuchFile) {
|
||||
return callback( createError[404]('no such package available') );
|
||||
} else {
|
||||
return callback(this._internalError(err, pkgFileName, 'error reading'));
|
||||
}
|
||||
}
|
||||
this._normalizePackage(result);
|
||||
callback(err, result);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Search a local package.
|
||||
* @param {*} startKey
|
||||
* @param {*} options
|
||||
* @return {Function}
|
||||
*/
|
||||
search(startKey, options) {
|
||||
const stream = new Stream.PassThrough({objectMode: true});
|
||||
|
||||
this._eachPackage((item, cb) => {
|
||||
fs.stat(item.path, (err, stats) => {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
|
||||
if (stats.mtime > startKey) {
|
||||
this.getPackage(item.name, options, function(err, data) {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
|
||||
const versions = Utils.semver_sort(Object.keys(data.versions));
|
||||
const latest = data['dist-tags'] && data['dist-tags'].latest ? data['dist-tags'].latest : versions.pop();
|
||||
|
||||
if (data.versions[latest]) {
|
||||
const version = data.versions[latest];
|
||||
stream.push({
|
||||
'name': version.name,
|
||||
'description': version.description,
|
||||
'dist-tags': {latest: latest},
|
||||
'maintainers': version.maintainers || [version.author].filter(Boolean),
|
||||
'author': version.author,
|
||||
'repository': version.repository,
|
||||
'readmeFilename': version.readmeFilename || '',
|
||||
'homepage': version.homepage,
|
||||
'keywords': version.keywords,
|
||||
'bugs': version.bugs,
|
||||
'license': version.license,
|
||||
'time': {
|
||||
modified: item.time ? new Date(item.time).toISOString() : undefined,
|
||||
},
|
||||
'versions': {},
|
||||
});
|
||||
}
|
||||
|
||||
cb();
|
||||
});
|
||||
} else {
|
||||
cb();
|
||||
}
|
||||
});
|
||||
}, function on_end(err) {
|
||||
if (err) return stream.emit('error', err);
|
||||
stream.end();
|
||||
});
|
||||
|
||||
return stream;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve a wrapper that provides access to the package location.
|
||||
* @param {*} pkg package name.
|
||||
* @return {Object}
|
||||
*/
|
||||
storage(pkg) {
|
||||
let path = this.config.getMatchedPackagesSpec(pkg).storage;
|
||||
if (_.isNil(path)) {
|
||||
path = this.config.storage;
|
||||
}
|
||||
if (_.isNil(path) || path === false) {
|
||||
this.logger.debug( {name: pkg}, 'this package has no storage defined: @{name}' );
|
||||
return null;
|
||||
}
|
||||
return new PathWrapper(
|
||||
Path.join(
|
||||
Path.resolve(Path.dirname(this.config.self_path || ''), path),
|
||||
pkg
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Walks through each package and calls `onPackage` on each of them.
|
||||
* @param {*} onPackage
|
||||
* @param {*} on_end
|
||||
*/
|
||||
_eachPackage(onPackage, on_end) {
|
||||
let storages = {};
|
||||
|
||||
storages[this.config.storage] = true;
|
||||
if (this.config.packages) {
|
||||
Object.keys(this.config.packages || {}).map( (pkg) => {
|
||||
if (this.config.packages[pkg].storage) {
|
||||
storages[this.config.packages[pkg].storage] = true;
|
||||
}
|
||||
});
|
||||
}
|
||||
const base = Path.dirname(this.config.self_path);
|
||||
|
||||
async.eachSeries(Object.keys(storages), function(storage, cb) {
|
||||
fs.readdir(Path.resolve(base, storage), function(err, files) {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
|
||||
async.eachSeries(files, function(file, cb) {
|
||||
if (file.match(/^@/)) {
|
||||
// scoped
|
||||
fs.readdir(Path.resolve(base, storage, file), function(err, files) {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
|
||||
async.eachSeries(files, function(file2, cb) {
|
||||
if (Utils.validate_name(file2)) {
|
||||
onPackage({
|
||||
name: `${file}/${file2}`,
|
||||
path: Path.resolve(base, storage, file, file2),
|
||||
}, cb);
|
||||
} else {
|
||||
cb();
|
||||
}
|
||||
}, cb);
|
||||
});
|
||||
} else if (Utils.validate_name(file)) {
|
||||
onPackage({
|
||||
name: file,
|
||||
path: Path.resolve(base, storage, file),
|
||||
}, cb);
|
||||
} else {
|
||||
cb();
|
||||
}
|
||||
}, cb);
|
||||
});
|
||||
}, on_end);
|
||||
|
||||
// Object.keys(storages).reduce(() => {
|
||||
//
|
||||
// }, Promise.resolve());
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalise package properties, tags, revision id.
|
||||
* @param {Object} pkg package reference.
|
||||
*/
|
||||
_normalizePackage(pkg) {
|
||||
['versions', 'dist-tags', '_distfiles', '_attachments', '_uplinks', 'time'].forEach(function(key) {
|
||||
if (!Utils.is_object(pkg[key])) {
|
||||
pkg[key] = {};
|
||||
}
|
||||
});
|
||||
if (typeof(pkg._rev) !== 'string') {
|
||||
pkg._rev = '0-0000000000000000';
|
||||
}
|
||||
// normalize dist-tags
|
||||
Utils.normalize_dist_tags(pkg);
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve either a previously created local package or a boilerplate.
|
||||
* @param {*} name
|
||||
* @param {*} callback
|
||||
* @return {Function}
|
||||
*/
|
||||
_readCreatePackage(name, callback) {
|
||||
const storage = this.storage(name);
|
||||
if (!storage) {
|
||||
const data = generatePackageTemplate(name);
|
||||
this._normalizePackage(data);
|
||||
return callback(null, data);
|
||||
}
|
||||
storage.readJSON(pkgFileName, (err, data) => {
|
||||
// TODO: race condition
|
||||
if (err) {
|
||||
if (err.code === noSuchFile) {
|
||||
// if package doesn't exist, we create it here
|
||||
data = generatePackageTemplate(name);
|
||||
} else {
|
||||
return callback(this._internalError(err, pkgFileName, 'error reading'));
|
||||
}
|
||||
}
|
||||
this._normalizePackage(data);
|
||||
callback(null, data);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle internal error
|
||||
* @param {*} err
|
||||
* @param {*} file
|
||||
* @param {*} message
|
||||
* @return {Object} Error instance
|
||||
*/
|
||||
_internalError(err, file, message) {
|
||||
this.logger.error( {err: err, file: file},
|
||||
message + ' @{file}: @{!err.message}' );
|
||||
return createError[500]();
|
||||
}
|
||||
|
||||
/**
|
||||
* This function allows updating the package thread-safely.
|
||||
Algorithm:
|
||||
1. lock package.json for writing
|
||||
2. read package.json
|
||||
3. updateFn(pkg, cb), and wait for cb
|
||||
4. write package.json.tmp
|
||||
5. move package.json.tmp package.json
|
||||
6. callback(err?)
|
||||
* @param {*} name package name
|
||||
* @param {*} updateFn function(package, cb) - update function
|
||||
* @param {*} _callback callback that gets invoked after it's all updated
|
||||
* @return {Function}
|
||||
*/
|
||||
_updatePackage(name, updateFn, _callback) {
|
||||
const storage = this.storage(name);
|
||||
if (!storage) {
|
||||
return _callback( createError[404]('no such package available') );
|
||||
}
|
||||
storage.lockAndReadJSON(pkgFileName, (err, json) => {
|
||||
let locked = false;
|
||||
|
||||
// callback that cleans up lock first
|
||||
const callback = function(err) {
|
||||
let _args = arguments;
|
||||
if (locked) {
|
||||
storage.unlock_file(pkgFileName, function() {
|
||||
// ignore any error from the unlock
|
||||
_callback.apply(err, _args);
|
||||
});
|
||||
} else {
|
||||
_callback.apply(null, _args);
|
||||
}
|
||||
};
|
||||
|
||||
if (!err) {
|
||||
locked = true;
|
||||
}
|
||||
|
||||
if (err) {
|
||||
if (err.code === resourceNotAvailable) {
|
||||
return callback( createError[503]('resource temporarily unavailable') );
|
||||
} else if (err.code === noSuchFile) {
|
||||
return callback( createError[404]('no such package available') );
|
||||
} else {
|
||||
return callback(err);
|
||||
}
|
||||
}
|
||||
|
||||
this._normalizePackage(json);
|
||||
updateFn(json, (err) => {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
this._writePackage(name, json, callback);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the revision (_rev) string for a package.
|
||||
* @param {*} name
|
||||
* @param {*} json
|
||||
* @param {*} callback
|
||||
* @return {Function}
|
||||
*/
|
||||
_writePackage(name, json, callback) {
|
||||
// calculate revision a la couchdb
|
||||
if (typeof(json._rev) !== 'string') {
|
||||
json._rev = '0-0000000000000000';
|
||||
}
|
||||
const rev = json._rev.split('-');
|
||||
json._rev = ((+rev[0] || 0) + 1) + '-' + Crypto.pseudoRandomBytes(8).toString('hex');
|
||||
|
||||
let storage = this.storage(name);
|
||||
if (!storage) {
|
||||
return callback();
|
||||
}
|
||||
storage.writeJSON(pkgFileName, json, callback);
|
||||
}
|
||||
}
|
||||
|
||||
const PathWrapper = (function() {
|
||||
/**
|
||||
* A wrapper adding paths to fsStorage methods.
|
||||
*/
|
||||
class Wrapper {
|
||||
|
||||
/**
|
||||
* @param {*} path
|
||||
*/
|
||||
constructor(path) {
|
||||
this.path = path;
|
||||
}
|
||||
}
|
||||
|
||||
const wrapLocalStorageMethods = function(method) {
|
||||
return function() {
|
||||
let args = Array.prototype.slice.apply(arguments);
|
||||
/* eslint no-invalid-this: off */
|
||||
args[0] = Path.join(this.path, args[0] || '');
|
||||
return fsStorage[method].apply(null, args);
|
||||
};
|
||||
};
|
||||
|
||||
for (let i in fsStorage) {
|
||||
if (fsStorage.hasOwnProperty(i)) {
|
||||
Wrapper.prototype[i] = wrapLocalStorageMethods(i);
|
||||
}
|
||||
}
|
||||
|
||||
return Wrapper;
|
||||
})();
|
||||
|
||||
module.exports = LocalStorage;
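/**
 * A rough sketch of how the storage layer drives this class (the config object
 * and callbacks are illustrative, not the real call sites):
 *
 *   const LocalStorage = require('./local-storage');
 *   const local = new LocalStorage(config);      // config provides storage, self_path, getMatchedPackagesSpec()
 *   local.addPackage('my-pkg', metadata, function(err, latestVersion) { ... });
 *   local.addVersion('my-pkg', '1.0.0', versionMetadata, 'latest', function(err) { ... });
 *   local.getPackage('my-pkg', function(err, pkg) { ... });
 */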
|
||||
68
lib/storage/streams.js
Normal file
68
lib/storage/streams.js
Normal file
@@ -0,0 +1,68 @@
|
||||
'use strict';
|
||||
|
||||
const Stream = require('stream');
|
||||
|
||||
/**
|
||||
* This stream is used to read tarballs from the repository.
|
||||
* @param {*} options
|
||||
* @return {Stream}
|
||||
*/
|
||||
class ReadTarball extends Stream.PassThrough {
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {Object} options
|
||||
*/
|
||||
constructor(options) {
|
||||
super(options);
|
||||
// called when data is not needed anymore
|
||||
add_abstract_method(this, 'abort');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This stream is used to upload tarballs to a repository.
|
||||
* @param {*} options
|
||||
* @return {Stream}
|
||||
*/
|
||||
class UploadTarball extends Stream.PassThrough {
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {Object} options
|
||||
*/
|
||||
constructor(options) {
|
||||
super(options);
|
||||
// called when user closes connection before upload finishes
|
||||
add_abstract_method(this, 'abort');
|
||||
|
||||
// called when upload finishes successfully
|
||||
add_abstract_method(this, 'done');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This function intercepts abstract calls and replays them, allowing
|
||||
* us to attach those functions after we are ready to do so
|
||||
* @param {*} self
|
||||
* @param {*} name
|
||||
*/
|
||||
function add_abstract_method(self, name) {
|
||||
self._called_methods = self._called_methods || {};
|
||||
self.__defineGetter__(name, function() {
|
||||
return function() {
|
||||
self._called_methods[name] = true;
|
||||
};
|
||||
});
|
||||
self.__defineSetter__(name, function(fn) {
|
||||
delete self[name];
|
||||
self[name] = fn;
|
||||
if (self._called_methods && self._called_methods[name]) {
|
||||
delete self._called_methods[name];
|
||||
self[name]();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
module.exports.ReadTarball = ReadTarball;
|
||||
module.exports.UploadTarball = UploadTarball;
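// Illustrative usage sketch (not part of this changeset): `abort` can be called
// before a real handler is attached; the getter records the call and the setter
// replays it as soon as a handler is assigned.
const exampleStream = new ReadTarball({});
exampleStream.abort();               // recorded: no real handler exists yet
exampleStream.abort = function() {   // assigning the handler replays the recorded call
  console.log('aborting upstream request');
};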
|
||||
480
lib/storage/up-storage.js
Normal file
@@ -0,0 +1,480 @@
|
||||
'use strict';
|
||||
|
||||
const JSONStream = require('JSONStream');
|
||||
const createError = require('http-errors');
|
||||
const _ = require('lodash');
|
||||
const request = require('request');
|
||||
const Stream = require('stream');
|
||||
const URL = require('url');
|
||||
const Logger = require('../logger');
|
||||
const MyStreams = require('./streams');
|
||||
const Utils = require('../utils');
|
||||
const zlib = require('zlib');
|
||||
const encode = function(thing) {
|
||||
return encodeURIComponent(thing).replace(/^%40/, '@');
|
||||
};
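// Illustrative results (sketch, not part of this changeset): the leading "@" of a
// scoped package name stays readable while the rest is URL-encoded.
const exampleScoped = encode('@scope/pkg'); // => '@scope%2Fpkg'
const examplePlain = encode('some pkg');    // => 'some%20pkg'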
|
||||
/**
|
||||
* Just a helper (`config[key] || default` doesn't work because of zeroes)
|
||||
* @param {Object} config
|
||||
* @param {Object} key
|
||||
* @param {Object} def
|
||||
* @return {String}
|
||||
*/
|
||||
const setConfig = (config, key, def) => {
|
||||
return _.isNil(config[key]) === false ? config[key] : def;
|
||||
};
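// Illustrative sketch (hypothetical config values): `config[key] || def` would
// discard an explicit zero, while setConfig keeps it.
const exampleConfig = {max_fails: 0};
const viaOr = exampleConfig.max_fails || 2;                 // => 2, the zero is lost
const viaHelper = setConfig(exampleConfig, 'max_fails', 2); // => 0, the zero is kept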
|
||||
|
||||
/**
|
||||
* Implements Storage interface
|
||||
* (same for storage.js, local-storage.js, up-storage.js)
|
||||
*/
|
||||
class ProxyStorage {
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* @param {*} config
|
||||
* @param {*} mainConfig
|
||||
*/
|
||||
constructor(config, mainConfig) {
|
||||
this.config = config;
|
||||
this.failed_requests = 0;
|
||||
this.userAgent = mainConfig.user_agent;
|
||||
this.ca = config.ca;
|
||||
this.logger = Logger.logger.child({sub: 'out'});
|
||||
this.server_id = mainConfig.server_id;
|
||||
|
||||
this.url = URL.parse(this.config.url);
|
||||
|
||||
this._setupProxy(this.url.hostname, config, mainConfig, this.url.protocol === 'https:');
|
||||
|
||||
this.config.url = this.config.url.replace(/\/$/, '');
|
||||
|
||||
if (Number(this.config.timeout) >= 1000) {
|
||||
this.logger.warn(['Too big timeout value: ' + this.config.timeout,
|
||||
'We changed time format to nginx-like one',
|
||||
'(see http://nginx.org/en/docs/syntax.html)',
|
||||
'so please update your config accordingly'].join('\n'));
|
||||
}
|
||||
|
||||
// a bunch of different configurable timers
|
||||
this.maxage = Utils.parseInterval(setConfig(this.config, 'maxage', '2m' ));
|
||||
this.timeout = Utils.parseInterval(setConfig(this.config, 'timeout', '30s'));
|
||||
this.max_fails = Number(setConfig(this.config, 'max_fails', 2 ));
|
||||
this.fail_timeout = Utils.parseInterval(setConfig(this.config, 'fail_timeout', '5m' ));
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch an asset.
|
||||
* @param {*} options
|
||||
* @param {*} cb
|
||||
* @return {Request}
|
||||
*/
|
||||
request(options, cb) {
|
||||
let json;
|
||||
if (this._statusCheck() === false) {
|
||||
let streamRead = new Stream.Readable();
|
||||
process.nextTick(function() {
|
||||
if (_.isFunction(cb)) {
|
||||
cb(createError('uplink is offline'));
|
||||
}
|
||||
streamRead.emit('error', createError('uplink is offline'));
|
||||
});
|
||||
streamRead._read = function() {};
|
||||
// preventing 'Uncaught, unspecified "error" event'
|
||||
streamRead.on('error', function() {});
|
||||
return streamRead;
|
||||
}
|
||||
|
||||
let self = this;
|
||||
let headers = options.headers || {};
|
||||
headers['Accept'] = headers['Accept'] || 'application/json';
|
||||
headers['Accept-Encoding'] = headers['Accept-Encoding'] || 'gzip';
|
||||
// registry.npmjs.org will only return search results if the user-agent includes the string 'npm'
|
||||
headers['User-Agent'] = headers['User-Agent'] || `npm (${this.userAgent})`;
|
||||
this._addProxyHeaders(options.req, headers);
|
||||
|
||||
// add/override headers specified in the config
|
||||
for (let key in this.config.headers) {
|
||||
if (Object.prototype.hasOwnProperty.call(this.config.headers, key)) {
|
||||
headers[key] = this.config.headers[key];
|
||||
}
|
||||
}
|
||||
|
||||
const method = options.method || 'GET';
|
||||
const uri = options.uri_full || (this.config.url + options.uri);
|
||||
|
||||
self.logger.info({
|
||||
method: method,
|
||||
headers: headers,
|
||||
uri: uri,
|
||||
}, 'making request: \'@{method} @{uri}\'');
|
||||
|
||||
if (Utils.is_object(options.json)) {
|
||||
json = JSON.stringify(options.json);
|
||||
headers['Content-Type'] = headers['Content-Type'] || 'application/json';
|
||||
}
|
||||
|
||||
let requestCallback = cb ? (function(err, res, body) {
|
||||
let error;
|
||||
const responseLength = err ? 0 : body.length;
|
||||
|
||||
processBody(err, body);
|
||||
logActivity();
|
||||
cb(err, res, body);
|
||||
|
||||
/**
|
||||
 * Decode and parse the response body.
|
||||
*/
|
||||
function processBody() {
|
||||
if (err) {
|
||||
error = err.message;
|
||||
return;
|
||||
}
|
||||
|
||||
if (options.json && res.statusCode < 300) {
|
||||
try {
|
||||
body = JSON.parse(body.toString('utf8'));
|
||||
} catch(_err) {
|
||||
body = {};
|
||||
err = _err;
|
||||
error = err.message;
|
||||
}
|
||||
}
|
||||
|
||||
if (!err && Utils.is_object(body)) {
|
||||
if (_.isString(body.error)) {
|
||||
error = body.error;
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
 * Log the request activity.
|
||||
*/
|
||||
function logActivity() {
|
||||
let message = '@{!status}, req: \'@{request.method} @{request.url}\'';
|
||||
message += error
|
||||
? ', error: @{!error}'
|
||||
: ', bytes: @{bytes.in}/@{bytes.out}';
|
||||
self.logger.warn({
|
||||
err: err,
|
||||
request: {method: method, url: uri},
|
||||
level: 35, // http
|
||||
status: res != null ? res.statusCode : 'ERR',
|
||||
error: error,
|
||||
bytes: {
|
||||
in: json ? json.length : 0,
|
||||
out: responseLength || 0,
|
||||
},
|
||||
}, message);
|
||||
}
|
||||
}) : undefined;
|
||||
|
||||
const req = request({
|
||||
url: uri,
|
||||
method: method,
|
||||
headers: headers,
|
||||
body: json,
|
||||
ca: this.ca,
|
||||
proxy: this.proxy,
|
||||
encoding: null,
|
||||
gzip: true,
|
||||
timeout: this.timeout,
|
||||
}, requestCallback);
|
||||
|
||||
let statusCalled = false;
|
||||
req.on('response', function(res) {
|
||||
if (!req._verdaccio_aborted && _.isNil(statusCalled) === false) {
|
||||
statusCalled = true;
|
||||
self._statusCheck(true);
|
||||
}
|
||||
|
||||
if (_.isNil(requestCallback) === false) {
|
||||
(function do_log() {
|
||||
const message = '@{!status}, req: \'@{request.method} @{request.url}\' (streaming)';
|
||||
self.logger.warn({
|
||||
request: {
|
||||
method: method,
|
||||
url: uri,
|
||||
},
|
||||
level: 35, // http
|
||||
status: _.isNull(res) === false ? res.statusCode : 'ERR',
|
||||
}, message);
|
||||
})();
|
||||
}
|
||||
});
|
||||
req.on('error', function(_err) {
|
||||
if (!req._verdaccio_aborted && !statusCalled) {
|
||||
statusCalled = true;
|
||||
self._statusCheck(false);
|
||||
}
|
||||
});
|
||||
return req;
|
||||
}
|
||||
|
||||
/**
|
||||
 * Determine whether we can fetch from the provided URL.
|
||||
* @param {*} url
|
||||
* @return {Boolean}
|
||||
*/
|
||||
isUplinkValid(url) {
|
||||
url = URL.parse(url);
|
||||
return url.protocol === this.url.protocol && url.host === this.url.host && url.path.indexOf(this.url.path) === 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a remote package.
|
||||
* @param {*} name
|
||||
* @param {*} options
|
||||
* @param {*} callback
|
||||
*/
|
||||
getRemotePackage(name, options, callback) {
|
||||
const headers = {};
|
||||
if (_.isNil(options.etag) === false) {
|
||||
headers['If-None-Match'] = options.etag;
|
||||
headers['Accept'] = 'application/octet-stream';
|
||||
}
|
||||
|
||||
this.request({
|
||||
uri: `/${encode(name)}`,
|
||||
json: true,
|
||||
headers: headers,
|
||||
req: options.req,
|
||||
}, (err, res, body) => {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
if (res.statusCode === 404) {
|
||||
return callback( createError[404]('package doesn\'t exist on uplink') );
|
||||
}
|
||||
if (!(res.statusCode >= 200 && res.statusCode < 300)) {
|
||||
const error = createError(`bad status code: ${res.statusCode}`);
|
||||
error.remoteStatus = res.statusCode;
|
||||
return callback(error);
|
||||
}
|
||||
callback(null, body, res.headers.etag);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
 * Get a url.
|
||||
* @param {String} url
|
||||
* @return {Stream}
|
||||
*/
|
||||
get_url(url) {
|
||||
const stream = new MyStreams.ReadTarball({});
|
||||
stream.abort = function() {};
|
||||
let current_length = 0;
|
||||
let expected_length;
|
||||
let readStream = this.request({
|
||||
uri_full: url,
|
||||
encoding: null,
|
||||
headers: {Accept: 'application/octet-stream'},
|
||||
});
|
||||
|
||||
readStream.on('response', function(res) {
|
||||
if (res.statusCode === 404) {
|
||||
return stream.emit('error', createError[404]('file doesn\'t exist on uplink'));
|
||||
}
|
||||
if (!(res.statusCode >= 200 && res.statusCode < 300)) {
|
||||
return stream.emit('error', createError('bad uplink status code: ' + res.statusCode));
|
||||
}
|
||||
if (res.headers['content-length']) {
|
||||
expected_length = res.headers['content-length'];
|
||||
stream.emit('content-length', res.headers['content-length']);
|
||||
}
|
||||
|
||||
readStream.pipe(stream);
|
||||
});
|
||||
|
||||
readStream.on('error', function(err) {
|
||||
stream.emit('error', err);
|
||||
});
|
||||
readStream.on('data', function(data) {
|
||||
current_length += data.length;
|
||||
});
|
||||
readStream.on('end', function(data) {
|
||||
if (data) {
|
||||
current_length += data.length;
|
||||
}
|
||||
if (expected_length && current_length != expected_length)
|
||||
stream.emit('error', createError('content length mismatch'));
|
||||
});
|
||||
return stream;
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a stream search.
|
||||
* @param {*} options request options
|
||||
* @return {Stream}
|
||||
*/
|
||||
search(options) {
|
||||
const transformStream = new Stream.PassThrough({objectMode: true});
|
||||
const requestStream = this.request({
|
||||
uri: options.req.url,
|
||||
req: options.req,
|
||||
headers: {
|
||||
referer: options.req.headers.referer,
|
||||
},
|
||||
});
|
||||
|
||||
let parsePackage = (pkg) => {
|
||||
if (Utils.is_object(pkg)) {
|
||||
transformStream.emit('data', pkg);
|
||||
}
|
||||
};
|
||||
|
||||
requestStream.on('response', (res) => {
|
||||
if (!String(res.statusCode).match(/^2\d\d$/)) {
|
||||
return transformStream.emit('error', createError(`bad status code ${res.statusCode} from uplink`));
|
||||
}
|
||||
|
||||
// See https://github.com/request/request#requestoptions-callback
|
||||
// Request library will not decode gzip stream.
|
||||
let jsonStream;
|
||||
if (res.headers['content-encoding'] === 'gzip') {
|
||||
jsonStream = res.pipe(zlib.createUnzip());
|
||||
} else {
|
||||
jsonStream = res;
|
||||
}
|
||||
jsonStream.pipe(JSONStream.parse('*')).on('data', parsePackage);
|
||||
jsonStream.on('end', () => {
|
||||
transformStream.emit('end');
|
||||
});
|
||||
});
|
||||
|
||||
requestStream.on('error', (err) => {
|
||||
transformStream.emit('error', err);
|
||||
});
|
||||
|
||||
transformStream.abort = () => {
|
||||
requestStream.abort();
|
||||
transformStream.emit('end');
|
||||
};
|
||||
|
||||
return transformStream;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add proxy headers.
|
||||
* @param {*} req the http request
|
||||
* @param {*} headers the request headers
|
||||
*/
|
||||
_addProxyHeaders(req, headers) {
|
||||
if (req) {
|
||||
// Only submit X-Forwarded-For field if we don't have a proxy selected
|
||||
// in the config file.
|
||||
//
|
||||
// Otherwise misconfigured proxy could return 407:
|
||||
// https://github.com/rlidwka/sinopia/issues/254
|
||||
//
|
||||
if (this.proxy === false) {
|
||||
headers['X-Forwarded-For'] = (
|
||||
req && req.headers['x-forwarded-for']
|
||||
? req.headers['x-forwarded-for'] + ', '
|
||||
: ''
|
||||
) + req.connection.remoteAddress;
|
||||
}
|
||||
}
|
||||
|
||||
// always attach Via header to avoid loops, even if we're not proxying
|
||||
headers['Via'] =
|
||||
req && req.headers['via']
|
||||
? req.headers['via'] + ', '
|
||||
: '';
|
||||
|
||||
headers['Via'] += '1.1 ' + this.server_id + ' (Verdaccio)';
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether the remote host is available.
|
||||
* @param {*} alive
|
||||
* @return {Boolean}
|
||||
*/
|
||||
_statusCheck(alive) {
|
||||
if (arguments.length === 0) {
|
||||
return this._ifRequestFailure() === false;
|
||||
} else {
|
||||
if (alive) {
|
||||
if (this.failed_requests >= this.max_fails) {
|
||||
this.logger.warn({
|
||||
host: this.url.host,
|
||||
}, 'host @{host} is back online');
|
||||
}
|
||||
this.failed_requests = 0;
|
||||
} else {
|
||||
this.failed_requests ++;
|
||||
if (this.failed_requests === this.max_fails) {
|
||||
this.logger.warn({
|
||||
host: this.url.host,
|
||||
}, 'host @{host} is now offline');
|
||||
}
|
||||
}
|
||||
this.last_request_time = Date.now();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
 * Check whether recent requests to the uplink have failed.
|
||||
* @return {boolean}
|
||||
* @private
|
||||
*/
|
||||
_ifRequestFailure() {
|
||||
return this.failed_requests >= this.max_fails && Math.abs(Date.now() - this.last_request_time) < this.fail_timeout;
|
||||
}
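// Illustrative timeline (assumed max_fails = 2, fail_timeout = '5m'; sketch only,
// not part of this changeset):
//   uplink._statusCheck(false); // first failed request
//   uplink._statusCheck(false); // second failure -> 'host ... is now offline' is logged
//   uplink._statusCheck();      // => false for the next five minutes
//   uplink._statusCheck(true);  // a success resets the counter -> back online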
|
||||
|
||||
/**
|
||||
* Set up a proxy.
|
||||
* @param {*} hostname
|
||||
* @param {*} config
|
||||
* @param {*} mainconfig
|
||||
* @param {*} isHTTPS
|
||||
*/
|
||||
_setupProxy(hostname, config, mainconfig, isHTTPS) {
|
||||
let noProxyList;
|
||||
let proxy_key = isHTTPS ? 'https_proxy' : 'http_proxy';
|
||||
|
||||
// get http_proxy and no_proxy configs
|
||||
if (proxy_key in config) {
|
||||
this.proxy = config[proxy_key];
|
||||
} else if (proxy_key in mainconfig) {
|
||||
this.proxy = mainconfig[proxy_key];
|
||||
}
|
||||
if ('no_proxy' in config) {
|
||||
noProxyList = config.no_proxy;
|
||||
} else if ('no_proxy' in mainconfig) {
|
||||
noProxyList = mainconfig.no_proxy;
|
||||
}
|
||||
|
||||
// use wget-like algorithm to determine if proxy shouldn't be used
|
||||
if (hostname[0] !== '.') {
|
||||
hostname = '.' + hostname;
|
||||
}
|
||||
if (_.isString(noProxyList) && noProxyList.length) {
|
||||
noProxyList = noProxyList.split(',');
|
||||
}
|
||||
if (_.isArray(noProxyList)) {
|
||||
for (let i = 0; i < noProxyList.length; i++) {
|
||||
let noProxyItem = noProxyList[i];
|
||||
if (noProxyItem[0] !== '.') noProxyItem = '.' + noProxyItem;
|
||||
if (hostname.lastIndexOf(noProxyItem) === hostname.length - noProxyItem.length) {
|
||||
if (this.proxy) {
|
||||
this.logger.debug({url: this.url.href, rule: noProxyItem},
|
||||
'not using proxy for @{url}, excluded by @{rule} rule');
|
||||
this.proxy = false;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// if it's non-string (i.e. "false"), don't use it
|
||||
if (_.isString(this.proxy) === false) {
|
||||
delete this.proxy;
|
||||
} else {
|
||||
this.logger.debug( {url: this.url.href, proxy: this.proxy}, 'using proxy @{proxy} for @{url}' );
|
||||
}
|
||||
}
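// Illustrative example (hypothetical config, sketch only, not part of this changeset):
// with config = {url: 'https://registry.npmjs.org', no_proxy: 'npmjs.org',
// https_proxy: 'http://proxy.local:3128'}, the hostname '.registry.npmjs.org'
// matches the '.npmjs.org' rule, so the proxy is dropped for this uplink and
// `this.proxy` ends up deleted.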
|
||||
|
||||
}
|
||||
|
||||
module.exports = ProxyStorage;
|
||||
@@ -1,66 +0,0 @@
|
||||
var stream = require('stream')
|
||||
, util = require('util')
|
||||
|
||||
//
|
||||
// This stream is used to read tarballs from repository
|
||||
//
|
||||
function ReadTarball(options) {
|
||||
stream.PassThrough.call(this, options)
|
||||
|
||||
// called when data is not needed anymore
|
||||
add_abstract_method(this, 'abort')
|
||||
}
|
||||
|
||||
util.inherits(ReadTarball, stream.PassThrough)
|
||||
module.exports.ReadTarballStream = ReadTarball
|
||||
|
||||
//
|
||||
// This stream is used to upload tarballs to a repository
|
||||
//
|
||||
function UploadTarball(options) {
|
||||
stream.PassThrough.call(this, options)
|
||||
|
||||
// called when user closes connection before upload finishes
|
||||
add_abstract_method(this, 'abort')
|
||||
|
||||
// called when upload finishes successfully
|
||||
add_abstract_method(this, 'done')
|
||||
}
|
||||
|
||||
util.inherits(UploadTarball, stream.PassThrough)
|
||||
module.exports.UploadTarballStream = UploadTarball
|
||||
|
||||
//
|
||||
// This function intercepts abstract calls and replays them allowing
|
||||
// us to attach those functions after we are ready to do so
|
||||
//
|
||||
function add_abstract_method(self, name) {
|
||||
self._called_methods = self._called_methods || {}
|
||||
self.__defineGetter__(name, function() {
|
||||
return function() {
|
||||
self._called_methods[name] = true
|
||||
}
|
||||
})
|
||||
self.__defineSetter__(name, function(fn) {
|
||||
delete self[name]
|
||||
self[name] = fn
|
||||
if (self._called_methods && self._called_methods[name]) {
|
||||
delete self._called_methods[name]
|
||||
self[name]()
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
function __test() {
|
||||
var test = new ReadTarball()
|
||||
test.abort()
|
||||
setTimeout(function() {
|
||||
test.abort = function() {
|
||||
console.log('ok')
|
||||
}
|
||||
test.abort = function() {
|
||||
throw 'fail'
|
||||
}
|
||||
}, 100)
|
||||
}
|
||||
|
||||
@@ -1,46 +0,0 @@
|
||||
var async = require('async')
|
||||
|
||||
//
|
||||
// Function performs a certain task on a multiple uplinks
|
||||
// and reverts changes if something fails
|
||||
//
|
||||
// uplinks - list of uplinks not counting local
|
||||
// localAction, localRollback - function(cb)
|
||||
// remoteAction, remoteRollback - function(uplink, cb)
|
||||
//
|
||||
module.exports = function(uplinks, localAction, localRollback, remoteAction, remoteRollback, callback) {
|
||||
var uplink_ids = uplinks.map(function(_, i) {
|
||||
return i
|
||||
})
|
||||
|
||||
localAction(function(err) {
|
||||
if (err) return callback(err)
|
||||
async.map(uplink_ids, function(i, cb) {
|
||||
remoteAction(uplinks[i], function(err) {
|
||||
cb(null, err)
|
||||
})
|
||||
}, function(err, res) {
|
||||
var return_err = err
|
||||
|
||||
// let err be first non-null element in the array
|
||||
for (var i=0; i<res.length; i++) {
|
||||
if (return_err) break
|
||||
return_err = res[i]
|
||||
}
|
||||
|
||||
if (!return_err) return callback()
|
||||
|
||||
async.map(uplink_ids, function(i, cb) {
|
||||
if (res[i]) return cb()
|
||||
remoteRollback(uplinks[i], function() {
|
||||
cb()
|
||||
})
|
||||
}, function(err) {
|
||||
localRollback(function() {
|
||||
callback(return_err)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,313 +0,0 @@
|
||||
var URL = require('url')
|
||||
, request = require('request')
|
||||
, UError = require('./error').UserError
|
||||
, mystreams = require('./streams')
|
||||
, Logger = require('./logger')
|
||||
, utils = require('./utils')
|
||||
|
||||
//
|
||||
// Implements Storage interface
|
||||
// (same for storage.js, local-storage.js, up-storage.js)
|
||||
//
|
||||
function Storage(config, mainconfig) {
|
||||
if (!(this instanceof Storage)) return new Storage(config)
|
||||
this.config = config
|
||||
this.is_alive = false
|
||||
this.userAgent = mainconfig.user_agent
|
||||
this.ca = config.ca
|
||||
this.logger = Logger.logger.child({sub: 'out'})
|
||||
|
||||
this.url = URL.parse(this.config.url)
|
||||
if (this.url.hostname === 'registry.npmjs.org') {
|
||||
this.ca = this.ca || require('./npmsslkeys')
|
||||
|
||||
// npm registry is too slow working with ssl :(
|
||||
/*if (this.config._autogenerated) {
|
||||
// encrypt all the things!
|
||||
this.url.protocol = 'https'
|
||||
this.config.url = URL.format(this.url)
|
||||
}*/
|
||||
}
|
||||
|
||||
_setupProxy.call(this, this.url.hostname, config, mainconfig, this.url.protocol === 'https:')
|
||||
|
||||
this.config.url = this.config.url.replace(/\/$/, '')
|
||||
if (isNaN(parseFloat(this.config.timeout)) || !isFinite(this.config.timeout)) {
|
||||
this.config.timeout = 30000
|
||||
}
|
||||
return this
|
||||
}
|
||||
|
||||
function _setupProxy(hostname, config, mainconfig, isHTTPS) {
|
||||
var no_proxy
|
||||
var proxy_key = isHTTPS ? 'https_proxy' : 'http_proxy'
|
||||
|
||||
// get http_proxy and no_proxy configs
|
||||
if (proxy_key in config) {
|
||||
this.proxy = config[proxy_key]
|
||||
} else if (proxy_key in mainconfig) {
|
||||
this.proxy = mainconfig[proxy_key]
|
||||
}
|
||||
if ('no_proxy' in config) {
|
||||
no_proxy = config.no_proxy
|
||||
} else if ('no_proxy' in mainconfig) {
|
||||
no_proxy = mainconfig.no_proxy
|
||||
}
|
||||
|
||||
// use wget-like algorithm to determine if proxy shouldn't be used
|
||||
if (hostname[0] !== '.') hostname = '.' + hostname
|
||||
if (typeof(no_proxy) === 'string' && no_proxy.length) {
|
||||
no_proxy = no_proxy.split(',')
|
||||
}
|
||||
if (Array.isArray(no_proxy)) {
|
||||
for (var i=0; i<no_proxy.length; i++) {
|
||||
var no_proxy_item = no_proxy[i]
|
||||
if (no_proxy_item[0] !== '.') no_proxy_item = '.' + no_proxy_item
|
||||
if (hostname.lastIndexOf(no_proxy_item) === hostname.length - no_proxy_item.length) {
|
||||
if (this.proxy) {
|
||||
this.logger.debug({url: this.url.href, rule: no_proxy_item}, 'not using proxy for @{url}, excluded by @{rule} rule')
|
||||
this.proxy = false
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// if it's non-string (i.e. "false"), don't use it
|
||||
if (typeof(this.proxy) !== 'string') {
|
||||
delete this.proxy
|
||||
} else {
|
||||
this.logger.debug({url: this.url.href, proxy: this.proxy}, 'using proxy @{proxy} for @{url}')
|
||||
}
|
||||
}
|
||||
|
||||
Storage.prototype.request = function(options, cb) {
|
||||
var self = this
|
||||
, headers = options.headers || {}
|
||||
headers.accept = headers.accept || 'application/json'
|
||||
headers['user-agent'] = headers['user-agent'] || this.userAgent
|
||||
|
||||
var method = options.method || 'GET'
|
||||
, uri = options.uri_full || (this.config.url + options.uri)
|
||||
self.logger.info({
|
||||
method: method,
|
||||
headers: headers,
|
||||
uri: uri,
|
||||
}, "making request: '@{method} @{uri}'")
|
||||
|
||||
if (utils.is_object(options.json)) {
|
||||
var json = JSON.stringify(options.json)
|
||||
headers['content-type'] = headers['content-type'] || 'application/json'
|
||||
}
|
||||
|
||||
var req = request({
|
||||
url: uri,
|
||||
method: method,
|
||||
headers: headers,
|
||||
body: json,
|
||||
ca: this.ca,
|
||||
proxy: this.proxy,
|
||||
timeout: this.config.timeout
|
||||
}, function(err, res, body) {
|
||||
var error
|
||||
if (!err) {
|
||||
var res_length = body.length
|
||||
|
||||
if (options.json && res.statusCode < 300) {
|
||||
try {
|
||||
body = JSON.parse(body)
|
||||
} catch(_err) {
|
||||
body = {}
|
||||
err = _err
|
||||
error = err.message
|
||||
}
|
||||
}
|
||||
|
||||
if (!err && utils.is_object(body)) {
|
||||
if (body.error) {
|
||||
error = body.error
|
||||
}
|
||||
}
|
||||
} else {
|
||||
error = err.message
|
||||
}
|
||||
|
||||
var msg = '@{!status}, req: \'@{request.method} @{request.url}\''
|
||||
if (error) {
|
||||
msg += ', error: @{!error}'
|
||||
} else {
|
||||
msg += ', bytes: @{bytes.in}/@{bytes.out}'
|
||||
}
|
||||
self.logger.warn({
|
||||
err: err,
|
||||
request: {method: method, url: uri},
|
||||
level: 35, // http
|
||||
status: res != null ? res.statusCode : 'ERR',
|
||||
error: error,
|
||||
bytes: {
|
||||
in: json ? json.length : 0,
|
||||
out: res_length || 0,
|
||||
}
|
||||
}, msg)
|
||||
if (cb) cb.apply(self, arguments)
|
||||
})
|
||||
req.on('response', function(res) {
|
||||
self.status_check(true)
|
||||
})
|
||||
req.on('error', function() {
|
||||
self.status_check(false)
|
||||
})
|
||||
return req
|
||||
}
|
||||
|
||||
Storage.prototype.status_check = function(alive) {
|
||||
if (arguments.length === 0) {
|
||||
if (!this.is_alive && Math.abs(Date.now() - this.is_alive_time()) > 60*1000) {
|
||||
return false
|
||||
} else {
|
||||
return true
|
||||
}
|
||||
} else {
|
||||
this.is_alive = alive
|
||||
this.is_alive_time = Date.now()
|
||||
}
|
||||
}
|
||||
|
||||
Storage.prototype.can_fetch_url = function(url) {
|
||||
url = URL.parse(url)
|
||||
|
||||
return url.protocol === this.url.protocol
|
||||
&& url.host === this.url.host
|
||||
&& url.path.indexOf(this.url.path) === 0
|
||||
}
|
||||
|
||||
Storage.prototype.add_package = function(name, metadata, callback) {
|
||||
this.request({
|
||||
uri: '/' + escape(name),
|
||||
method: 'PUT',
|
||||
json: metadata,
|
||||
}, function(err, res, body) {
|
||||
if (err) return callback(err)
|
||||
if (!(res.statusCode >= 200 && res.statusCode < 300)) {
|
||||
return callback(new Error('bad status code: ' + res.statusCode))
|
||||
}
|
||||
callback(null, body)
|
||||
})
|
||||
}
|
||||
|
||||
Storage.prototype.add_version = function(name, version, metadata, tag, callback) {
|
||||
this.request({
|
||||
uri: '/' + escape(name) + '/' + escape(version) + '/-tag/' + escape(tag),
|
||||
method: 'PUT',
|
||||
json: metadata,
|
||||
}, function(err, res, body) {
|
||||
if (err) return callback(err)
|
||||
if (!(res.statusCode >= 200 && res.statusCode < 300)) {
|
||||
return callback(new Error('bad status code: ' + res.statusCode))
|
||||
}
|
||||
callback(null, body)
|
||||
})
|
||||
}
|
||||
|
||||
Storage.prototype.add_tarball = function(name, filename) {
|
||||
var stream = new mystreams.UploadTarballStream()
|
||||
, self = this
|
||||
|
||||
var wstream = this.request({
|
||||
uri: '/' + escape(name) + '/-/' + escape(filename) + '/whatever',
|
||||
method: 'PUT',
|
||||
headers: {
|
||||
'content-type': 'application/octet-stream'
|
||||
},
|
||||
})
|
||||
|
||||
wstream.on('response', function(res) {
|
||||
if (!(res.statusCode >= 200 && res.statusCode < 300)) {
|
||||
return stream.emit('error', new UError({
|
||||
msg: 'bad uplink status code: ' + res.statusCode,
|
||||
status: 500,
|
||||
}))
|
||||
}
|
||||
stream.emit('success')
|
||||
})
|
||||
|
||||
wstream.on('error', function(err) {
|
||||
stream.emit('error', err)
|
||||
})
|
||||
|
||||
stream.abort = function() {
|
||||
process.nextTick(function() {
|
||||
if (wstream.req) {
|
||||
wstream.req.abort()
|
||||
}
|
||||
})
|
||||
}
|
||||
stream.done = function() {}
|
||||
stream.pipe(wstream)
|
||||
|
||||
return stream
|
||||
}
|
||||
|
||||
Storage.prototype.get_package = function(name, etag, callback) {
|
||||
if (etag) {
|
||||
var headers = {
|
||||
'if-none-match': etag
|
||||
}
|
||||
}
|
||||
this.request({
|
||||
uri: '/' + escape(name),
|
||||
json: true,
|
||||
headers: headers,
|
||||
}, function(err, res, body) {
|
||||
if (err) return callback(err)
|
||||
if (res.statusCode === 404) {
|
||||
return callback(new UError({
|
||||
msg: 'package doesn\'t exist on uplink',
|
||||
status: 404,
|
||||
}))
|
||||
}
|
||||
if (!(res.statusCode >= 200 && res.statusCode < 300)) {
|
||||
return callback(new Error('bad status code: ' + res.statusCode))
|
||||
}
|
||||
callback(null, body, res.headers.etag)
|
||||
})
|
||||
}
|
||||
|
||||
Storage.prototype.get_tarball = function(name, filename) {
|
||||
return this.get_url(this.config.url + '/' + name + '/-/' + filename)
|
||||
}
|
||||
|
||||
Storage.prototype.get_url = function(url) {
|
||||
var stream = new mystreams.ReadTarballStream()
|
||||
stream.abort = function() {}
|
||||
|
||||
var rstream = this.request({
|
||||
uri_full: url,
|
||||
encoding: null,
|
||||
})
|
||||
|
||||
rstream.on('response', function(res) {
|
||||
if (res.statusCode === 404) {
|
||||
return stream.emit('error', new UError({
|
||||
msg: 'file doesn\'t exist on uplink',
|
||||
status: 404,
|
||||
}))
|
||||
}
|
||||
if (!(res.statusCode >= 200 && res.statusCode < 300)) {
|
||||
return stream.emit('error', new UError({
|
||||
msg: 'bad uplink status code: ' + res.statusCode,
|
||||
status: 500,
|
||||
}))
|
||||
}
|
||||
|
||||
rstream.pipe(stream)
|
||||
})
|
||||
|
||||
rstream.on('error', function(err) {
|
||||
stream.emit('error', err)
|
||||
})
|
||||
return stream
|
||||
}
|
||||
|
||||
module.exports = Storage
|
||||
|
||||
363
lib/utils.js
@@ -1,89 +1,328 @@
|
||||
var assert = require('assert')
|
||||
, URL = require('url')
|
||||
'use strict';
|
||||
|
||||
// from normalize-package-data/lib/fixer.js
|
||||
module.exports.validate_name = function(name) {
|
||||
name = name.toLowerCase()
|
||||
if (
|
||||
name.charAt(0) === "." || // ".bin", etc.
|
||||
name.match(/[\/@\s\+%:]/) ||
|
||||
name !== encodeURIComponent(name) ||
|
||||
name.toLowerCase() === "node_modules" ||
|
||||
name.toLowerCase() === "__proto__" ||
|
||||
name.toLowerCase() === "package.json" ||
|
||||
name.toLowerCase() === "favicon.ico"
|
||||
const assert = require('assert');
|
||||
const semver = require('semver');
|
||||
const URL = require('url');
|
||||
const _ = require('lodash');
|
||||
const Logger = require('./logger');
|
||||
|
||||
/**
|
||||
* Validate a package.
|
||||
* @param {*} name
|
||||
* @return {Boolean} whether the package is valid or not
|
||||
*/
|
||||
function validate_package(name) {
|
||||
name = name.split('/', 2);
|
||||
if (name.length === 1) {
|
||||
// normal package
|
||||
return module.exports.validate_name(name[0]);
|
||||
} else {
|
||||
// scoped package
|
||||
return name[0][0] === '@'
|
||||
&& module.exports.validate_name(name[0].slice(1))
|
||||
&& module.exports.validate_name(name[1]);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* From normalize-package-data/lib/fixer.js
|
||||
* @param {*} name the package name
|
||||
 * @return {Boolean} whether it is valid or not
|
||||
*/
|
||||
function validate_name(name) {
|
||||
if (_.isString(name) === false) {
|
||||
return false;
|
||||
}
|
||||
name = name.toLowerCase();
|
||||
|
||||
// all URL-safe characters and "@" for issue #75
|
||||
if (!name.match(/^[-a-zA-Z0-9_.!~*'()@]+$/)
|
||||
|| name.charAt(0) === '.' // ".bin", etc.
|
||||
|| name.charAt(0) === '-' // "-" is reserved by couchdb
|
||||
|| name === 'node_modules'
|
||||
|| name === '__proto__'
|
||||
|| name === 'package.json'
|
||||
|| name === 'favicon.ico'
|
||||
) {
|
||||
return false
|
||||
return false;
|
||||
} else {
|
||||
return true
|
||||
return true;
|
||||
}
|
||||
}
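// Illustrative results (sketch, not part of this changeset):
const exampleScopedOk = validate_package('@scope/pkg'); // => true, scoped name is accepted
const exampleReserved = validate_name('node_modules');  // => false, reserved name is rejected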
|
||||
|
||||
module.exports.is_object = function(obj) {
|
||||
return typeof(obj) === 'object' && obj !== null && !Array.isArray(obj)
|
||||
/**
|
||||
* Check whether an element is an Object
|
||||
* @param {*} obj the element
|
||||
* @return {Boolean}
|
||||
*/
|
||||
function isObject(obj) {
|
||||
return _.isObject(obj) && _.isNull(obj) === false && _.isArray(obj) === false;
|
||||
}
|
||||
|
||||
module.exports.validate_metadata = function(object, name) {
|
||||
assert(module.exports.is_object(object), 'not a json object')
|
||||
assert.equal(object.name, name)
|
||||
/**
|
||||
 * Validate the package metadata and add any properties that are missing from
 * the metadata.
|
||||
* @param {*} object
|
||||
* @param {*} name
|
||||
 * @return {Object} the object with additional properties such as dist-tags and versions
|
||||
*/
|
||||
function validate_metadata(object, name) {
|
||||
assert(isObject(object), 'not a json object');
|
||||
assert.equal(object.name, name);
|
||||
|
||||
if (!module.exports.is_object(object['dist-tags'])) {
|
||||
object['dist-tags'] = {}
|
||||
if (!isObject(object['dist-tags'])) {
|
||||
object['dist-tags'] = {};
|
||||
}
|
||||
|
||||
if (!module.exports.is_object(object['versions'])) {
|
||||
object['versions'] = {}
|
||||
if (!isObject(object['versions'])) {
|
||||
object['versions'] = {};
|
||||
}
|
||||
|
||||
return object
|
||||
return object;
|
||||
}
|
||||
|
||||
module.exports.parse_tarball_url = function(_url) {
|
||||
var url = URL.parse(_url)
|
||||
/**
|
||||
* Create base url for registry.
|
||||
* @param {String} protocol
|
||||
* @param {String} host
|
||||
* @param {String} prefix
|
||||
* @return {String} base registry url
|
||||
*/
|
||||
function combineBaseUrl(protocol, host, prefix) {
|
||||
let result = `${protocol}://${host}`;
|
||||
|
||||
var path = url.path.replace(/^\//, '').split('/')
|
||||
if (path.length >= 3 && path[path.length-2] === '-') {
|
||||
var filename = path.pop()
|
||||
, pkgpath = '/' + filename // tarball name
|
||||
pkgpath = '/' + path.pop() + pkgpath // "-"
|
||||
pkgpath = '/' + path.pop() + pkgpath // package.name
|
||||
} else {
|
||||
return null
|
||||
if (prefix) {
|
||||
prefix = prefix.replace(/\/$/, '');
|
||||
|
||||
result = (prefix.indexOf('/') === 0)
|
||||
? `${result}${prefix}`
|
||||
: prefix;
|
||||
}
|
||||
|
||||
return {
|
||||
protocol: url.protocol,
|
||||
host: url.host,
|
||||
prepath: '/' + path.join('/'),
|
||||
pkgpath: pkgpath,
|
||||
filename: filename,
|
||||
}
|
||||
return result;
|
||||
}
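// Illustrative results (assumed hosts, not part of this changeset):
const exampleBase = combineBaseUrl('http', 'localhost:4873');                    // => 'http://localhost:4873'
const examplePrefixed = combineBaseUrl('http', 'localhost:4873', '/verdaccio/'); // => 'http://localhost:4873/verdaccio'
const exampleAbsolute = combineBaseUrl('http', 'localhost:4873', 'https://npm.example.com'); // => 'https://npm.example.com'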
|
||||
|
||||
module.exports.filter_tarball_urls = function(pkg, req, config) {
|
||||
function filter(_url) {
|
||||
if (!req.headers.host) return _url
|
||||
|
||||
var url = module.exports.parse_tarball_url(_url)
|
||||
// weird url, just return it
|
||||
if (url == null) return _url
|
||||
|
||||
if (config.url_prefix != null) {
|
||||
var result = config.url_prefix.replace(/\/$/, '')
|
||||
} else {
|
||||
var result = req.protocol + '://' + req.headers.host
|
||||
/**
|
||||
 * Iterate a package's versions and filter each original tarball url.
|
||||
* @param {*} pkg
|
||||
* @param {*} req
|
||||
* @param {*} config
|
||||
 * @return {Object} the filtered package
|
||||
*/
|
||||
function filter_tarball_urls(pkg, req, config) {
|
||||
/**
|
||||
* Filter a tarball url.
|
||||
* @param {*} _url
|
||||
* @return {String} a parsed url
|
||||
*/
|
||||
const filter = function(_url) {
|
||||
if (!req.headers.host) {
|
||||
return _url;
|
||||
}
|
||||
const filename = URL.parse(_url).pathname.replace(/^.*\//, '');
|
||||
const base = combineBaseUrl(req.protocol, req.headers.host, config.url_prefix);
|
||||
|
||||
return result + url.pkgpath
|
||||
}
|
||||
return `${base}/${pkg.name.replace(/\//g, '%2f')}/-/${filename}`;
|
||||
};
|
||||
|
||||
for (var ver in pkg.versions) {
|
||||
if (pkg.versions[ver].dist != null
|
||||
&& pkg.versions[ver].dist.tarball != null) {
|
||||
pkg.versions[ver].dist.__sinopia_orig_tarball = pkg.versions[ver].dist.tarball
|
||||
pkg.versions[ver].dist.tarball = filter(pkg.versions[ver].dist.tarball)
|
||||
for (let ver in pkg.versions) {
|
||||
if (Object.prototype.hasOwnProperty.call(pkg.versions, ver)) {
|
||||
const dist = pkg.versions[ver].dist;
|
||||
if (_.isNull(dist) === false && _.isNull(dist.tarball) === false) {
|
||||
dist.tarball = filter(dist.tarball);
|
||||
}
|
||||
}
|
||||
}
|
||||
return pkg
|
||||
return pkg;
|
||||
}
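// Illustrative sketch (hypothetical package and request, not part of this changeset):
const examplePkg = {
  name: 'foo',
  versions: {'1.0.0': {dist: {tarball: 'https://registry.npmjs.org/foo/-/foo-1.0.0.tgz'}}},
};
const exampleReq = {protocol: 'http', headers: {host: 'localhost:4873'}};
filter_tarball_urls(examplePkg, exampleReq, {});
// examplePkg.versions['1.0.0'].dist.tarball is now 'http://localhost:4873/foo/-/foo-1.0.0.tgz'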
|
||||
|
||||
/**
|
||||
* Create a tag for a package
|
||||
* @param {*} data
|
||||
* @param {*} version
|
||||
* @param {*} tag
|
||||
* @return {Boolean} whether a package has been tagged
|
||||
*/
|
||||
function tag_version(data, version, tag) {
|
||||
if (tag) {
|
||||
if (data['dist-tags'][tag] !== version) {
|
||||
if (semver.parse(version, true)) {
|
||||
// valid version - store
|
||||
data['dist-tags'][tag] = version;
|
||||
return true;
|
||||
}
|
||||
}
|
||||
Logger.logger.warn({ver: version, tag: tag}, 'ignoring bad version @{ver} in @{tag}');
|
||||
if (tag && data['dist-tags'][tag]) {
|
||||
delete data['dist-tags'][tag];
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
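// Illustrative sketch (hypothetical metadata, not part of this changeset):
const exampleData = {'dist-tags': {}, versions: {}};
tag_version(exampleData, '1.2.3', 'latest'); // => true, dist-tags.latest becomes '1.2.3'
// tag_version(exampleData, 'not-semver', 'beta'); // => false, the invalid version is ignored (a warning is logged)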
|
||||
|
||||
/**
|
||||
* Gets version from a package object taking into account semver weirdness.
|
||||
* @param {*} object
|
||||
* @param {*} version
|
||||
* @return {String} return the semantic version of a package
|
||||
*/
|
||||
function get_version(object, version) {
|
||||
// this condition must allow cast
|
||||
if (object.versions[version] != null) {
|
||||
return object.versions[version];
|
||||
}
|
||||
try {
|
||||
version = semver.parse(version, true);
|
||||
for (let k in object.versions) {
|
||||
if (version.compare(semver.parse(k, true)) === 0) {
|
||||
return object.versions[k];
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
return undefined;
|
||||
}
|
||||
}
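// Illustrative sketch (hypothetical metadata, not part of this changeset): an
// equivalent semver spelling still resolves to the stored version object.
const exampleMeta = {versions: {'1.0.0': {name: 'foo', version: '1.0.0'}}};
get_version(exampleMeta, 'v1.0.0'); // => the '1.0.0' version object
get_version(exampleMeta, '2.0.0');  // => undefined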
|
||||
|
||||
/**
|
||||
* Parse an internet address
|
||||
* Allow:
|
||||
- https:localhost:1234 - protocol + host + port
|
||||
- localhost:1234 - host + port
|
||||
- 1234 - port
|
||||
- http::1234 - protocol + port
|
||||
- https://localhost:443/ - full url + https
|
||||
- http://[::1]:443/ - ipv6
|
||||
- unix:/tmp/http.sock - unix sockets
|
||||
- https://unix:/tmp/http.sock - unix sockets (https)
|
||||
* @param {*} urlAddress the internet address definition
|
||||
* @return {Object|Null} literal object that represent the address parsed
|
||||
*/
|
||||
function parse_address(urlAddress) {
|
||||
//
|
||||
// TODO: refactor it to something more reasonable?
|
||||
//
|
||||
// protocol : // ( host )|( ipv6 ): port /
|
||||
let urlPattern = /^((https?):(\/\/)?)?((([^\/:]*)|\[([^\[\]]+)\]):)?(\d+)\/?$/.exec(urlAddress);
|
||||
|
||||
if (urlPattern) {
|
||||
return {
|
||||
proto: urlPattern[2] || 'http',
|
||||
host: urlPattern[6] || urlPattern[7] || 'localhost',
|
||||
port: urlPattern[8] || '4873',
|
||||
};
|
||||
}
|
||||
|
||||
urlPattern = /^((https?):(\/\/)?)?unix:(.*)$/.exec(urlAddress);
|
||||
|
||||
if (urlPattern) {
|
||||
return {
|
||||
proto: urlPattern[2] || 'http',
|
||||
path: urlPattern[4],
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
}
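// Illustrative results (sketch, not part of this changeset):
parse_address('localhost:4873');              // => {proto: 'http', host: 'localhost', port: '4873'}
parse_address('https://[::1]:443/');          // => {proto: 'https', host: '::1', port: '443'}
parse_address('https://unix:/tmp/http.sock'); // => {proto: 'https', path: '/tmp/http.sock'}
parse_address('not an address');              // => null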
|
||||
|
||||
/**
|
||||
* Function filters out bad semver versions and sorts the array.
|
||||
* @param {*} array
|
||||
* @return {Array} sorted Array
|
||||
*/
|
||||
function semverSort(array) {
|
||||
return array
|
||||
.filter(function(x) {
|
||||
if (!semver.parse(x, true)) {
|
||||
Logger.logger.warn( {ver: x}, 'ignoring bad version @{ver}' );
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
})
|
||||
.sort(semver.compareLoose)
|
||||
.map(String);
|
||||
}
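// Illustrative result (sketch, not part of this changeset): versions are sorted by
// semver rather than lexicographically, and invalid entries are dropped with a warning.
const exampleSorted = semverSort(['1.10.0', '2.0.0', '1.2.0']); // => ['1.2.0', '1.10.0', '2.0.0']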
|
||||
|
||||
/**
|
||||
* Flatten arrays of tags.
|
||||
* @param {*} data
|
||||
*/
|
||||
function normalize_dist_tags(data) {
|
||||
let sorted;
|
||||
if (!data['dist-tags'].latest) {
|
||||
// overwrite latest with highest known version based on semver sort
|
||||
sorted = semverSort(Object.keys(data.versions));
|
||||
if (sorted && sorted.length) {
|
||||
data['dist-tags'].latest = sorted.pop();
|
||||
}
|
||||
}
|
||||
|
||||
for (let tag in data['dist-tags']) {
|
||||
if (_.isArray(data['dist-tags'][tag])) {
|
||||
if (data['dist-tags'][tag].length) {
|
||||
// sort array
|
||||
sorted = semverSort(data['dist-tags'][tag]);
|
||||
if (sorted.length) {
|
||||
// use highest version based on semver sort
|
||||
data['dist-tags'][tag] = sorted.pop();
|
||||
}
|
||||
} else {
|
||||
delete data['dist-tags'][tag];
|
||||
}
|
||||
} else if (_.isString(data['dist-tags'][tag] )) {
|
||||
if (!semver.parse(data['dist-tags'][tag], true)) {
|
||||
// if the version is invalid, delete the dist-tag entry
|
||||
delete data['dist-tags'][tag];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
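// Illustrative sketch (hypothetical metadata, not part of this changeset): a missing
// `latest` tag is filled with the highest known version, and tags stored as arrays
// are collapsed to their highest entry.
const exampleTags = {
  'dist-tags': {beta: ['1.0.0-beta.1', '1.0.0-beta.2']},
  versions: {'0.9.0': {}, '1.0.0': {}},
};
normalize_dist_tags(exampleTags);
// exampleTags['dist-tags'] is now {beta: '1.0.0-beta.2', latest: '1.0.0'}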
|
||||
|
||||
const parseIntervalTable = {
|
||||
'': 1000,
|
||||
'ms': 1,
|
||||
's': 1000,
|
||||
'm': 60*1000,
|
||||
'h': 60*60*1000,
|
||||
'd': 86400000,
|
||||
'w': 7*86400000,
|
||||
'M': 30*86400000,
|
||||
'y': 365*86400000,
|
||||
};
|
||||
|
||||
/**
|
||||
 * Parse an interval string to a number of milliseconds
|
||||
* @param {*} interval
|
||||
* @return {Number}
|
||||
*/
|
||||
function parseInterval(interval) {
|
||||
if (typeof(interval) === 'number') {
|
||||
return interval * 1000;
|
||||
}
|
||||
let result = 0;
|
||||
let last_suffix = Infinity;
|
||||
interval.split(/\s+/).forEach(function(x) {
|
||||
if (!x) return;
|
||||
let m = x.match(/^((0|[1-9][0-9]*)(\.[0-9]+)?)(ms|s|m|h|d|w|M|y|)$/);
|
||||
if (!m
|
||||
|| parseIntervalTable[m[4]] >= last_suffix
|
||||
|| (m[4] === '' && last_suffix !== Infinity)) {
|
||||
throw Error('invalid interval: ' + interval);
|
||||
}
|
||||
last_suffix = parseIntervalTable[m[4]];
|
||||
result += Number(m[1]) * parseIntervalTable[m[4]];
|
||||
});
|
||||
return result;
|
||||
}
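// Illustrative results (sketch, not part of this changeset):
const exampleMs = parseInterval('2m');        // => 120000 (2 minutes in milliseconds)
const exampleMixed = parseInterval('1m 30s'); // => 90000
const exampleNumber = parseInterval(30);      // => 30000 (bare numbers are seconds)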
|
||||
|
||||
module.exports.parseInterval = parseInterval;
|
||||
module.exports.semver_sort = semverSort;
|
||||
module.exports.parse_address = parse_address;
|
||||
module.exports.get_version = get_version;
|
||||
module.exports.normalize_dist_tags = normalize_dist_tags;
|
||||
module.exports.tag_version = tag_version;
|
||||
module.exports.combineBaseUrl = combineBaseUrl;
|
||||
module.exports.filter_tarball_urls = filter_tarball_urls;
|
||||
module.exports.validate_metadata = validate_metadata;
|
||||
module.exports.is_object = isObject;
|
||||
module.exports.validate_name = validate_name;
|
||||
module.exports.validate_package = validate_package;
|
||||
|
||||
5154
package-lock.json
generated
Normal file
File diff suppressed because it is too large
92
package.json
Normal file
@@ -0,0 +1,92 @@
|
||||
{
|
||||
"name": "verdaccio",
|
||||
"version": "2.2.5",
|
||||
"description": "Private npm repository server",
|
||||
"author": {
|
||||
"name": "Alex Kocharin",
|
||||
"email": "alex@kocharin.ru"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/verdaccio/verdaccio"
|
||||
},
|
||||
"main": "index.js",
|
||||
"bin": {
|
||||
"verdaccio": "./bin/verdaccio"
|
||||
},
|
||||
"dependencies": {
|
||||
"JSONStream": "^1.1.1",
|
||||
"apache-md5": "^1.1.2",
|
||||
"async": "^2.0.1",
|
||||
"body-parser": "^1.15.0",
|
||||
"bunyan": "^1.8.0",
|
||||
"chalk": "^1.1.3",
|
||||
"commander": "^2.9.0",
|
||||
"compression": "^1.6.1",
|
||||
"cookies": "^0.6.1",
|
||||
"express": "^4.13.4",
|
||||
"handlebars": "^4.0.5",
|
||||
"highlight.js": "^9.3.0",
|
||||
"http-errors": "^1.4.0",
|
||||
"jju": "^1.3.0",
|
||||
"js-yaml": "^3.6.0",
|
||||
"lockfile": "^1.0.1",
|
||||
"lodash": "^4.17.4",
|
||||
"lunr": "^0.7.0",
|
||||
"minimatch": "^3.0.2",
|
||||
"mkdirp": "^0.5.1",
|
||||
"pkginfo": "^0.4.0",
|
||||
"render-readme": "^1.3.1",
|
||||
"request": "^2.72.0",
|
||||
"semver": "^5.1.0",
|
||||
"unix-crypt-td-js": "^1.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"browserify": "^13.0.0",
|
||||
"browserify-handlebars": "^1.0.0",
|
||||
"codecov": "^2.2.0",
|
||||
"eslint": "^3.19.0",
|
||||
"eslint-config-google": "^0.7.1",
|
||||
"grunt": "^1.0.1",
|
||||
"grunt-browserify": "^5.0.0",
|
||||
"grunt-cli": "^1.2.0",
|
||||
"grunt-contrib-less": "^1.3.0",
|
||||
"grunt-contrib-watch": "^1.0.0",
|
||||
"mocha": "^3.2.0",
|
||||
"mocha-lcov-reporter": "^1.3.0",
|
||||
"nyc": "^10.1.2",
|
||||
"onclick": "^0.1.0",
|
||||
"rimraf": "^2.5.2",
|
||||
"transition-complete": "^0.0.2",
|
||||
"unopinionate": "^0.0.4"
|
||||
},
|
||||
"keywords": [
|
||||
"private",
|
||||
"package",
|
||||
"repository",
|
||||
"registry",
|
||||
"modules",
|
||||
"proxy",
|
||||
"server"
|
||||
],
|
||||
"scripts": {
|
||||
"test": "npm run lint && mocha ./test/functional ./test/unit",
|
||||
"test:coverage": "nyc mocha -R spec ./test/functional ./test/unit",
|
||||
"coverage:html": "nyc report --reporter=html",
|
||||
"coverage:codecov": "nyc report --reporter=lcov | codecov",
|
||||
"test-travis": "npm run lint && npm run test:coverage",
|
||||
"test-only": "mocha ./test/functional ./test/unit",
|
||||
"lint": "eslint .",
|
||||
"build-docker": "docker build -t verdaccio .",
|
||||
"build-docker:rpi": "docker build -f Dockerfile.rpi -t verdaccio:rpi ."
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4.6.1",
|
||||
"npm": ">=2.15.9"
|
||||
},
|
||||
"preferGlobal": true,
|
||||
"publishConfig": {
|
||||
"registry": "https://registry.npmjs.org/"
|
||||
},
|
||||
"license": "WTFPL"
|
||||
}
|
||||
55
package.yaml
@@ -1,55 +0,0 @@
|
||||
# use "yapm install ." if you're installing this from git repository
|
||||
|
||||
name: sinopia
|
||||
version: 0.5.5
|
||||
description: Private npm repository server
|
||||
|
||||
author:
|
||||
name: Alex Kocharin
|
||||
email: alex@kocharin.ru
|
||||
|
||||
repository:
|
||||
type: git
|
||||
url: git://github.com/rlidwka/sinopia
|
||||
|
||||
main: index.js
|
||||
|
||||
bin:
|
||||
sinopia: ./bin/sinopia
|
||||
|
||||
dependencies:
|
||||
express: '>= 3.2.5'
|
||||
commander: '>= 1.1.1'
|
||||
js-yaml: '>= 2.0.5'
|
||||
cookies: '>= 0.3.6'
|
||||
request: '*'
|
||||
async: '*'
|
||||
semver: '*'
|
||||
minimatch: '*'
|
||||
bunyan: '>= 0.16.4'
|
||||
fs-ext: '*'
|
||||
|
||||
devDependencies:
|
||||
rimraf: '*'
|
||||
mocha: '*'
|
||||
|
||||
keywords:
|
||||
- private
|
||||
- package
|
||||
- repository
|
||||
- registry
|
||||
- modules
|
||||
- proxy
|
||||
- server
|
||||
|
||||
scripts:
|
||||
test: ./test/start.sh
|
||||
|
||||
# we depend on streams2 stuff
|
||||
# it can be replaced with isaacs/readable-stream, ask if you need to use 0.8
|
||||
engines:
|
||||
node: '>=0.10'
|
||||
|
||||
preferGlobal: true
|
||||
license: WTFPL
|
||||
|
||||
6
scripts/generate_authors.sh
Executable file
@@ -0,0 +1,6 @@
|
||||
#!/bin/sh
|
||||
|
||||
git shortlog -se \
|
||||
| perl -spe 's/^\s+\d+\s+//' \
|
||||
| sed -e '/^CommitSyncScript.*$/d' \
|
||||
> AUTHORS
|
||||
12
systemd/verdaccio.service
Normal file
@@ -0,0 +1,12 @@
|
||||
[Unit]
|
||||
Description=verdaccio Service
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
User=verdaccio
|
||||
WorkingDirectory=/home/verdaccio
|
||||
ExecStart=/usr/local/lib/npm/bin/verdaccio
|
||||
ExecStop=/usr/bin/bash -c "kill $(ps -ef | grep ^verdacc+ | awk {'print $2'})"
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
12
test/.eslintrc
Normal file
@@ -0,0 +1,12 @@
|
||||
# vim: syntax=yaml
|
||||
|
||||
extends: ["eslint:recommended"]
|
||||
|
||||
env:
|
||||
node: true
|
||||
mocha: true
|
||||
es6: true
|
||||
|
||||
valid-jsdoc: 0
|
||||
no-redeclare: 1
|
||||
no-console: 1
|
||||
7
test/README.md
Normal file
@@ -0,0 +1,7 @@
|
||||
All tests are split in three folders:
|
||||
|
||||
- `unit` - Tests that cover functions that transform data in a non-trivial way. These tests simply `require()` a few files and run code in there, so they are very fast.
|
||||
- `functional` - Tests that launch a verdaccio instance and perform a series of requests to it over http. They are slower than unit tests.
|
||||
- `integration` - Tests that launch a verdaccio instance and do requests to it using npm. They are really slow and can hit a real npm registry.
|
||||
|
||||
Unit and functional tests are executed automatically by running `npm test` from the project's root directory. Integration tests are supposed to be executed manually from time to time.
|
||||
111
test/basic.js
@@ -1,111 +0,0 @@
|
||||
var assert = require('assert')
|
||||
, readfile = require('fs').readFileSync
|
||||
, crypto = require('crypto')
|
||||
, ex = module.exports
|
||||
, server = process.server
|
||||
, server2 = process.server2
|
||||
|
||||
ex['trying to fetch non-existent package'] = function(cb) {
|
||||
server.get_package('testpkg', function(res, body) {
|
||||
// shouldn't exist yet
|
||||
assert.equal(res.statusCode, 404);
|
||||
assert(~body.error.indexOf('no such package'));
|
||||
cb();
|
||||
});
|
||||
};
|
||||
|
||||
ex['creating new package'] = function(cb) {
|
||||
server.put_package('testpkg', require('./lib/package')('testpkg'), function(res, body) {
|
||||
assert.equal(res.statusCode, 201);
|
||||
assert(~body.ok.indexOf('created new package'));
|
||||
cb();
|
||||
});
|
||||
};
|
||||
|
||||
ex['downloading non-existent tarball'] = function(cb) {
|
||||
server.get_tarball('testpkg', 'blahblah', function(res, body) {
|
||||
assert.equal(res.statusCode, 404);
|
||||
assert(~body.error.indexOf('no such file'));
|
||||
cb();
|
||||
});
|
||||
};
|
||||
|
||||
ex['uploading incomplete tarball'] = function(cb) {
|
||||
server.put_tarball_incomplete('testpkg', 'blahblah1', readfile('fixtures/binary'), 3000, function(res, body) {
|
||||
cb();
|
||||
});
|
||||
};
|
||||
|
||||
ex['uploading new tarball'] = function(cb) {
|
||||
server.put_tarball('testpkg', 'blahblah', readfile('fixtures/binary'), function(res, body) {
|
||||
assert.equal(res.statusCode, 201);
|
||||
assert(body.ok);
|
||||
cb();
|
||||
});
|
||||
};
|
||||
|
||||
ex['doubleerr test'] = function(cb) {
|
||||
server.put_tarball('testfwd2', 'blahblah', readfile('fixtures/binary'), function(res, body) {
|
||||
assert.equal(res.statusCode, 404);
|
||||
assert(body.error);
|
||||
cb();
|
||||
});
|
||||
};
|
||||
|
||||
ex['downloading newly created tarball'] = function(cb) {
|
||||
server.get_tarball('testpkg', 'blahblah', function(res, body) {
|
||||
assert.equal(res.statusCode, 200);
|
||||
assert.deepEqual(body, readfile('fixtures/binary').toString('utf8'));
|
||||
cb();
|
||||
});
|
||||
};
|
||||
|
||||
ex['uploading new package version for bad pkg'] = function(cb) {
|
||||
server.put_version('testpxg', '0.0.1', require('./lib/package')('testpxg'), function(res, body) {
|
||||
assert.equal(res.statusCode, 404);
|
||||
assert(~body.error.indexOf('no such package'));
|
||||
cb();
|
||||
});
|
||||
};
|
||||
|
||||
ex['uploading new package version (bad sha)'] = function(cb) {
|
||||
var pkg = require('./lib/package')('testpkg')
|
||||
pkg.dist.shasum = crypto.createHash('sha1').update('fake').digest('hex')
|
||||
server.put_version('testpkg', '0.0.1', pkg, function(res, body) {
|
||||
assert.equal(res.statusCode, 400);
|
||||
assert(~body.error.indexOf('shasum error'));
|
||||
cb();
|
||||
});
|
||||
};
|
||||
|
||||
ex['uploading new package version'] = function(cb) {
|
||||
var pkg = require('./lib/package')('testpkg')
|
||||
pkg.dist.shasum = crypto.createHash('sha1').update(readfile('fixtures/binary')).digest('hex')
|
||||
server.put_version('testpkg', '0.0.1', pkg, function(res, body) {
|
||||
assert.equal(res.statusCode, 201);
|
||||
assert(~body.ok.indexOf('published'));
|
||||
cb();
|
||||
});
|
||||
};
|
||||
|
||||
ex['downloading newly created package'] = function(cb) {
|
||||
server.get_package('testpkg', function(res, body) {
|
||||
assert.equal(res.statusCode, 200);
|
||||
assert.equal(body.name, 'testpkg');
|
||||
assert.equal(body.versions['0.0.1'].name, 'testpkg');
|
||||
assert.equal(body.versions['0.0.1'].dist.tarball, 'http://localhost:55551/testpkg/-/blahblah');
|
||||
assert.deepEqual(body['dist-tags'], {latest: '0.0.1'});
|
||||
cb();
|
||||
});
|
||||
};
|
||||
|
||||
ex['downloading package via server2'] = function(cb) {
|
||||
server2.get_package('testpkg', function(res, body) {
|
||||
assert.equal(res.statusCode, 200);
|
||||
assert.equal(body.name, 'testpkg');
|
||||
assert.equal(body.versions['0.0.1'].name, 'testpkg');
|
||||
assert.equal(body.versions['0.0.1'].dist.tarball, 'http://localhost:55552/testpkg/-/blahblah');
|
||||
assert.deepEqual(body['dist-tags'], {latest: '0.0.1'});
|
||||
cb();
|
||||
});
|
||||
};
|
||||
@@ -1,35 +0,0 @@
|
||||
storage: ./test-storage
|
||||
|
||||
users:
|
||||
test:
|
||||
password: a94a8fe5ccb19ba61c4c0873d391e987982fbbd3
|
||||
|
||||
uplinks:
|
||||
server2:
|
||||
url: http://localhost:55552/
|
||||
|
||||
logs:
|
||||
- {type: stdout, format: pretty, level: trace}
|
||||
|
||||
packages:
|
||||
'testfwd*':
|
||||
allow_access: all
|
||||
allow_publish: all
|
||||
proxy_access: server2
|
||||
proxy_publish: server2
|
||||
|
||||
'testloop':
|
||||
allow_access: all
|
||||
allow_publish: all
|
||||
proxy_access: server2
|
||||
proxy_publish: server2
|
||||
|
||||
'*':
|
||||
allow_access: test undefined
|
||||
allow_publish: test undefined
|
||||
|
||||
# this should not matter
|
||||
testpkg:
|
||||
allow_access: none
|
||||
|
||||
listen: 55551
|
||||
@@ -1,31 +0,0 @@
|
||||
storage: ./test-storage2
|
||||
|
||||
users:
|
||||
test:
|
||||
password: a94a8fe5ccb19ba61c4c0873d391e987982fbbd3
|
||||
|
||||
uplinks:
|
||||
server1:
|
||||
url: http://localhost:55551/
|
||||
|
||||
packages:
|
||||
'testfwd':
|
||||
allow_access: all
|
||||
allow_publish: all
|
||||
|
||||
'testloop':
|
||||
allow_access: all
|
||||
allow_publish: all
|
||||
proxy_access: server1
|
||||
proxy_publish: server1
|
||||
|
||||
testpkg:
|
||||
allow_access: test anonymous
|
||||
allow_publish: test anonymous
|
||||
proxy_access: server1
|
||||
|
||||
'*':
|
||||
allow_access: test anonymous
|
||||
allow_publish: test anonymous
|
||||
|
||||
listen: 55552
|
||||
@@ -1,13 +0,0 @@
|
||||
var assert = require('assert');
|
||||
var ex = module.exports;
|
||||
|
||||
ex['trying to fetch non-existent package'] = function(cb) {
|
||||
var f = fork('../bin/sinopia', ['-c', './config/log-1.yaml'], {silent: true});
|
||||
f.on('message', function(msg) {
|
||||
if ('sinopia_started' in msg) {
|
||||
f.kill();
|
||||
cb();
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
stuff used for stress-testing, using against real servers is unadvisable
|
||||
Some files were not shown because too many files have changed in this diff