Compare commits

...

214 Commits

Author SHA1 Message Date
Alex Kocharin
d0c2ac0f9d 0.8.0 2014-04-01 00:17:10 +00:00
Alex Kocharin
d820a030a1 0.8.0 2014-04-01 00:17:03 +00:00
Alex Kocharin
b4c0b89365 set ignore_latest_tag to false by default 2014-04-01 00:13:59 +00:00
Alex Kocharin
752d0f62f0 make fs-ext optional 2014-03-31 04:32:11 +00:00
Alex Kocharin
9404e811a5 accept gzip from uplinks, fix #54 2014-03-30 21:05:42 +00:00
Alex Kocharin
2102e71c10 removing outdated todo 2014-03-30 19:43:03 +00:00
Alex Kocharin
f023826a59 0.7.1 2014-03-29 06:16:07 +00:00
Alex Kocharin
f03512d32b 0.7.1 2014-03-29 06:15:59 +00:00
Alex Kocharin
7f56593ee3 document new config option 2014-03-29 06:15:43 +00:00
Alex Kocharin
2995858bb0 updating eslint config 2014-03-29 05:38:21 +00:00
Alex Kocharin
a8cdfcd7cd add err.status instead of checking for exact error message
it's done for consistency reasons
2014-03-29 04:32:05 +00:00
Alex Kocharin
4470cb7d55 making latest tag behaviour configurable 2014-03-29 02:31:34 +00:00
Alex Kocharin
4b06026d2e Revert "Make 404 responses compatible with CouchDB API"
This reverts commit dabf5e1c9a.

See discussion in #57
2014-03-29 01:08:28 +00:00
Alex Kocharin
7967d5857d Revert "fix logs and tests for #56"
This reverts commit df49fb84c1.
2014-03-29 01:08:00 +00:00
Alex Kocharin
4ea84e8b74 0.7.0 2014-03-13 19:48:42 +00:00
Alex Kocharin
bf63b9f738 0.7.0 2014-03-13 19:47:49 +00:00
Alex Kocharin
8af77df0e0 fix zero timeouts in config 2014-03-13 19:45:47 +00:00
Alex Kocharin
df49fb84c1 fix logs and tests for #56 2014-03-13 19:23:21 +00:00
Alex Kocharin
d657e180d5 fix tests 2014-03-13 19:19:02 +00:00
Alex Kocharin
0afd9d1bb3 Merge pull request #56 from strongloop/fix-unknown-package-name-response
Make 404 responses compatible with CouchDB API
2014-03-13 19:15:04 +00:00
Alex Kocharin
f839837f34 document new config options 2014-03-13 18:56:31 +00:00
Alex Kocharin
a030e1110d release 0.7.0 2014-03-13 18:56:14 +00:00
Miroslav Bajtoš
dabf5e1c9a Make 404 responses compatible with CouchDB API
The CouchDB REST API always returns `"error": "not_found"` in the body
of a 404 response:
  http://couchdb-13.readthedocs.org/en/latest/api-basics/#http-status-codes

The npm client depends on the magic string 'not_found' as can be seen
in requestDone() in npm-registry-client/lib/request.js.

Before this change, npm install of an unknown package was reporting
the Sinopia error string and a stack trace of npm.

After this change, npm install of an unknown package returns a nice
error saying "the package is not in the npm registry, bug the author"
2014-03-13 19:47:44 +01:00
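For context, a CouchDB-style 404 body looks roughly like this (a sketch; the `reason` text varies, and only the `not_found` value matters to npm's magic-string check):

```json
{
  "error": "not_found",
  "reason": "missing"
}
```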
Alex Kocharin
bb9612441c trim intervals 2014-03-08 04:38:37 +00:00
Alex Kocharin
48825a2e46 change intervals formatting to match nginx
see http://wiki.nginx.org/ConfigNotation
2014-03-08 04:37:16 +00:00
Alex Kocharin
6a2a463b76 count each failed request only once 2014-03-08 04:00:07 +00:00
Alex Kocharin
f1ec18dc4b implement nginx-like logic to avoid hitting upstream when it's down 2014-03-08 03:54:28 +00:00
Alex Kocharin
6b9001ef6c change interval formatting in config
All intervals are now in milliseconds. But you can add
multiples ("ms", "s", "m", "h", "d", "M", "y") to set
value using different units.

For example, value "1.5h" would mean 1.5 hours.
2014-03-08 03:49:59 +00:00
Alex Kocharin
9ff1203688 throw away incomplete downloads 2014-03-07 19:48:24 +00:00
Alex Kocharin
a891354a32 always return content-length for tarballs 2014-03-07 19:39:20 +00:00
Alex Kocharin
9c4c93695b fix crash in #52 2014-03-07 18:20:41 +00:00
Alex Kocharin
3b510437a8 Merge pull request #47 from samcday/maxage-support
Support maxage for uplinks
2014-03-06 07:11:07 +00:00
Alex Kocharin
ed1ec0c27a eslint version bump 2014-03-05 20:07:38 +00:00
Alex Kocharin
526f61b136 0.6.7 2014-03-05 19:16:47 +00:00
Alex Kocharin
568058d7d9 pin down express@3 with backward compat semver range
closes #49
2014-03-05 19:16:19 +00:00
Alex Kocharin
b77da91094 Merge branch 'master' of github.com:rlidwka/sinopia 2014-03-05 15:18:39 +00:00
Alex Kocharin
e2457e9e5c 0.6.6 2014-03-05 14:20:20 +00:00
Alex Kocharin
c6714cd591 pin express@3 dependency 2014-03-05 14:19:41 +00:00
Sam Day
58e98c7263 Support maxage for uplinks 2014-03-02 20:47:09 +11:00
Alex Kocharin
234deb4e7e Merge pull request #46 from BarthV/patch-1
adding Chef Cookbook to README
2014-03-01 08:48:06 +00:00
Barthélemy Vessemont
4466cf6be1 Chef Cookbook is out!
Hi!

http://community.opscode.com/cookbooks/sinopia - https://github.com/BarthV/sinopia-cookbook

I made a Chef cookbook; it provides a Sinopia server for Ubuntu 12.04 (for the moment) and covers most of the config.yaml options. You can easily test this cookbook with VirtualBox & Vagrant.

I would be happy to help Sinopia and reply to any questions/issues about my work.

Feel free to open PRs on sinopia-cookbook!

Thanks.
2014-02-28 22:49:08 +01:00
Alex Kocharin
cf5dddb174 republishing as 0.6.5 2014-02-28 11:30:08 +00:00
Alex Kocharin
70506d54ee 0.6.5 2014-02-28 11:29:08 +00:00
Alex Kocharin
09781df60a 0.6.4 2014-02-28 11:26:02 +00:00
Alex Kocharin
c730670711 version bump 2014-02-28 11:25:51 +00:00
Alex Kocharin
691d62f403 eslint update + style fixes 2014-02-23 21:20:50 +04:00
Alex Kocharin
a5d0094669 remove npmsslkeys
See commit in npmconf:
e0b4a4e5cc
2014-02-23 20:55:41 +04:00
Alex Kocharin
b65cf7afe3 move eslint config to yaml 2014-02-07 00:56:46 +04:00
Alex Kocharin
e2e2c39148 0.6.3 2014-02-03 00:54:22 +04:00
Alex Kocharin
de4dbd234a 0.6.3 2014-02-03 00:51:59 +04:00
Alex Kocharin
440f42415c adding test for invalid tags, fixes #40 2014-02-03 00:50:51 +04:00
Alex Kocharin
8840ac4c1f removing bad statement 2014-02-03 00:50:38 +04:00
Alex Kocharin
9f662a69e1 validate all url parameters better 2014-02-01 12:08:48 +04:00
Alex Kocharin
c098eb4661 Merge pull request #43 from saheba/link_to_puppet_module
added link to published puppet-sinopia module
2014-01-24 13:20:05 -08:00
saheba
2c28f3549e added link to published puppet-sinopia module
automates sinopia installations on unix machines with puppet.
2014-01-24 13:56:19 +01:00
Alex Kocharin
616ec7626a process.getuid doesn't always exist (fixes #41) 2014-01-24 06:36:03 +03:00
Alex Kocharin
9a915511b3 0.6.2 2014-01-18 23:07:50 +04:00
Alex Kocharin
0d37933259 version bump 2014-01-18 23:07:29 +04:00
Alex Kocharin
2add883370 adding config param to specify upload limits, fixes #39 2014-01-18 23:04:12 +04:00
Alex Kocharin
e522347667 making loose semver versions work, ref #38 2014-01-18 22:57:44 +04:00
Alex Kocharin
8987ee0b2a Merge branch 'master' of github.com:rlidwka/sinopia 2014-01-13 23:46:47 +04:00
Alex Kocharin
ceb9b5be1d 0.6.1 2014-01-13 23:41:23 +04:00
Alex Kocharin
6c258758c8 support setting different storage paths for different packages
fixes #35
2014-01-13 22:56:36 +04:00
Alex Kocharin
dadbf3a18b rename ChangeLog to History 2014-01-13 22:02:33 +04:00
Alex Kocharin
c18ac8015a update dependencies 2014-01-13 22:02:08 +04:00
Alex Kocharin
68d3cc7295 fs interface refactoring 2014-01-13 20:48:51 +04:00
Alex Kocharin
8ebe73d1f1 Merge pull request #34 from keyvanfatehi/patch-1
Add docker-sinopia link to README
2014-01-06 22:46:10 -08:00
Keyvan Fatehi
75ea41a55b Add docker-sinopia link to README 2014-01-06 19:09:33 -08:00
Alex Kocharin
d519e8e763 0.6.0 2013-12-30 12:32:20 +04:00
Alex Kocharin
bb6b31e7c0 v0.6.0 2013-12-30 12:29:54 +04:00
Alex Kocharin
5d19b66290 adding integration tests 2013-12-30 12:25:26 +04:00
Alex Kocharin
5422de642e tag support, closes #8 2013-12-29 10:41:31 +04:00
Alex Kocharin
02f8143097 tests for tags support 2013-12-29 10:40:47 +04:00
Alex Kocharin
3c7ae2f37d a few bugfixes 2013-12-29 04:58:48 +04:00
Alex Kocharin
6075034521 proxy_access -> proxy (since we're removing proxy_publish) 2013-12-29 04:54:46 +04:00
Alex Kocharin
d1a1a8f4cd bugfix 2013-12-29 04:54:39 +04:00
Alex Kocharin
54535893ab adding support for new npm behaviour, closes #31 2013-12-29 04:53:31 +04:00
Alex Kocharin
8c05cfe6a2 require heapdump if present on the system 2013-12-29 04:52:23 +04:00
Alex Kocharin
6c838c7947 remove all replication-like functionality
apparently it was a bad idea, it's simpler to just run a single
sinopia instance as a master

TODO: write some help in readme about it
2013-12-27 17:23:14 +04:00
Alex Kocharin
f3f4fdc4ac tests 2013-12-27 17:06:57 +04:00
Alex Kocharin
985d705ad2 working on tags / incomplete 2013-12-27 17:06:30 +04:00
Alex Kocharin
6234f8cba9 Merge branch 'master' of github.com:rlidwka/sinopia 2013-12-27 17:05:05 +04:00
Alex Kocharin
c0e34890e4 req.user -> req.remoteUser 2013-12-27 15:29:23 +04:00
Alex Kocharin
b42687d230 Merge tag 'v0.5.9' of github.com:rlidwka/sinopia
0.5.9
2013-12-26 23:46:08 +04:00
Alex Kocharin
4ce0142651 0.5.9 2013-12-26 23:25:27 +04:00
Alex Kocharin
b44255de3c set right Accept header for tarballs, closes #32 2013-12-26 23:25:19 +04:00
Alex Kocharin
b0fa7ee2d1 using eslint to check the code 2013-12-23 04:14:57 +04:00
Alex Kocharin
1c17291654 0.5.8 2013-12-21 16:31:25 +04:00
Alex Kocharin
3b2bd161b7 logging bad versions 2013-12-21 16:09:29 +04:00
Alex Kocharin
d29e22ed4b version bump 2013-12-21 13:20:25 +04:00
Alex Kocharin
1a0f577689 adding nice screenshot 2013-12-21 12:06:38 +03:00
Alex Kocharin
a3a59aa8f3 reorganize tests, and add new ones 2013-12-19 19:11:54 +04:00
Alex Kocharin
3a31064ded deal with js-yaml warning 2013-12-19 18:50:31 +04:00
Alex Kocharin
151136381e style fix 2013-12-19 07:18:45 +04:00
Alex Kocharin
503b60a45b catch bind errors 2013-12-19 07:17:52 +04:00
Alex Kocharin
225c2bb65f added a warning to not run it under root 2013-12-17 20:25:17 +04:00
Alex Kocharin
8ea985ff6b don't color multiline strings in logs output
stack traces should be easily recognizable
2013-12-16 03:07:19 +04:00
Alex Kocharin
0cec69ee1d Merge branch 'master' of github.com:rlidwka/sinopia 2013-12-16 00:58:09 +04:00
Alex Kocharin
d5d2a8bdcc moving unit tests to separate folder 2013-12-16 00:54:50 +04:00
Alex Kocharin
693aa576b4 safeguard against bad tarball names 2013-12-16 00:54:29 +04:00
Alex Kocharin
3abce5e2b6 better error message when publishing with bad auth header 2013-12-16 00:38:16 +04:00
Alex Kocharin
5c6d6301ef 0.5.7 2013-12-15 04:06:19 +04:00
Alex Kocharin
f43ec71ccb 0.5.7 2013-12-15 04:05:58 +04:00
Alex Kocharin
ef61515f28 try to fetch package from uplinks if user requested a tarball we don't know about
closes #29 + tests fix
2013-12-15 03:15:58 +04:00
Alex Kocharin
9094d0742f better error message for incorrect package names 2013-12-15 03:09:55 +04:00
Alex Kocharin
ede09b5c77 refactoring: move uplink.get_package() calls to it's own function 2013-12-13 20:50:41 +04:00
Alex Kocharin
19e4d78197 test suite for #29 (nothing is fixed yet) 2013-12-13 20:49:48 +04:00
Alex Kocharin
380ec1e917 always end response with a newline 2013-12-13 19:12:24 +04:00
Alex Kocharin
ee16b06b3a style fix 2013-12-13 18:00:46 +04:00
Alex Kocharin
8149f883b0 default express.js to production mode, not development 2013-12-12 03:19:40 +04:00
Alex Kocharin
c75249b5b0 commenting out tests that doesn't work yet 2013-12-12 02:02:20 +04:00
Alex Kocharin
942a7747ac fixing tests 2013-12-12 02:01:23 +04:00
Alex Kocharin
96b336acc3 if uplink doesn't answer, stop sending requests to it for a while (2 min default) 2013-12-12 02:00:26 +04:00
Alex Kocharin
520a3b0220 cleanup 2013-12-12 01:55:40 +04:00
Alex Kocharin
230750ff88 adding sinopia version to logs 2013-12-12 01:55:17 +04:00
Alex Kocharin
84a6e36d84 semver version bump 2013-12-12 01:54:36 +04:00
Alex Kocharin
203f0300e3 Merge branch '0.5.x' of github.com:rlidwka/sinopia
Conflicts:
	lib/storage.js
	test/start.sh
2013-12-12 01:44:29 +04:00
Alex Kocharin
1cb12fec35 0.5.6 2013-12-12 01:27:34 +04:00
Alex Kocharin
e5195ecfb5 version bump 2013-12-12 01:27:21 +04:00
Alex Kocharin
7ee2361700 better tests for various tags (including bad ones), ref #21 2013-12-12 01:22:35 +04:00
Alex Kocharin
47a92ff273 fail-safe semver parsing + tests, ref #25 2013-12-12 00:51:48 +04:00
Brett Trotter
fc99692219 Update semver.compare to semver.compareLoose to tolerate grunt and other packages 2013-12-12 00:35:07 +04:00
Alex Kocharin
bd7dde229a Merge branch 'master' of github.com:rlidwka/sinopia 2013-12-11 21:40:41 +04:00
Alex Kocharin
21a5c788e1 Merge pull request #26 from bretttrotter-uar/test-exit-status
exit with appropriate status code
2013-12-11 09:23:22 -08:00
Brett Trotter
0b286d1fe3 exit with appropriate status code 2013-12-11 09:04:35 -06:00
Alex Kocharin
987f540cde Merge pull request #25 from bretttrotter-uar/loosen-semver
Update semver.compare to semver.compareLoose to tolerate grunt and other...
2013-12-11 05:53:46 -08:00
Alex Kocharin
a11da1545a 0.5.5 2013-12-11 17:04:32 +04:00
Alex Kocharin
d149252a3b fixing crash on bad tags 2013-12-11 17:04:08 +04:00
Brett Trotter
ed3386f044 Update semver.compare to semver.compareLoose to tolerate grunt and other packages 2013-12-10 13:04:47 -06:00
Alex Kocharin
36fa1ba655 log fatal errors 2013-12-10 14:29:46 +04:00
Alex Kocharin
77182a755d expose sinopia's version in Server header for debugging purposes 2013-12-10 14:12:34 +04:00
Alex Kocharin
f05a4784fd fixing crash on bad tags 2013-12-10 13:55:35 +04:00
Alex Kocharin
7f7c056ecd various bugfixes, camelcase for http headers 2013-12-09 08:00:16 +04:00
Alex Kocharin
bbb402f762 detecting http loops 2013-12-09 07:59:31 +04:00
Alex Kocharin
4d70d8065e add via tag to prevent loops 2013-12-09 07:58:25 +04:00
Alex Kocharin
3617a91f87 0.5.4 2013-12-08 04:56:14 +04:00
Alex Kocharin
03445c4c49 version bump 2013-12-08 04:55:52 +04:00
Alex Kocharin
2f45649a2c improving tags support, ref #8 2013-12-08 02:55:50 +04:00
Alex Kocharin
08d1011433 use standard \033 escape sequence for vt esc codes 2013-12-08 02:54:26 +04:00
Alex Kocharin
6e71913c46 setting default timeout to 30 seconds, #18 2013-12-08 02:37:27 +04:00
Alex Kocharin
cc7165dd3c Merge branch 'master' of github.com:rlidwka/sinopia 2013-12-06 21:49:31 +04:00
Alex Kocharin
a257fc3962 swallow bad auth errors, fixes #17 2013-12-06 21:46:51 +04:00
Alex Kocharin
6a295ac196 don't send etags with errors 2013-12-06 21:46:11 +04:00
Alex Kocharin
ff52b00a7c Merge pull request #18 from iambrandonn/master
Adding a configurable timeout for each uplink
2013-12-05 09:31:35 -08:00
Alex Kocharin
9ee8d484db dealing with internal errors in express.js 2013-12-05 16:31:21 +04:00
Alex Kocharin
7b0ab14d4c dealing with express.js errors 2013-12-05 16:27:23 +04:00
Brandon Nicholls
1d109f8071 Adding a configurable timeout for each uplink 2013-12-04 12:39:29 -07:00
Alex Kocharin
75e78b4137 0.5.3 2013-11-24 21:19:02 +04:00
Alex Kocharin
2e4a4b1dc1 version 0.5.3 changelog 2013-11-24 21:18:18 +04:00
Alex Kocharin
98d2c3ae18 tests fix 2013-11-24 21:13:21 +04:00
Alex Kocharin
8f05e141c3 retrieving proxy values from environment if present 2013-11-24 21:08:20 +04:00
Alex Kocharin
1df6d53612 config examples for proxy 2013-11-24 21:07:54 +04:00
Alex Kocharin
13242c8237 add proxy support, fix #13 2013-11-24 21:07:18 +04:00
Alex Kocharin
fecffa2a39 return an appropriate X-Status-Cat header 2013-11-24 21:06:01 +04:00
Alex Kocharin
6a7226bb83 Merge branch 'master' of github.com:rlidwka/sinopia 2013-11-24 21:03:05 +04:00
Alex Kocharin
fbb98b1101 Merge pull request #12 from Qwerios/master
Updated documentation, #12
2013-11-19 22:44:36 -08:00
Mark Doeswijk
9f2871f381 Added listen all documentation 2013-11-19 13:26:32 +01:00
Mark Doeswijk
558598c6be Added information about how to add a new user 2013-11-19 11:56:38 +01:00
Mark Doeswijk
e6ce764750 Added documentation for how to setup sinopia on a linux server 2013-11-19 11:53:53 +01:00
Alex Kocharin
aca1dc57d7 taking $PATH into account when launching tests 2013-10-28 10:01:27 +04:00
Alex Kocharin
b9d5066f82 changing license to more (in)appropriate 2013-10-26 19:52:24 +04:00
Alex Kocharin
15d46e3280 log 'new packages directory' message at a warn level 2013-10-26 16:20:42 +04:00
Alex Kocharin
b2f6128e9c style fix 2013-10-26 16:18:36 +04:00
Alex Kocharin
aba48ee6dd 0.5.2 2013-10-26 15:43:59 +04:00
Alex Kocharin
5da3c8ba58 transaction stub 2013-10-26 15:43:42 +04:00
Alex Kocharin
3f8166fa19 0.5.2 2013-10-26 15:43:15 +04:00
Alex Kocharin
dafcf8647c basic support for unpublishing individual versions (local only) 2013-10-23 10:15:17 +04:00
Alex Kocharin
6ae26226eb removing all tarballs on unpublish --force 2013-10-22 17:33:39 +04:00
Alex Kocharin
3a407396b8 better error logging 2013-10-22 13:37:28 +04:00
Alex Kocharin
782abbb86d honor etags when making requests 2013-10-22 13:31:48 +04:00
Alex Kocharin
ec26083e81 added a maxage option for uplinks 2013-10-22 12:34:07 +04:00
Alex Kocharin
fea98dfa59 unlink directory when package is unpublished 2013-10-22 11:53:59 +04:00
Alex Kocharin
78f856cf81 shasum check for uploaded tarballs 2013-10-22 11:45:19 +04:00
Alex Kocharin
61658cfbdc added utils.is_object function for convenience 2013-10-22 11:29:57 +04:00
Alex Kocharin
4c2c4b87c2 calculating sha1sum of uploaded tarballs 2013-10-22 11:12:06 +04:00
Alex Kocharin
5622b2283d added couchdb-like revisions 2013-10-22 11:00:04 +04:00
Alex Kocharin
8b314040d9 fixing race conditions when updating package data 2013-10-22 09:10:25 +04:00
Alex Kocharin
e35c02f8f1 0.5.1 2013-10-20 23:17:31 +04:00
Alex Kocharin
f1d26652f3 0.5.1 2013-10-20 23:15:06 +04:00
Alex Kocharin
acc2e571ff logging didn't work on chunked output 2013-10-19 01:53:27 +04:00
Alex Kocharin
fa51797602 return 500 instead of 404 if local storage is corrupted 2013-10-19 01:35:41 +04:00
Alex Kocharin
3ca3ac2dae logging fs errors 2013-10-19 01:17:53 +04:00
Alex Kocharin
012892600b handing JSON.parse errors 2013-10-19 00:46:13 +04:00
Alex Kocharin
674f944942 tweaking fs logger color 2013-10-19 00:45:36 +04:00
Alex Kocharin
8a2a91c1a7 0.5.0 2013-10-12 21:28:09 +04:00
Alex Kocharin
473d3d5c2f 0.5.0 release 2013-10-12 21:28:01 +04:00
Alex Kocharin
5b6f22c0b9 changelog added 2013-10-12 18:38:21 +04:00
Alex Kocharin
1a58a0f8d8 hide authorization header in logs 2013-10-12 18:37:47 +04:00
Alex Kocharin
fcebeea3ee don't print colors if output stream is not tty 2013-10-12 11:57:59 +04:00
Alex Kocharin
015623f9ae making various test cases work 2013-10-11 13:50:41 +04:00
Alex Kocharin
957f915d42 fixing a next(err) function called twice 2013-10-11 13:49:00 +04:00
Alex Kocharin
70f717a295 using undefined instead of anonymous for non-logged in users 2013-10-11 09:53:54 +04:00
Alex Kocharin
3596a12eb7 logging caused failure in case of errors 2013-10-11 09:46:37 +04:00
Alex Kocharin
9ee525b317 logging engine added, much better logs now 2013-10-11 09:32:59 +04:00
Alex Kocharin
b6082f1216 some work towards unpublishing packages 2013-10-11 09:32:12 +04:00
Alex Kocharin
0aa687624d only create config if we're asked to (+ nice help) 2013-10-09 20:22:29 +04:00
Alex Kocharin
a76a443994 moved bin/sinopia to lib/cli.js 2013-10-09 19:47:55 +04:00
Alex Kocharin
94d37441ae error not raised when updating versions - fix #10 2013-10-08 22:55:32 +04:00
Alex Kocharin
7a3255d1ab very basic support for unpublishing a package 2013-10-06 12:27:50 +04:00
Alex Kocharin
b9cb60da64 replacing connect.bodyParser with connect.json 2013-10-06 11:26:05 +04:00
Alex Kocharin
566f2c4896 0.4.3 2013-10-05 20:39:31 +04:00
Alex Kocharin
459002128c ignore tarballs 2013-10-05 20:37:44 +04:00
Alex Kocharin
bffc6db38f better error message, ref #9 2013-10-05 18:49:08 +04:00
Alex Kocharin
57dc1c64d9 Merge branch 'master' of github.com:rlidwka/sinopia 2013-10-05 18:29:05 +04:00
Alex Kocharin
8dcb735626 rename tarball from uplink when download is finished
closes #11
2013-10-05 18:26:51 +04:00
Alex Kocharin
d59bdb0c37 Update README.md 2013-10-02 22:56:06 +04:00
Alex Kocharin
2f15d1c0fa fd leak 2013-10-02 22:54:46 +04:00
Alex Kocharin
16730bb7b4 fix fetching tarballs from npmjs registry 2013-10-02 22:48:32 +04:00
Alex Kocharin
f38a897fa2 improving tags support (read-only for now) 2013-10-02 22:26:20 +04:00
Alex Kocharin
1556ce195a npm star/unstar calls now return proper error 2013-10-02 22:01:18 +04:00
Alex Kocharin
2675196672 jshinting 2013-10-01 22:02:23 +04:00
Alex Kocharin
77fccaa1e5 readme: typo fix 2013-09-29 08:22:50 +04:00
Alex Kocharin
e95d22abf6 typo 2013-09-29 08:22:04 +04:00
Alex Kocharin
cd9f0d7044 disabling loop tests for a while 2013-09-29 08:05:20 +04:00
62 changed files with 4397 additions and 1499 deletions

.eslint.yaml (new file)

@@ -0,0 +1,271 @@
env:
node: true
#
# 0 - disable
# Rules that are more harmful than useful, or just buggy.
#
# 1 - warning
# Rules that we didn't encounter yet. You can safely ignore them,
# but I'd like to know any interesting use-cases they forbid.
#
# 2 - error
# Rules that have proven to be useful, please follow them.
#
rules:
# didn't understand what it does, but it fails a good code
block-scoped-var: 0
# fails where newlines are used to format pretty big "if":
# if (
# name.charAt(0) === "." ||
# name.match(/[\/@\s\+%:]/) ||
# name !== encodeURIComponent(name) ||
# name.toLowerCase() === "node_modules"
# ) {
brace-style: 1
# snake_case is more readable, what's up with you guys?
camelcase: 0
# if some functions are complex, they are for a good reason,
# ain't worth it
complexity: [0, 10]
# never saw it, but self is preferred
consistent-this: [1, self]
# fails good code
curly: [0, multi]
# fails good code, where this notation is used for consistency:
# something['foo-bar'] = 123
# something['blahblah'] = 234
dot-notation: 0
# pointless in many cases (like indexOf() == -1), harmful in a few
# cases (when you do want to ignore types), fails good code
eqeqeq: 0
# if someone is changing prototype and makes properties enumerable,
# it's their own fault
guard-for-in: 0
# if some functions are complex, they are for a good reason,
# ain't worth it
max-depth: [0, 4]
max-nested-callbacks: [0, 2]
# should it really throw for every long URL?
max-len: [0, 80, 4]
# that's obvious by just looking at the code, you don't need lint for that
max-params: [0, 3]
# if some functions are complex, they are for a good reason,
# ain't worth it
max-statements: [0, 10]
# that one makes sense
new-cap: 2
# I'm writing javascript, not some weird reduced version of it
no-bitwise: 0
# not working around IE bugs, sorry
no-catch-shadow: 0
# see above, IE is useful for downloading other browsers only
no-comma-dangle: 0
# good for removing debugging code
no-console: 2
# good for removing debugging code
no-debugger: 2
# why would anyone need to check against that?
no-else-return: 0
# sometimes empty statement contains useful comment
no-empty: 0
# stupid rule
# "x == null" is "x === null || x === undefined"
no-eq-null: 0
# fails good code, when parens are used for grouping:
# (req && req.headers['via']) ? req.headers['via'] + ', ' : ''
# not everyone remembers priority tables, you know
no-extra-parens: 0
# fails defensive semicolons:
# ;['foo', 'bar'].forEach(function(x) {})
no-extra-semi: 0
# fails good code:
# var fs = require('fs'),
# , open = fs.open
no-mixed-requires: [0, false]
# new Array(12) is used to pre-allocate arrays
no-new-array: 0
# fails good code:
# fs.open('/file', 0666, function(){})
no-octal: 0
# fails good code:
# console.log('\033[31m' + str + '\033[39m')
# also fails \0 which is not octal escape
no-octal-escape: 0
# I'm writing javascript, not some weird reduced version of it
no-plusplus: 0
# fails good code:
# if (a) {
# var x = 'foo'
# } else {
# var x = bar
# }
no-redeclare: 0
# sometimes useful, often isn't
# probably worth enforcing
no-shadow: 2
no-sync: 2
# I'm writing javascript, not some weird reduced version of it
no-ternary: 0
# the single most important rule in the entire ruleset
no-undef: 2
# it is failing our own underscores
no-underscore-dangle: 0
# fails function hoisting
no-unreachable: 0
# fails npm-style code, it's good once you get used to it:
# if (typeof(options) === 'function') callback = options, options = {}
no-unused-expressions: 0
# fails (function(_err) {}) where named argument is used to show what
# nth function argument means
no-unused-vars: [0, local]
# fails function hoisting
no-use-before-define: 0
# fails foobar( (function(){}).bind(this) )
# parens are added for readability
no-wrap-func: 0
# fails good code:
# var x
# if (something) {
# var y
one-var: 0
quote-props: 0
# fails situation when different quotes are used to avoid escaping
quotes: [2, single, avoid-escape]
# http://blog.izs.me/post/2353458699/an-open-letter-to-javascript-leaders-regarding
semi: [2, never]
# fails good code where spaces are used for grouping:
# (x+y * y+z)
space-infix-ops: 0
# typeof(something) should have braces to look like a function
# a matter of taste I suppose
space-unary-word-ops: 0
# strict mode is just harmful,
# can I have a check to enforce not using it?
strict: 0
sort-vars: 0
no-path-concat: 0
func-names: 0
# how can you set a return code without process.exit?
no-process-exit: 0
# both styles are useful
func-style: [0, declaration]
# fails while(1) {...}
no-constant-condition: 0
# fails good code:
# https://github.com/rlidwka/jju/blob/eb52ee72e5f21d48963798f9bda8ac8d68082148/lib/parse.js#L732
no-ex-assign: 0
wrap-iife: [2, inside]
# doesn't always make sense
consistent-return: 0
new-parens: 1
no-alert: 1
no-array-constructor: 1
no-caller: 1
no-cond-assign: 1
no-control-regex: 1
no-delete-var: 1
no-div-regex: 1
no-dupe-keys: 1
no-empty-class: 1
no-empty-label: 1
no-eval: 1
no-extend-native: 1
no-extra-boolean-cast: 1
no-extra-strict: 1
no-fallthrough: 1
no-floating-decimal: 1
no-func-assign: 1
no-global-strict: 1
no-implied-eval: 1
no-invalid-regexp: 1
no-iterator: 1
no-labels: 1
no-label-var: 1
no-lone-blocks: 1
no-loop-func: 1
no-multi-str: 1
no-native-reassign: 1
no-negated-in-lhs: 1
no-nested-ternary: 1
no-new: 1
no-new-func: 1
no-new-object: 1
no-new-wrappers: 1
no-obj-calls: 1
no-octal: 1
no-proto: 1
no-regex-spaces: 1
no-return-assign: 1
no-script-url: 1
no-self-compare: 1
no-shadow: 1
no-shadow-restricted-names: 1
no-spaced-func: 1
no-sparse-arrays: 1
no-sync: 1
no-undef: 1
no-undef-init: 1
no-unreachable: 1
no-with: 1
no-yoda: 1
radix: 1
space-return-throw-case: 1
use-isnan: 1
valid-jsdoc: 1
wrap-regex: 1

.gitignore

@@ -1,8 +1,9 @@
node_modules
package.json
npm-debug.log
sinopia-*.tgz
###
bin/storage*
bin/*.yaml
test/test-storage*
test-storage*

.npmignore (new file)

@@ -0,0 +1,11 @@
node_modules
package.json
npm-debug.log
sinopia-*.tgz
###
bin/storage*
bin/*.yaml
test-storage*
/.eslint*

History.md (new file)

@@ -0,0 +1,152 @@
1 Apr 2014, version 0.8.0
- use gzip compression whenever possible (issue #54)
- set `ignore_latest_tag` to false by default; it should now be more compatible with the npm registry
29 Mar 2014, version 0.7.1
- added `ignore_latest_tag` config param (issues #55, #59)
- reverted PR #56 (see discussion in #57)
13 Mar 2014, version 0.7.0
- config changes:
- breaking change: all time intervals are now specified in *seconds* instead of *milliseconds* for the sake of consistency. Change `timeout` if you have one!
- all time intervals now can be specified in [nginx notation](http://wiki.nginx.org/ConfigNotation), for example `1m 30s` will specify a 90 seconds timeout
- added `maxage` option to avoid asking public registry for the same data too often (issue #47)
- added `max_fails` and `fail_timeout` options to reduce the number of requests to the public registry when it's down (issue #7)
- bug fixes:
- fix crash when headers are sent twice (issue #52)
- all tarballs are returned with `Content-Length`, which allows [yapm](https://github.com/rlidwka/yapm) to estimate download time
- when connection to public registry is interrupted when downloading a tarball, we no longer save incomplete tarball to the disk
- other changes:
- 404 errors are returned in couchdb-like manner (issue #56)
5 Mar 2014, version 0.6.7
- pin down express@3 version, since sinopia doesn't yet work with express@4
28 Feb 2014, version 0.6.5
- old SSL keys for npm are removed, solves `SELF_SIGNED_CERT_IN_CHAIN` error
3 Feb 2014, version 0.6.3
- validate tags and versions (issue #40)
- don't crash when process.getuid doesn't exist (issue #41)
18 Jan 2014, version 0.6.2
- adding config param to specify upload limits (issue #39)
- making loose semver versions work (issue #38)
13 Jan 2014, version 0.6.1
- support setting different storage paths for different packages (issue #35)
30 Dec 2013, version 0.6.0
- tag support (issue #8)
- adding support for npm 1.3.19+ behaviour (issue #31)
- removing all support for proxying publish requests to uplink (too complex)
26 Dec 2013, version 0.5.9
- fixing bug with bad Accept header (issue #32)
20 Dec 2013, version 0.5.8
- fixed a warning from js-yaml
- don't color multiline strings in logs output
- better error messages in various cases
- test format changed
15 Dec 2013, version 0.5.7
- try to fetch package from uplinks if user requested a tarball we don't know about (issue #29)
- security fix: set express.js to production mode so we won't return stack traces to the user in case of errors
11 Dec 2013, version 0.5.6
- fixing a few crashes related to tags
8 Dec 2013, version 0.5.4
- latest tag always shows highest version available (issue #8)
- added a configurable timeout for requests to uplinks (issue #18)
- users with bad authentication header are considered not logged in (issue #17)
24 Nov 2013, version 0.5.3
- added proxy support for requests to uplinks (issue #13)
- changed license from default BSD to WTFPL
26 Oct 2013, version 0.5.2
- server now supports unpublishing local packages
- added fs-ext dependency (flock)
- fixed a few race conditions
20 Oct 2013, version 0.5.1
- fixed a few errors related to logging
12 Oct 2013, version 0.5.0
- using bunyan as a log engine
- pretty-formatting colored logs to stdout by default
- ask user before creating any config files
5 Oct 2013, version 0.4.3
- basic tags support for npm (read-only)
- npm star/unstar calls now return proper error
29 Sep 2013, version 0.4.2
28 Sep 2013, version 0.4.1
- using mocha for tests now
- making use of streams2 api, doesn't work on 0.8 anymore
- basic support for uploading packages to other registries
27 Sep 2013, version 0.4.0
- basic test suite
- storage path in config is now relative to config file location, not cwd
- proper cleanup for temporary files
12 Jul 2013, version 0.3.2
4 Jul 2013, version 0.3.1
- using ETag header for all json output, based on md5
20 Jun 2013, version 0.3.0
- compression for http responses
- requests for files to uplinks are now streams (no buffering)
- tarballs are now cached locally
19 Jun 2013, version 0.2.0
- config file changed, packages is now specified with minimatch
- ability to retrieve all packages from another registry (i.e. npmjs)
14 Jun 2013, version 0.1.1
- config is now autogenerated
- tarballs are now read/written from fs using streams (no buffering)
9 Jun 2013, version 0.1.0
- first npm version
- ability to publish packages and retrieve them locally
- basic authentication/access control
22 May 2013, version 0.0.0
- first commits


@@ -2,25 +2,27 @@ Sinopia is a private/caching npm repository server.
It allows you to have a local npm registry with zero configuration. You don't have to install and replicate an entire CouchDB database. Sinopia keeps its own small database and, if a package doesn't exist there, it asks npmjs.org for it, keeping only those packages you use.
<p align="center"><img src="https://f.cloud.github.com/assets/999113/1795553/680177b2-6a1d-11e3-82e1-02193aa4e32e.png"></p>
## Use cases
1. Use private packages.
1. Use private packages.
If you want to use all the benefits of the npm package system in your company without sending all your code to the public, you can use your private packages just as easily as public ones.
See [using private packages](#using-private-packages) section for details.
2. Cache npmjs.org registry.
If you have more than one server you want to install packages on, you might want to use this to decrease latency
(presumably "slow" npmjs.org will be connected to only once per package/version) and provide limited failover (if npmjs.org is down, we might still find something useful in the cache).
See [using public packages](#using-public-packages-from-npmjsorg) section for details.
3. Override public packages.
If you want to use a modified version of some 3rd-party package (for example, you found a bug, but the maintainer hasn't accepted the pull request yet), you can publish your version locally under the same name.
See [override public packages](#override-public-packages) section for details.
## Installation
@@ -42,11 +44,35 @@ $ npm set always-auth true
$ npm set ca null
```
### Docker
A Sinopia docker image [is available](https://index.docker.io/u/keyvanfatehi/docker-sinopia/).
### Chef
A Sinopia Chef cookbook [is available at the Opscode community site](http://community.opscode.com/cookbooks/sinopia) (source: https://github.com/BarthV/sinopia-cookbook).
### Puppet
A Sinopia puppet module [is available at Puppet Forge](http://forge.puppetlabs.com/saheba/sinopia) (source: https://github.com/saheba/puppet-sinopia).
## Configuration
When you start a server, it auto-creates a config file that adds one user (password is printed to stdout only once).
## Adding a new user
There is no utility to add a new user, but you can at least use node on the command line to generate a password hash. You will need to edit the config and add the user manually.
Start node and enter the following code, replacing 'newpass' with the password you want to hash.
```bash
$ node
> crypto.createHash('sha1').update('newpass').digest('hex')
'6c55803d6f1d7a177a0db3eb4b343b0d50f9c111'
> [CTRL-D]
```
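Equivalently, a non-interactive one-liner (a sketch; same hash as above):

```bash
$ node -e "console.log(require('crypto').createHash('sha1').update('newpass').digest('hex'))"
6c55803d6f1d7a177a0db3eb4b343b0d50f9c111
```

The digest then goes under a user entry (the name `newuser` here is hypothetical) in the `users` section of *config.yaml*; as the config validation in lib/config.js expects, it must be a 40-character sha1 hex string:

```yaml
users:
  newuser:
    password: 6c55803d6f1d7a177a0db3eb4b343b0d50f9c111
```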
## Using private packages
You can add users and manage which users can access which packages.
@@ -66,18 +92,39 @@ If you want to use a modified version of some public package `foo`, you can just
There's two options here:
1. You want to create a separate fork and stop synchronizing with the public version.
If you want to do that, modify your configuration file so sinopia no longer asks npmjs for this package: add a separate entry for the package to *config.yaml*, remove `npmjs` from its `proxy_access` list, and restart the server (see the sketch after this list).
When you publish your package locally, you should probably start with a version string higher than the existing one, so it won't conflict with the existing package in the cache.
2. You want to temporarily use your version, but return to the public one as soon as it's updated.
To avoid version conflicts, use a custom pre-release suffix on the next patch version. For example, if the public package has version 0.1.2, you can upload 0.1.3-my-temp-fix. Your package will then be used until the original maintainer publishes 0.1.3.
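A minimal *config.yaml* sketch for option 1, assuming the default `npmjs` uplink and a hypothetical package named `foo` (key names as in the generated default config; entries are matched with minimatch and the first match wins):

```yaml
packages:
  'foo':
    # local fork: no proxy_access line here, so requests for 'foo'
    # are never forwarded to the npmjs uplink
    allow_access: all
    allow_publish: admin
  '*':
    allow_access: all
    allow_publish: admin
    proxy_access: npmjs
```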
## Features
## Compatibility
For now you can publish packages and read them. Advanced features like `npm search` don't work yet.
Sinopia aims to support all features of a standard npm client that make sense to support in a private repository. Unfortunately, it isn't always possible.
Basic features:
- Installing packages (npm install, npm upgrade, etc.) - supported
- Publishing packages (npm publish) - supported
Advanced package control:
- Unpublishing packages (npm unpublish) - not yet supported, should be soon
- Tagging (npm tag) - not yet supported, should be soon
- Deprecation (npm deprecate) - not supported
User management:
- Registering new users (npm adduser {newuser}) - not supported, sinopia uses its own acl management system
- Transferring ownership (npm owner add {user} {pkg}) - not supported, sinopia uses its own acl management system
Misc stuff:
- Searching (npm search) - not supported
- Starring (npm star, npm unstar) - not supported, doesn't make sense in private registry
## Storage
@@ -85,7 +132,7 @@ No CouchDB here. This application is supposed to work with zero configuration, s
If you want to use a database instead, ask for it, we'll come up with some kind of a plugin system.
## Simular existing things
## Similar existing things
- npm + git (I mean, using git+ssh:// dependencies) - most people seem to use this, but it's a terrible idea... *npm update* doesn't work, can't use git subdirectories this way, etc.
- [reggie](https://github.com/mbrevoort/node-reggie) - this looks very interesting indeed... I might borrow some code there.

SERVER.md (new file)

@@ -0,0 +1,58 @@
This is mostly basic Linux server configuration stuff, but I felt it important to document and share the steps I took to get sinopia running permanently on my server. You will need root (or sudo) permissions for the following.
## Running as a separate user
First create the sinopia user:
```bash
$ sudo adduser --disabled-login --gecos 'Sinopia NPM mirror' sinopia
```
You can create a shell as the sinopia user using the following commands:
```bash
$ sudo su sinopia
$ cd ~
```
The 'cd ~' command sends you to the home directory of the sinopia user. Make sure you run sinopia at least once to generate the config file, then edit it according to your needs.
## Listening on all addresses
If you want to listen to every external address set the listen directive in the config to:
```
# you can specify listen address (or simply a port)
listen: 0.0.0.0:4873
```
## Keeping sinopia running forever
We can use the node package called 'forever' to keep sinopia running all the time.
https://github.com/nodejitsu/forever
First install forever globally:
```bash
$ sudo npm install -g forever
```
Make sure you've started sinopia at least once to generate the config file, and write down the generated admin credentials. You can then use the following command to start sinopia:
```bash
$ forever start `which sinopia`
```
You can check the documentation for more information on how to use forever.
## Surviving server restarts
We can use crontab and forever together to restart sinopia after a server reboot.
When you're logged in as the sinopia user, do the following:
```bash
$ crontab -e
```
This might ask you to choose an editor. Pick your favorite and proceed.
Add the following entry to the file:
```
@reboot /usr/bin/forever start /usr/lib/node_modules/sinopia/bin/sinopia
```
The locations may vary depending on your server setup. If you want to know where your files are, you can use the 'which' command:
```bash
$ which forever
$ which sinopia
```


@@ -1,70 +1,4 @@
#!/usr/bin/env node
var pkg_file = '../package.yaml';
var fs = require('fs');
var yaml = require('js-yaml');
var commander = require('commander');
var server = require('../lib/index');
var crypto = require('crypto');
var pkg = require(pkg_file);
commander
.option('-l, --listen <[host:]port>', 'host:port number to listen on (default: localhost:4873)')
.option('-c, --config <config.yaml>', 'use this configuration file (default: ./config.yaml)')
.version(pkg.version)
.parse(process.argv);
if (commander.args.length == 1 && !commander.config) {
// handling "sinopia [config]" case if "-c" is missing in commandline
commander.config = commander.args.pop();
}
if (commander.args.length != 0) {
commander.help();
}
try {
var config, config_path;
if (commander.config) {
config_path = commander.config;
config = yaml.safeLoad(fs.readFileSync(config_path, 'utf8'));
} else {
config_path = './config.yaml';
try {
config = yaml.safeLoad(fs.readFileSync(config_path, 'utf8'));
} catch(err) {
var created_config = require('../lib/config_gen')();
config = yaml.safeLoad(created_config.yaml);
console.log('starting with default config, use user: "%s", pass: "%s" to authenticate', created_config.user, created_config.pass);
fs.writeFileSync(config_path, created_config.yaml);
}
}
} catch(err) {
if (err.code === 'ENOENT') {
console.error('ERROR: cannot open configuration file "'+config_path+'", file not found');
process.exit(1);
} else {
throw err;
}
}
if (!config.user_agent) config.user_agent = 'Sinopia/'+pkg.version;
if (!config.self_path) config.self_path = config_path;
// command line || config file || default
var hostport = commander.listen || String(config.listen || '') || '4873';
hostport = hostport.split(':');
if (hostport.length < 2) {
hostport = [undefined, hostport[0]];
}
if (hostport[0] == null) {
hostport[0] = 'localhost';
}
server(config).listen(hostport[1], hostport[0]);
console.log('Server is listening on http://%s:%s/', hostport[0], hostport[1]);
// undocumented stuff for tests
if (typeof(process.send) === 'function') {
process.send({sinopia_started: hostport});
}
require('../lib/cli')

lib/cli.js (new file)

@@ -0,0 +1,146 @@
#!/usr/bin/env node
/*eslint no-sync:0*/
if (process.getuid && process.getuid() === 0) {
global.console.error("Sinopia doesn't need superuser privileges. Don't run it under root.")
}
try {
// for debugging memory leaks
// totally optional
require('heapdump')
} catch(err){}
var logger = require('./logger')
logger.setup() // default setup
var pkg_file = '../package.yaml'
, fs = require('fs')
, yaml = require('js-yaml')
, commander = require('commander')
, server = require('./index')
, crypto = require('crypto')
, pkg = yaml.safeLoad(fs.readFileSync(__dirname + '/' + pkg_file, 'utf8'))
commander
.option('-l, --listen <[host:]port>', 'host:port number to listen on (default: localhost:4873)')
.option('-c, --config <config.yaml>', 'use this configuration file (default: ./config.yaml)')
.version(pkg.version)
.parse(process.argv)
if (commander.args.length == 1 && !commander.config) {
// handling "sinopia [config]" case if "-c" is missing in commandline
commander.config = commander.args.pop()
}
if (commander.args.length != 0) {
commander.help()
}
var config, config_path, have_question
try {
if (commander.config) {
config_path = commander.config
config = yaml.safeLoad(fs.readFileSync(config_path, 'utf8'))
} else {
config_path = './config.yaml'
try {
config = yaml.safeLoad(fs.readFileSync(config_path, 'utf8'))
} catch(err) {
var readline = require('readline')
var rl = readline.createInterface(process.stdin, process.stdout)
var timeout = setTimeout(function() {
global.console.log('I got tired waiting for an answer. Exitting...')
process.exit(1)
}, 20000)
;(function askUser() {
have_question = true
rl.question('Config file doesn\'t exist, create a new one? (Y/n) ', function(x) {
clearTimeout(timeout)
if (x[0] == 'Y' || x[0] == 'y' || x === '') {
rl.close()
var created_config = require('../lib/config_gen')()
config = yaml.safeLoad(created_config.yaml)
write_config_banner(created_config, config)
fs.writeFileSync(config_path, created_config.yaml)
afterConfigLoad()
} else if (x[0] == 'N' || x[0] == 'n') {
rl.close()
global.console.log('So, you just accidentally run me in a wrong folder. Exitting...')
process.exit(1)
} else {
askUser()
}
})
})()
}
}
} catch(err) {
logger.logger.fatal({file: config_path, err: err}, 'cannot open config file @{file}: @{!err.message}')
process.exit(1)
}
if (!have_question) afterConfigLoad()
function get_hostport() {
// command line || config file || default
var hostport = commander.listen || String(config.listen || '') || '4873'
hostport = hostport.split(':')
if (hostport.length < 2) {
hostport = [undefined, hostport[0]]
}
if (hostport[0] == null) {
hostport[0] = 'localhost'
}
return hostport
}
function afterConfigLoad() {
if (!config.user_agent) config.user_agent = 'Sinopia/'+pkg.version
if (!config.self_path) config.self_path = config_path
logger.setup(config.logs)
var hostport = get_hostport()
server(config)
.listen(hostport[1], hostport[0])
.on('error', function(err) {
logger.logger.fatal({err: err}, 'cannot create server: @{err.message}')
process.exit(2)
})
logger.logger.warn({addr: 'http://'+hostport[0]+':'+hostport[1]+'/', version: 'Sinopia/'+pkg.version}, 'Server is listening on @{addr}')
// undocumented stuff for tests
if (typeof(process.send) === 'function') {
process.send({sinopia_started: hostport})
}
}
function write_config_banner(def, config) {
var hostport = get_hostport()
var log = global.console.log
log('===========================================================')
log(' Creating a new configuration file: "%s"', config_path)
log(' ')
log(' If you want to setup npm to work with this registry,')
log(' run following commands:')
log(' ')
log(' $ npm set registry http://%s:%s/', hostport[0], hostport[1])
log(' $ npm set always-auth true')
log(' $ npm adduser')
log(' Username: %s', def.user)
log(' Password: %s', def.pass)
log('===========================================================')
}
process.on('uncaughtException', function(err) {
logger.logger.fatal({err: err}, 'uncaught exception, please report this\n@{err.stack}')
process.exit(255)
})


@@ -1,139 +1,186 @@
var assert = require('assert');
var crypto = require('crypto');
var minimatch = require('minimatch');
var assert = require('assert')
, crypto = require('crypto')
, minimatch = require('minimatch')
, utils = require('./utils')
// [[a, [b, c]], d] -> [a, b, c, d]
function flatten(array) {
var result = [];
var result = []
for (var i=0; i<array.length; i++) {
if (Array.isArray(array[i])) {
result.push.apply(result, flatten(array[i]));
result.push.apply(result, flatten(array[i]))
} else {
result.push(array[i]);
result.push(array[i])
}
}
return result;
return result
}
function Config(config) {
if (!(this instanceof Config)) return new Config(config);
if (!(this instanceof Config)) return new Config(config)
for (var i in config) {
if (this[i] == null) this[i] = config[i];
if (this[i] == null) this[i] = config[i]
}
// some weird shell scripts are valid yaml files parsed as string
assert.equal(typeof(config), 'object', 'CONFIG: this doesn\'t look like a valid config file');
assert(this.storage, 'CONFIG: storage path not defined');
assert.equal(typeof(config), 'object', 'CONFIG: this doesn\'t look like a valid config file')
assert(this.storage, 'CONFIG: storage path not defined')
var users = {all:true, anonymous:true, 'undefined':true, owner:true, none:true}
var users = {all:true, anonymous:true, owner:true, none:true};
var check_user_or_uplink = function(arg) {
assert(arg !== 'all' || arg !== 'owner' || arg !== 'anonymous' || arg !== 'none', 'CONFIG: reserved user/uplink name: ' + arg);
assert(!arg.match(/\s/), 'CONFIG: invalid user name: ' + arg);
assert(users[arg] == null, 'CONFIG: duplicate user/uplink name: ' + arg);
users[arg] = true;
};
assert(arg !== 'all' || arg !== 'owner' || arg !== 'anonymous' || arg !== 'undefined' || arg !== 'none', 'CONFIG: reserved user/uplink name: ' + arg)
assert(!arg.match(/\s/), 'CONFIG: invalid user name: ' + arg)
assert(users[arg] == null, 'CONFIG: duplicate user/uplink name: ' + arg)
users[arg] = true
}
['users', 'uplinks', 'packages'].forEach(function(x) {
if (this[x] == null) this[x] = {};
assert(
typeof(this[x]) === 'object' &&
!Array.isArray(this[x])
, 'CONFIG: bad "'+x+'" value (object expected)');
});
;['users', 'uplinks', 'packages'].forEach(function(x) {
if (this[x] == null) this[x] = {}
assert(utils.is_object(this[x]), 'CONFIG: bad "'+x+'" value (object expected)')
})
for (var i in this.users) check_user_or_uplink(i);
for (var i in this.uplinks) check_user_or_uplink(i);
for (var i in this.users) check_user_or_uplink(i)
for (var i in this.uplinks) check_user_or_uplink(i)
for (var i in this.users) {
assert(this.users[i].password, 'CONFIG: no password for user: ' + i);
assert(this.users[i].password, 'CONFIG: no password for user: ' + i)
assert(
typeof(this.users[i].password) === 'string' &&
typeof(this.users[i].password) === 'string' &&
this.users[i].password.match(/^[a-f0-9]{40}$/)
, 'CONFIG: wrong password format for user: ' + i + ', sha1 expected');
, 'CONFIG: wrong password format for user: ' + i + ', sha1 expected')
}
for (var i in this.uplinks) {
assert(this.uplinks[i].url, 'CONFIG: no url for uplink: ' + i);
assert(this.uplinks[i].url, 'CONFIG: no url for uplink: ' + i)
assert(
typeof(this.uplinks[i].url) === 'string'
, 'CONFIG: wrong url format for uplink: ' + i);
this.uplinks[i].url = this.uplinks[i].url.replace(/\/$/, '');
, 'CONFIG: wrong url format for uplink: ' + i)
this.uplinks[i].url = this.uplinks[i].url.replace(/\/$/, '')
}
for (var i in this.packages) {
var check_userlist = function(i, hash, action) {
if (hash[action] == null) hash[action] = [];
// if it's a string, split it to array
if (typeof(hash[action]) === 'string') {
hash[action] = hash[action].split(/\s+/);
}
function check_userlist(i, hash, action) {
if (hash[action] == null) hash[action] = []
assert(
typeof(hash[action]) === 'object' &&
Array.isArray(hash[action])
, 'CONFIG: bad "'+i+'" package '+action+' description (array or string expected)');
hash[action] = flatten(hash[action]);
hash[action].forEach(function(user) {
assert(
users[user] != null
, 'CONFIG: "'+i+'" package: user "'+user+'" doesn\'t exist');
});
// if it's a string, split it to array
if (typeof(hash[action]) === 'string') {
hash[action] = hash[action].split(/\s+/)
}
assert(
typeof(hash[action]) === 'object' &&
Array.isArray(hash[action])
, 'CONFIG: bad "'+i+'" package '+action+' description (array or string expected)')
hash[action] = flatten(hash[action])
hash[action].forEach(function(user) {
assert(
users[user] != null
, 'CONFIG: "'+i+'" package: user "'+user+'" doesn\'t exist')
})
}
for (var i in this.packages) {
assert(
typeof(this.packages[i]) === 'object' &&
!Array.isArray(this.packages[i])
, 'CONFIG: bad "'+i+'" package description (object expected)');
, 'CONFIG: bad "'+i+'" package description (object expected)')
check_userlist(i, this.packages[i], 'allow_access');
check_userlist(i, this.packages[i], 'allow_publish');
check_userlist(i, this.packages[i], 'proxy_access');
check_userlist(i, this.packages[i], 'proxy_publish');
check_userlist(i, this.packages[i], 'allow_access')
check_userlist(i, this.packages[i], 'allow_publish')
check_userlist(i, this.packages[i], 'proxy_access')
check_userlist(i, this.packages[i], 'proxy_publish')
// deprecated
check_userlist(i, this.packages[i], 'access');
check_userlist(i, this.packages[i], 'proxy');
check_userlist(i, this.packages[i], 'publish');
check_userlist(i, this.packages[i], 'access')
check_userlist(i, this.packages[i], 'proxy')
check_userlist(i, this.packages[i], 'publish')
}
return this;
// loading these from ENV if aren't in config
;['http_proxy', 'https_proxy', 'no_proxy'].forEach((function(v) {
if (!(v in this)) {
this[v] = process.env[v] || process.env[v.toUpperCase()]
}
}).bind(this))
// unique identifier of this server (or a cluster), used to avoid loops
if (!this.server_id) {
this.server_id = crypto.pseudoRandomBytes(6).toString('hex')
}
if (this.ignore_latest_tag == null) this.ignore_latest_tag = false
return this
}
function allow_action(package, who, action) {
for (var i in this.packages) {
if (minimatch.makeRe(i).exec(package)) {
return this.packages[i][action].reduce(function(prev, curr) {
if (curr === who || curr === 'all') return true;
return prev;
}, false);
}
}
return false;
return (this.get_package_setting(package, action) || []).reduce(function(prev, curr) {
if (curr === String(who) || curr === 'all') return true
return prev
}, false)
}
Config.prototype.allow_access = function(package, user) {
return allow_action.call(this, package, user, 'allow_access') || allow_action.call(this, package, user, 'access');
return allow_action.call(this, package, user, 'allow_access') || allow_action.call(this, package, user, 'access')
}
Config.prototype.allow_publish = function(package, user) {
return allow_action.call(this, package, user, 'allow_publish') || allow_action.call(this, package, user, 'publish');
return allow_action.call(this, package, user, 'allow_publish') || allow_action.call(this, package, user, 'publish')
}
Config.prototype.proxy_access = function(package, uplink) {
return allow_action.call(this, package, uplink, 'proxy_access') || allow_action.call(this, package, uplink, 'proxy');
return allow_action.call(this, package, uplink, 'proxy_access') || allow_action.call(this, package, uplink, 'proxy')
}
Config.prototype.proxy_publish = function(package, uplink) {
return allow_action.call(this, package, uplink, 'proxy_publish');
return allow_action.call(this, package, uplink, 'proxy_publish')
}
Config.prototype.get_package_setting = function(package, setting) {
for (var i in this.packages) {
if (minimatch.makeRe(i).exec(package)) {
return this.packages[i][setting]
}
}
return undefined
}
Config.prototype.authenticate = function(user, password) {
if (this.users[user] == null) return false;
return crypto.createHash('sha1').update(password).digest('hex') === this.users[user].password;
if (this.users[user] == null) return false
return crypto.createHash('sha1').update(password).digest('hex') === this.users[user].password
}
module.exports = Config;
module.exports = Config
var parse_interval_table = {
'': 1000,
ms: 1,
s: 1000,
m: 60*1000,
h: 60*60*1000,
d: 86400000,
w: 7*86400000,
M: 30*86400000,
y: 365*86400000,
}
module.exports.parse_interval = function(interval) {
if (typeof(interval) === 'number') return interval * 1000
var result = 0
var last_suffix = Infinity
interval.split(/\s+/).forEach(function(x) {
if (!x) return
var m = x.match(/^((0|[1-9][0-9]*)(\.[0-9]+)?)(ms|s|m|h|d|w|M|y|)$/)
if (!m
|| parse_interval_table[m[4]] >= last_suffix
|| (m[4] === '' && last_suffix !== Infinity)) {
throw new Error('invalid interval: ' + interval)
}
last_suffix = parse_interval_table[m[4]]
result += Number(m[1]) * parse_interval_table[m[4]]
})
return result
}
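For reference, a few example inputs and the millisecond values this parser produces (a sketch based on the conversion table above):

```js
parse_interval(30)        // 30000   - bare numbers are treated as seconds
parse_interval('30s')     // 30000
parse_interval('1m 30s')  // 90000   - units must appear in decreasing order
parse_interval('1.5h')    // 5400000
parse_interval('500ms')   // 500
parse_interval('30s 1m')  // throws 'invalid interval' (increasing units)
```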


@@ -12,26 +12,43 @@ uplinks:
npmjs:
url: https://registry.npmjs.org/
# amount of time to wait for repository to respond
# before giving up and using the local cached copy
#timeout: 30s
# maximum time in which data is considered up to date
#
# default is 2 minutes, so server won't request the same data from
# uplink if a similar request was made less than 2 minutes ago
#maxage: 2m
# if two subsequent requests fail, no further requests will be sent to
# this uplink for five minutes
#max_fails: 2
#fail_timeout: 5m
# timeouts are defined in the same way as nginx, see:
# http://wiki.nginx.org/ConfigNotation
packages:
# uncomment this for packages with "local-" prefix to be available
# uncomment this for packages with "local-" prefix to be available
# for admin only, it's a recommended way of handling private packages
#'local-*':
# allow_access: admin
# allow_publish: admin
# # you can override storage directory for a group of packages this way:
# storage: 'local_storage'
'*':
# allow all users to read packages ('all' is a keyword)
# this includes non-authenticated (anonymous) users
# this includes non-authenticated users
allow_access: all
# allow 'admin' to publish packages
allow_publish: admin
# if package is not available locally, proxy requests to 'npmjs' registry
proxy_access: npmjs
# when package is published locally, also push it to remote registry
#proxy_publish: none
proxy: npmjs
#####################################################################
# Advanced settings
@@ -43,3 +60,30 @@ packages:
# you can specify listen address (or simply a port)
#listen: localhost:4873
# type: file | stdout | stderr
# level: trace | debug | info | http (default) | warn | error | fatal
#
# parameters for file: name is filename
# {type: 'file', path: 'sinopia.log', level: 'debug'},
#
# parameters for stdout and stderr: format: json | pretty
# {type: 'stdout', format: 'pretty', level: 'debug'},
logs:
- {type: stdout, format: pretty, level: http}
#- {type: file, path: sinopia.log, level: info}
# you can specify proxy used with all requests in wget-like manner here
# (or set up ENV variables with the same name)
#http_proxy: http://something.local/
#https_proxy: https://something.local/
#no_proxy: localhost,127.0.0.1
# maximum size of uploaded json document
# increase it if you have "request entity too large" errors
#max_body_size: 1mb
# Workaround for countless npm bugs. Must have for npm <1.14.x, but expect
# it to be turned off in future versions. If `true`, latest tag is ignored,
# and the highest semver is placed instead.
#ignore_latest_tag: false


@@ -1,16 +1,19 @@
var fs = require('fs');
var crypto = require('crypto');
var fs = require('fs')
, crypto = require('crypto')
module.exports = function create_config() {
var pass = crypto.randomBytes(8).toString('base64').replace(/[=+\/]/g, '');
var pass_digest = crypto.createHash('sha1').update(pass).digest('hex');
var config = fs.readFileSync(require.resolve('./config_def.yaml'), 'utf8');
config = config.replace('__PASSWORD__', pass_digest);
var pass = crypto.randomBytes(8).toString('base64').replace(/[=+\/]/g, '')
, pass_digest = crypto.createHash('sha1').update(pass).digest('hex')
/*eslint no-sync:0*/
var config = fs.readFileSync(require.resolve('./config_def.yaml'), 'utf8')
config = config.replace('__PASSWORD__', pass_digest)
return {
yaml: config,
user: 'admin',
pass: pass,
};
}
}


@@ -1,25 +1,26 @@
var util = require('util');
var util = require('util')
, utils = require('./utils')
function parse_error_params(params, status, msg) {
if (typeof(params) === 'string') {
return {
msg: params,
status: status,
};
}
} else if (typeof(params) === 'number') {
return {
msg: msg,
status: params,
};
} else if (typeof(params) === 'object' && params != null) {
if (params.msg == null) params.msg = msg;
if (params.status == null) params.status = status;
return params;
}
} else if (utils.is_object(params)) {
if (params.msg == null) params.msg = msg
if (params.status == null) params.status = status
return params
} else {
return {
msg: msg,
status: status,
};
}
}
}
@@ -27,35 +28,35 @@ function parse_error_params(params, status, msg) {
* Errors caused by malfunctioning code
*/
var AppError = function(params, constr) {
Error.captureStackTrace(this, constr || this);
params = parse_error_params(params, 500, 'Internal server error');
this.msg = params.msg;
this.status = params.status;
};
util.inherits(AppError, Error);
AppError.prototype.name = 'Application Error';
Error.captureStackTrace(this, constr || this)
params = parse_error_params(params, 500, 'Internal server error')
this.msg = params.msg
this.status = params.status
}
util.inherits(AppError, Error)
AppError.prototype.name = 'Application Error'
/*
* Errors caused by a wrong request
*/
var UserError = function(params, constr) {
params = parse_error_params(params, 404, 'The requested resource was not found');
this.msg = params.msg;
this.status = params.status;
};
util.inherits(UserError, Error);
UserError.prototype.name = 'User Error';
params = parse_error_params(params, 404, 'The requested resource was not found')
this.msg = params.msg
this.status = params.status
}
util.inherits(UserError, Error)
UserError.prototype.name = 'User Error'
/*
* Mimic filesystem errors
*/
var FSError = function(code) {
this.code = code;
};
util.inherits(FSError, Error);
FSError.prototype.name = 'FS Error';
this.code = code
}
util.inherits(FSError, Error)
FSError.prototype.name = 'FS Error'
module.exports.AppError = AppError;
module.exports.UserError = UserError;
module.exports.FSError = FSError;
module.exports.AppError = AppError
module.exports.UserError = UserError
module.exports.FSError = FSError
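
As the parse_error_params branches show, both error constructors accept a
string, a number, or an object; a sketch of the three call styles inside an
express route handler (the handler itself is illustrative):

var UError = require('./error').UserError
function handler(req, res, next) {
  next(new UError('no such package available'))       // string: msg, status defaults to 404
  //next(new UError(503))                             // number: status, msg defaults
  //next(new UError({status: 409, msg: 'conflict'}))  // object: both fields explicit
}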


@@ -1,216 +1,402 @@
var express = require('express');
var cookies = require('cookies');
var utils = require('./utils');
var Storage = require('./storage');
var Config = require('./config');
var UError = require('./error').UserError;
var basic_auth = require('./middleware').basic_auth;
var validate_name = require('./middleware').validate_name;
var media = require('./middleware').media;
var etagify = require('./middleware').etagify;
var expect_json = require('./middleware').expect_json;
var express = require('express')
, cookies = require('cookies')
, utils = require('./utils')
, Storage = require('./storage')
, Config = require('./config')
, UError = require('./error').UserError
, Middleware = require('./middleware')
, Logger = require('./logger')
, Cats = require('./status-cats')
, basic_auth = Middleware.basic_auth
, validate_name = Middleware.validate_name
, media = Middleware.media
, expect_json = Middleware.expect_json
function match(regexp) {
return function(req, res, next, value, name) {
if (regexp.exec(value)) {
next()
} else {
next('route')
}
}
}
module.exports = function(config_hash) {
var config = new Config(config_hash);
var storage = new Storage(config);
var config = new Config(config_hash)
, storage = new Storage(config)
var can = function(action) {
return function(req, res, next) {
if (config['allow_'+action](req.params.package, req.remoteUser)) {
next();
next()
} else {
next(new UError({
status: 403,
msg: 'user '+req.remoteUser+' not allowed to '+action+' it'
}));
if (!req.remoteUser) {
if (req.remoteUserError) {
var msg = "can't "+action+' restricted package, ' + req.remoteUserError
} else {
var msg = "can't "+action+" restricted package without auth, did you forget 'npm set always-auth true'?"
}
next(new UError({
status: 403,
msg: msg,
}))
} else {
next(new UError({
status: 403,
msg: 'user '+req.remoteUser+' not allowed to '+action+' it'
}))
}
}
};
};
}
}
var app = express();
app.use(express.logger());
app.use(etagify);
var app = express()
// run in production mode by default, just in case
// it shouldn't make any difference anyway
app.set('env', process.env.NODE_ENV || 'production')
function error_reporting_middleware(req, res, next) {
var calls = 0
res.report_error = res.report_error || function(err) {
calls++
if (err.status && err.status >= 400 && err.status < 600) {
if (calls == 1) {
res.status(err.status)
res.send({error: err.msg || err.message || 'unknown error'})
}
} else {
Logger.logger.error({err: err}, 'unexpected error: @{!err.message}\n@{err.stack}')
if (!res.status || !res.send) {
Logger.logger.error('this is an error in express.js, please report this')
res.destroy()
}
if (calls == 1) {
res.status(500)
res.send({error: 'internal server error'})
}
}
}
next()
}
app.use(error_reporting_middleware)
app.use(Middleware.log_and_etagify)
app.use(function(req, res, next) {
res.setHeader('X-Powered-By', config.user_agent)
next()
})
app.use(Cats.middleware)
app.use(basic_auth(function(user, pass) {
return config.authenticate(user, pass);
}));
app.use(express.bodyParser());
return config.authenticate(user, pass)
}))
app.use(express.json({strict: false, limit: config.max_body_size || '10mb'}))
app.use(express.compress())
app.use(Middleware.anti_loop(config))
// TODO: npm DO NOT support compression :(
app.use(express.compress());
// validate all of these params as a package name
// this might be too harsh, so ask if it causes trouble
app.param('package', validate_name)
app.param('filename', validate_name)
app.param('tag', validate_name)
app.param('version', validate_name)
app.param('revision', validate_name)
app.param('package', validate_name);
app.param('filename', validate_name);
// these can't be safely put into express url for some reason
app.param('_rev', match(/^-rev$/))
app.param('org_couchdb_user', match(/^org\.couchdb\.user:/))
/* app.get('/', function(req, res) {
res.send({
error: 'unimplemented'
});
});*/
})
})*/
/* app.get('/-/all', function(req, res) {
var https = require('https');
var JSONStream = require('JSONStream');
var https = require('https')
var JSONStream = require('JSONStream')
var request = require('request')({
url: 'https://registry.npmjs.org/-/all',
ca: require('./npmsslkeys'),
})
.pipe(JSONStream.parse('*'))
.on('data', function(d) {
console.log(d);
});
});*/
console.log(d)
})
})*/
// TODO: anonymous user?
app.get('/:package/:version?', can('access'), function(req, res, next) {
storage.get_package(req.params.package, function(err, info) {
if (err) return next(err);
info = utils.filter_tarball_urls(info, req, config);
storage.get_package(req.params.package, {req: req}, function(err, info) {
if (err) return next(err)
info = utils.filter_tarball_urls(info, req, config)
// XXX: in some cases npm calls for /:package and in other cases
// for /:package/:version - should investigate that
if (req.params.version) {
if (info.versions[req.params.version] != null) {
info = info.versions[req.params.version];
} else {
return next(new UError({
status: 404,
msg: 'version not found: ' + req.params.version
}));
var version = req.params.version
, t
if (!version) {
return res.send(info)
}
if ((t = utils.get_version(info, version)) != null) {
return res.send(t)
}
if (info['dist-tags'] != null) {
if (info['dist-tags'][version] != null) {
version = info['dist-tags'][version]
if ((t = utils.get_version(info, version)) != null) {
return res.send(t)
}
}
}
res.send(info);
});
});
return next(new UError({
status: 404,
msg: 'version not found: ' + req.params.version
}))
})
})
app.get('/:package/-/:filename', can('access'), function(req, res, next) {
var stream = storage.get_tarball(req.params.package, req.params.filename);
var stream = storage.get_tarball(req.params.package, req.params.filename)
stream.on('content-length', function(v) {
res.header('Content-Length', v)
})
stream.on('error', function(err) {
return next(err);
});
res.header('content-type', 'application/octet-stream');
stream.pipe(res);
});
return res.report_error(err)
})
res.header('Content-Type', 'application/octet-stream')
stream.pipe(res)
})
//app.get('/*', function(req, res) {
// proxy.request(req, res);
//});
// proxy.request(req, res)
//})
// placeholder 'cause npm requires users to be authenticated to publish
// we do not do any real authentication yet
app.post('/_session', cookies.express(), function(req, res) {
res.cookies.set('AuthSession', String(Math.random()), {
// npmjs.org sets a 10h expiry
expires: new Date(Date.now() + 10*60*60*1000)
});
res.send({"ok":true,"name":"somebody","roles":[]});
});
})
res.send({'ok':true,'name':'somebody','roles':[]})
})
app.get('/-/user/:argument', function(req, res, next) {
// can't put 'org.couchdb.user' in route address for some reason
if (req.params.argument.split(':')[0] !== 'org.couchdb.user') return next('route');
res.status(200);
app.get('/-/user/:org_couchdb_user', function(req, res, next) {
res.status(200)
return res.send({
ok: 'you are authenticated as "' + req.user + '"',
});
});
ok: 'you are authenticated as "' + req.remoteUser + '"',
})
})
app.put('/-/user/:argument', function(req, res, next) {
// can't put 'org.couchdb.user' in route address for some reason
if (req.params.argument.split(':')[0] !== 'org.couchdb.user') return next('route');
res.status(409);
app.put('/-/user/:org_couchdb_user', function(req, res, next) {
res.status(409)
return res.send({
error: 'registration is not implemented',
});
});
})
})
app.put('/-/user/:argument/-rev/*', function(req, res, next) {
// can't put 'org.couchdb.user' in route address for some reason
if (req.params.argument.split(':')[0] !== 'org.couchdb.user') return next('route');
res.status(201);
app.put('/-/user/:org_couchdb_user/-rev/*', function(req, res, next) {
if (req.remoteUser == null) {
res.status(403)
return res.send({
error: 'bad username/password, access denied',
})
}
res.status(201)
return res.send({
ok: 'you are authenticated as "' + req.user + '"',
});
});
ok: 'you are authenticated as "' + req.remoteUser + '"',
})
})
// tagging a package
app.put('/:package/:tag', can('publish'), media('application/json'), function(req, res, next) {
if (typeof(req.body) !== 'string') return next('route')
storage.add_tag(req.params.package, req.body, req.params.tag, function(err) {
if (err) return next(err)
res.status(201)
return res.send({
ok: 'package tagged'
})
})
})
// publishing a package
app.put('/:package', can('publish'), media('application/json'), expect_json, function(req, res, next) {
var name = req.params.package;
app.put('/:package/:_rev?/:revision?', can('publish'), media('application/json'), expect_json, function(req, res, next) {
var name = req.params.package
if (Object.keys(req.body).length == 1 && utils.is_object(req.body.users)) {
return next(new UError({
// 501 status is more meaningful, but npm doesn't show error message for 5xx
status: 404,
msg: 'npm star|unstar calls are not implemented',
}))
}
try {
var metadata = utils.validate_metadata(req.body, name);
var metadata = utils.validate_metadata(req.body, name)
} catch(err) {
return next(new UError({
status: 422,
msg: 'bad incoming package data',
}));
}))
}
storage.add_package(name, metadata, function(err) {
if (err) return next(err);
res.status(201);
if (req.params._rev) {
storage.change_package(name, metadata, req.params.revision, function(err) {
after_change(err, 'package changed')
})
} else {
storage.add_package(name, metadata, function(err) {
after_change(err, 'created new package')
})
}
function after_change(err, ok_message) {
// old npm behaviour
if (metadata._attachments == null) {
if (err) return next(err)
res.status(201)
return res.send({
ok: ok_message
})
}
// npm-registry-client 0.3+ embeds tarball into the json upload
// https://github.com/isaacs/npm-registry-client/commit/e9fbeb8b67f249394f735c74ef11fe4720d46ca0
// issue #31, dealing with it here:
if (typeof(metadata._attachments) != 'object'
|| Object.keys(metadata._attachments).length != 1
|| typeof(metadata.versions) != 'object'
|| Object.keys(metadata.versions).length != 1) {
// npm is doing something strange again
// if this happens in normal circumstances, report it as a bug
return next(new UError({
status: 400,
msg: 'unsupported registry call',
}))
}
if (err && err.status != 409) return next(err)
// at this point document is either created or existed before
var t1 = Object.keys(metadata._attachments)[0]
create_tarball(t1, metadata._attachments[t1], function(err) {
if (err) return next(err)
var t2 = Object.keys(metadata.versions)[0]
create_version(t2, metadata.versions[t2], function(err) {
if (err) return next(err)
res.status(201)
return res.send({
ok: ok_message
})
})
})
}
function create_tarball(filename, data, cb) {
var stream = storage.add_tarball(name, filename)
stream.on('error', function(err) {
cb(err)
})
stream.on('success', function() {
cb()
})
// this is dumb and memory-consuming, but what choices do we have?
stream.end(new Buffer(data.data, 'base64'))
stream.done()
}
function create_version(version, data, cb) {
storage.add_version(name, version, data, null, cb)
}
})
// unpublishing an entire package
app.delete('/:package/-rev/*', can('publish'), function(req, res, next) {
storage.remove_package(req.params.package, function(err) {
if (err) return next(err)
res.status(201)
return res.send({
ok: 'created new package'
});
});
});
ok: 'package removed'
})
})
})
// removing a tarball
app.delete('/:package/-/:filename/-rev/:revision', can('publish'), function(req, res, next) {
storage.remove_tarball(req.params.package, req.params.filename, req.params.revision, function(err) {
if (err) return next(err)
res.status(201)
return res.send({
ok: 'tarball removed'
})
})
})
// uploading package tarball
app.put('/:package/-/:filename/*', can('publish'), media('application/octet-stream'), function(req, res, next) {
var name = req.params.package;
var name = req.params.package
var stream = storage.add_tarball(name, req.params.filename);
req.pipe(stream);
var stream = storage.add_tarball(name, req.params.filename)
req.pipe(stream)
// checking if end event came before closing
var complete = false;
var complete = false
req.on('end', function() {
complete = true;
stream.done();
});
complete = true
stream.done()
})
req.on('close', function() {
if (!complete) {
stream.abort();
stream.abort()
}
});
})
stream.on('error', function(err) {
return next(err);
});
return res.report_error(err)
})
stream.on('success', function() {
res.status(201);
res.status(201)
return res.send({
ok: 'tarball uploaded successfully'
});
});
});
})
})
})
// adding a version
app.put('/:package/:version/-tag/:tag', can('publish'), media('application/json'), expect_json, function(req, res, next) {
var name = req.params.package;
var version = req.params.version;
var tag = req.params.tag;
var name = req.params.package
, version = req.params.version
, tag = req.params.tag
storage.add_version(name, version, req.body, tag, function(err) {
if (err) return next(err);
res.status(201);
if (err) return next(err)
res.status(201)
return res.send({
ok: 'package published'
});
});
});
})
})
})
app.use(app.router);
app.use(app.router)
app.use(function(err, req, res, next) {
if (err.status && err.status >= 400 && err.status < 600) {
res.status(err.status);
res.send({error: err.msg || err.message || 'unknown error'});
} else {
console.log(err);
console.log(err.stack);
res.status(500);
res.send({error: 'internal server error'});
if (typeof(res.report_error) !== 'function') {
// in case of very early error this middleware may not be loaded before error is generated
// fixing that
error_reporting_middleware(req, res, function(){})
}
});
res.report_error(err)
})
return app;
};
return app
}
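
A minimal sketch of booting the exported app factory; the config keys shown
are assumptions modeled on the sample config file above, not a verified
minimal set:

var config_hash = {
  self_path: './config.yaml',
  storage: './storage',
  uplinks: { npmjs: { url: 'https://registry.npmjs.org/' } },
  packages: { '*': { allow_access: 'all', allow_publish: 'admin', proxy_access: 'npmjs' } },
}
require('./lib/index')(config_hash).listen(4873)  // hypothetical require path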


@@ -1,164 +1,231 @@
var fs = require('fs');
var Path = require('path');
var mystreams = require('./streams');
var FSError = require('./error').FSError;
var fs = require('fs')
, Path = require('path')
, mkdirp = require('mkdirp')
, mystreams = require('./streams')
, FSError = require('./error').FSError
function make_directories(dest, cb) {
var dir = Path.dirname(dest);
if (dir === '.' || dir === '..') return cb();
fs.mkdir(dir, function(err) {
if (err && err.code === 'ENOENT') {
make_directories(dir, function() {
fs.mkdir(dir, cb);
})
} else {
cb();
try {
var fsExt = require('fs-ext')
} catch(e) {
fsExt = {
flock: function() {
arguments[arguments.length-1]()
}
});
}
}
function write(dest, data, cb) {
var safe_write = function(cb) {
var tmpname = dest + '.tmp' + String(Math.random()).substr(2);
var tmpname = dest + '.tmp' + String(Math.random()).substr(2)
fs.writeFile(tmpname, data, function(err) {
if (err) return cb(err);
return fs.rename(tmpname, dest, cb);
});
if (err) return cb(err)
return fs.rename(tmpname, dest, cb)
})
}
safe_write(function(err) {
if (err && err.code === 'ENOENT') {
make_directories(dest, function() {
safe_write(cb);
mkdirp(Path.dirname(dest), function(err) {
if (err) return cb(err)
safe_write(cb)
})
} else {
cb(err);
cb(err)
}
});
})
}
function write_stream(name) {
var stream = new mystreams.UploadTarballStream();
var stream = new mystreams.UploadTarballStream()
var _ended = 0;
var _ended = 0
stream.on('end', function() {
_ended = 1;
});
_ended = 1
})
fs.exists(name, function(exists) {
if (exists) return stream.emit('error', new FSError('EEXISTS'));
if (exists) return stream.emit('error', new FSError('EEXISTS'))
var tmpname = name + '.tmp-'+String(Math.random()).replace(/^0\./, '');
var file = fs.createWriteStream(tmpname);
stream.pipe(file);
var tmpname = name + '.tmp-'+String(Math.random()).replace(/^0\./, '')
, file = fs.createWriteStream(tmpname)
, opened = false
stream.pipe(file)
stream.done = function() {
function onend() {
file.on('close', function() {
fs.rename(tmpname, name, function(err) {
if (err) stream.emit('error', err);
stream.emit('success');
});
});
file.destroySoon();
if (err) stream.emit('error', err)
stream.emit('success')
})
})
file.destroySoon()
}
if (_ended) {
onend();
onend()
} else {
stream.on('end', onend);
stream.on('end', onend)
}
};
}
stream.abort = function() {
file.on('close', function() {
fs.unlink(tmpname, function(){});
});
file.destroySoon();
};
if (opened) {
opened = false
file.on('close', function() {
fs.unlink(tmpname, function(){})
})
}
file.destroySoon()
}
file.on('open', function() {
opened = true
// re-emitting open because it's handled in storage.js
stream.emit('open');
});
stream.emit('open')
})
file.on('error', function(err) {
stream.emit('error', err);
});
});
return stream;
stream.emit('error', err)
})
})
return stream
}
function read_stream(name, stream, callback) {
return fs.createReadStream(name);
var rstream = fs.createReadStream(name)
rstream.on('error', function(err) {
stream.emit('error', err)
})
rstream.on('open', function(fd) {
fs.fstat(fd, function(err, stats) {
if (err) return stream.emit('error', err)
stream.emit('content-length', stats.size)
stream.emit('open')
rstream.pipe(stream)
})
})
var stream = new mystreams.ReadTarballStream()
stream.abort = function() {
rstream.close()
}
return stream
}
function create(name, contents, callback) {
fs.exists(name, function(exists) {
if (exists) return callback(new FSError('EEXISTS'));
write(name, contents, callback);
});
if (exists) return callback(new FSError('EEXISTS'))
write(name, contents, callback)
})
}
function update(name, contents, callback) {
fs.exists(name, function(exists) {
if (!exists) return callback(new FSError('ENOENT'));
write(name, contents, callback);
});
if (!exists) return callback(new FSError('ENOENT'))
write(name, contents, callback)
})
}
function read(name, callback) {
fs.readFile(name, callback);
fs.readFile(name, callback)
}
function Storage(path) {
this.path = path;
try {
fs.mkdirSync(path);
console.log('created new packages directory: ', path);
} catch(err) {
if (err.code !== 'EEXIST') throw new Error(err);
}
// open and flock with exponential backoff
function open_flock(name, opmod, flmod, tries, backoff, cb) {
fs.open(name, opmod, function(err, fd) {
if (err) return cb(err, fd)
fsExt.flock(fd, flmod, function(err) {
if (err) {
if (!tries) {
fs.close(fd, function() {
cb(err)
})
} else {
fs.close(fd, function() {
setTimeout(function() {
open_flock(name, opmod, flmod, tries-1, backoff*2, cb)
}, backoff)
})
}
} else {
cb(null, fd)
}
})
})
}
Storage.prototype.read = function(name, cb) {
read(this.path + '/' + name, cb);
// this function neither unlocks the file nor closes it
// it'll have to be done manually later
function lock_and_read(name, callback) {
open_flock(name, 'r', 'exnb', 4, 10, function(err, fd) {
if (err) return callback(err, fd)
fs.fstat(fd, function(err, st) {
if (err) return callback(err, fd)
var buffer = new Buffer(st.size)
fs.read(fd, buffer, 0, st.size, null, function(err, bytesRead, buffer) {
if (bytesRead != st.size) return callback(new Error('st.size != bytesRead'), fd)
callback(null, fd, buffer)
})
})
})
}
Storage.prototype.read_json = function(name, cb) {
read(this.path + '/' + name, function(err, res) {
if (err) return cb(err);
cb(null, JSON.parse(res.toString('utf8')));
});
module.exports.read = read
module.exports.read_json = function(name, cb) {
read(name, function(err, res) {
if (err) return cb(err)
var args = []
try {
args = [null, JSON.parse(res.toString('utf8'))]
} catch(err) {
args = [err]
}
cb.apply(null, args)
})
}
Storage.prototype.create = function(name, value, cb) {
create(this.path + '/' + name, value, cb);
module.exports.lock_and_read = lock_and_read
module.exports.lock_and_read_json = function(name, cb) {
lock_and_read(name, function(err, fd, res) {
if (err) return cb(err, fd)
var args = []
try {
args = [null, fd, JSON.parse(res.toString('utf8'))]
} catch(err) {
args = [err, fd]
}
cb.apply(null, args)
})
}
Storage.prototype.create_json = function(name, value, cb) {
create(this.path + '/' + name, JSON.stringify(value, null, '\t'), cb);
module.exports.create = create
module.exports.create_json = function(name, value, cb) {
create(name, JSON.stringify(value, null, '\t'), cb)
}
Storage.prototype.update = function(name, value, cb) {
update(this.path + '/' + name, value, cb);
module.exports.update = update
module.exports.update_json = function(name, value, cb) {
update(name, JSON.stringify(value, null, '\t'), cb)
}
Storage.prototype.update_json = function(name, value, cb) {
update(this.path + '/' + name, JSON.stringify(value, null, '\t'), cb);
module.exports.write = write
module.exports.write_json = function(name, value, cb) {
write(name, JSON.stringify(value, null, '\t'), cb)
}
Storage.prototype.write = function(name, value, cb) {
write(this.path + '/' + name, value, cb);
}
module.exports.write_stream = write_stream
Storage.prototype.write_json = function(name, value, cb) {
write(this.path + '/' + name, JSON.stringify(value, null, '\t'), cb);
}
module.exports.read_stream = read_stream
Storage.prototype.write_stream = function(name, value, cb) {
return write_stream(this.path + '/' + name, value, cb);
}
module.exports.unlink = fs.unlink
Storage.prototype.read_stream = function(name, cb) {
return read_stream(this.path + '/' + name, cb);
}
module.exports = Storage;
module.exports.rmdir = fs.rmdir
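
Since lock_and_read neither unlocks the file nor closes it (per the comment
above), the caller owns the descriptor; a sketch under that assumption, with a
made-up path:

var fs = require('fs')
var local_fs = require('./lib/local-fs')  // hypothetical path
local_fs.lock_and_read_json('storage/pkg/package.json', function(err, fd, json) {
  if (err) return console.error('lock failed:', err)
  // ...mutate json while holding the exclusive lock...
  local_fs.write_json('storage/pkg/package.json', json, function(err) {
    fs.close(fd, function() {})  // closing the fd releases the flock
  })
})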


@@ -1,22 +1,23 @@
var fs = require('fs');
var semver = require('semver');
var Path = require('path');
var fs_storage = require('./local-fs');
var UError = require('./error').UserError;
var utils = require('./utils');
var mystreams = require('./streams');
var info_file = 'package.json';
var fs = require('fs')
, Path = require('path')
, crypto = require('crypto')
, assert = require('assert')
, fs_storage = require('./local-fs')
, UError = require('./error').UserError
, utils = require('./utils')
, mystreams = require('./streams')
, Logger = require('./logger')
, info_file = 'package.json'
//
// Implements Storage interface
// (same for storage.js, local-storage.js, up-storage.js)
//
function Storage(config) {
if (!(this instanceof Storage)) return new Storage(config);
this.config = config;
var path = Path.resolve(Path.dirname(this.config.self_path), this.config.storage);
this.storage = new fs_storage(path);
return this;
if (!(this instanceof Storage)) return new Storage(config)
this.config = config
this.logger = Logger.logger.child({sub: 'fs'})
return this
}
// returns the minimal package file
@@ -28,214 +29,511 @@ function get_boilerplate(name) {
'dist-tags': {},
// our own object
// type: "filename"->"metadata"
'_distfiles': {},
};
'_attachments': {},
'_uplinks': {},
}
}
Storage.prototype._internal_error = function(err, file, msg) {
this.logger.error( {err: err, file: file}
, msg + ' @{file}: @{!err.message}'
)
return new UError({
status: 500,
msg: 'internal server error'
})
}
Storage.prototype.add_package = function(name, metadata, callback) {
this.storage.create_json(name + '/' + info_file, get_boilerplate(name), function(err) {
this.storage(name).create_json(info_file, get_boilerplate(name), function(err) {
if (err && err.code === 'EEXISTS') {
return callback(new UError({
status: 409,
msg: 'this package is already present'
}));
}))
}
callback();
});
callback()
})
}
Storage.prototype.remove_package = function(name, callback) {
var self = this
self.logger.info({name: name}, 'unpublishing @{name} (all)')
self.storage(name).read_json(info_file, function(err, data) {
if (err) {
if (err.code === 'ENOENT') {
return callback(new UError({
status: 404,
msg: 'no such package available',
}))
} else {
return callback(err)
}
}
self._normalize_package(data)
self.storage(name).unlink(info_file, function(err) {
if (err) return callback(err)
var files = Object.keys(data._attachments)
function unlinkNext(cb) {
if (files.length === 0) return cb()
var file = files.shift()
self.storage(name).unlink(file, function() {
unlinkNext(cb)
})
}
unlinkNext(function() {
// try to unlink the directory, but ignore errors because it can fail
self.storage(name).rmdir('.', function(err) {
callback()
})
})
})
})
}
Storage.prototype._read_create_package = function(name, callback) {
var self = this;
self.storage.read_json(name + '/' + info_file, function(err, data) {
var self = this
self.storage(name).read_json(info_file, function(err, data) {
// TODO: race condition
if (err) {
if (err.code === 'ENOENT') {
// if package doesn't exist, we create it here
data = get_boilerplate(name);
data = get_boilerplate(name)
} else {
return callback(err);
return callback(self._internal_error(err, info_file, 'error reading'))
}
}
callback(null, data);
});
self._normalize_package(data)
callback(null, data)
})
}
// synchronize remote package info with the local one
// TODO: readfile called twice
Storage.prototype.update_versions = function(name, newdata, callback) {
var self = this;
var self = this
self._read_create_package(name, function(err, data) {
if (err) return callback(err);
if (err) return callback(err)
var change = false;
var change = false
for (var ver in newdata.versions) {
if (data.versions[ver] == null) {
var verdata = newdata.versions[ver];
var verdata = newdata.versions[ver]
// why does anyone need to keep that in a database?
delete verdata.readme;
delete verdata.readme
change = true
data.versions[ver] = verdata
change = true;
data.versions[ver] = verdata;
if (verdata.dist && verdata.dist.tarball) {
var url = utils.parse_tarball_url(
verdata.dist.__sinopia_orig_tarball || verdata.dist.tarball
);
)
// we do NOT overwrite any existing records
if (url != null && data._distfiles[url.filename] == null) {
data._distfiles[url.filename] = {
url: verdata.dist.__sinopia_orig_tarball || verdata.dist.tarball,
sha: verdata.dist.shasum,
};
}
}
}
}
}
for (var tag in newdata['dist-tags']) {
// if tag is updated to reference a later version, that's fine
var need_change =
(data['dist-tags'][tag] == null) ||
(!semver.gte(newdata['dist-tags'][tag], data['dist-tags'][tag]));
if (!Array.isArray(data['dist-tags'][tag]) || data['dist-tags'][tag].length != newdata['dist-tags'][tag].length) {
// backward compat
var need_change = true
} else {
for (var i=0; i<data['dist-tags'][tag].length; i++) {
if (data['dist-tags'][tag][i] != newdata['dist-tags'][tag][i]) {
var need_change = true
break
}
}
}
if (need_change) {
change = true;
data['dist-tags'][tag] = newdata['dist-tags'][tag];
change = true
data['dist-tags'][tag] = newdata['dist-tags'][tag]
}
}
if (change) {
self.storage.write_json(name + '/' + info_file, data, callback);
} else {
callback();
for (var up in newdata._uplinks) {
var need_change =
!utils.is_object(data._uplinks[up]) || (newdata._uplinks[up].etag !== data._uplinks[up].etag || (newdata._uplinks[up].fetched !== data._uplinks[up].fetched))
if (need_change) {
change = true
data._uplinks[up] = newdata._uplinks[up]
}
}
});
if (change) {
self.logger.debug('updating package info')
self._write_package(name, data, function(err) {
callback(err, data)
})
} else {
callback(null, data)
}
})
}
Storage.prototype.add_version = function(name, version, metadata, tag, callback) {
var self = this;
self._read_create_package(name, function(err, data) {
var self = this
self.update_package(name, function updater(data, cb) {
// why does anyone need to keep that in a database?
delete metadata.readme;
if (err) return callback(err);
delete metadata.readme
if (data.versions[version] != null) {
return callback(new UError({
return cb(new UError({
status: 409,
msg: 'this version already present'
}));
}))
}
data.versions[version] = metadata;
data['dist-tags'][tag] = version;
self.storage.update_json(name + '/' + info_file, data, callback);
});
// if uploaded tarball has a different shasum, it's very likely that we have some kind of error
if (utils.is_object(metadata.dist) && typeof(metadata.dist.tarball) === 'string') {
var tarball = metadata.dist.tarball.replace(/.*\//, '')
if (utils.is_object(data._attachments[tarball])) {
if (data._attachments[tarball].shasum != null && metadata.dist.shasum != null) {
if (data._attachments[tarball].shasum != metadata.dist.shasum) {
return cb(new UError({
status: 400,
msg: 'shasum error, ' + data._attachments[tarball].shasum + ' != ' + metadata.dist.shasum,
}))
}
}
data._attachments[tarball].version = version
}
}
data.versions[version] = metadata
utils.tag_version(data, version, tag, self.config)
cb()
}, callback)
}
Storage.prototype.add_tag = function(name, version, tag, callback) {
var self = this
self.update_package(name, function updater(data, cb) {
if (data.versions[version] == null) {
return cb(new UError({
status: 404,
msg: "this version doesn't exist"
}))
}
utils.tag_version(data, version, tag, self.config)
cb()
}, callback)
}
// change package info to tag a specific version
function _add_tag(data, version, tag) {
data['dist-tags'][tag] = version
}
// currently supports unpublishing only
Storage.prototype.change_package = function(name, metadata, revision, callback) {
var self = this
if (!utils.is_object(metadata.versions) || !utils.is_object(metadata['dist-tags'])) {
return callback(new UError({
status: 422,
msg: 'bad data',
}))
}
self.update_package(name, function updater(data, cb) {
for (var ver in data.versions) {
if (metadata.versions[ver] == null) {
self.logger.info({name: name, version: ver}, 'unpublishing @{name}@@{version}')
delete data.versions[ver]
for (var file in data._attachments) {
if (data._attachments[file].version === ver) {
delete data._attachments[file].version
}
}
}
}
data['dist-tags'] = metadata['dist-tags']
cb()
}, function(err) {
if (err) return callback(err)
callback()
})
}
Storage.prototype.remove_tarball = function(name, filename, revision, callback) {
var self = this
assert(utils.validate_name(filename))
self.update_package(name, function updater(data, cb) {
if (data._attachments[filename]) {
delete data._attachments[filename]
cb()
} else {
cb(new UError({
status: 404,
msg: 'no such file available',
}))
}
}, function(err) {
if (err) return callback(err)
self.storage(name).unlink(filename, callback)
})
}
Storage.prototype.add_tarball = function(name, filename) {
var stream = new mystreams.UploadTarballStream();
var _transform = stream._transform;
var length = 0;
assert(utils.validate_name(filename))
var stream = new mystreams.UploadTarballStream()
, _transform = stream._transform
, length = 0
, shasum = crypto.createHash('sha1')
stream._transform = function(data) {
length += data.length;
_transform.apply(stream, arguments);
};
var self = this;
shasum.update(data)
length += data.length
_transform.apply(stream, arguments)
}
var self = this
if (name === info_file || name === '__proto__') {
stream.emit('error', new UError({
status: 403,
msg: 'can\'t use this filename'
}));
}))
}
var wstream = this.storage.write_stream(name + '/' + filename);
var wstream = this.storage(name).write_stream(filename)
wstream.on('error', function(err) {
if (err.code === 'EEXISTS') {
stream.emit('error', new UError({
status: 409,
msg: 'this tarball is already present'
}));
}))
} else if (err.code === 'ENOENT') {
// check if package exists to throw an appropriate message
self.get_package(name, function(_err, res) {
if (_err) {
stream.emit('error', _err);
stream.emit('error', _err)
} else {
stream.emit('error', err);
stream.emit('error', err)
}
});
})
} else {
stream.emit('error', err);
stream.emit('error', err)
}
});
})
wstream.on('open', function() {
// re-emitting open because it's handled in storage.js
stream.emit('open');
});
stream.emit('open')
})
wstream.on('success', function() {
// re-emitting success because it's handled in index.js
stream.emit('success');
});
self.update_package(name, function updater(data, cb) {
data._attachments[filename] = {
shasum: shasum.digest('hex'),
}
cb()
}, function(err) {
if (err) {
stream.emit('error', err)
} else {
stream.emit('success')
}
})
})
stream.abort = function() {
wstream.abort();
};
wstream.abort()
}
stream.done = function() {
if (!length) {
stream.emit('error', new UError({
status: 422,
msg: 'refusing to accept zero-length file'
}));
wstream.abort();
}))
wstream.abort()
} else {
wstream.done();
wstream.done()
}
};
stream.pipe(wstream);
}
stream.pipe(wstream)
return stream;
return stream
}
Storage.prototype.get_tarball = function(name, filename, callback) {
var stream = new mystreams.ReadTarballStream();
stream.abort = function() {
rstream.close();
};
assert(utils.validate_name(filename))
var rstream = this.storage.read_stream(name + '/' + filename);
var stream = new mystreams.ReadTarballStream()
stream.abort = function() {
rstream.abort()
}
var rstream = this.storage(name).read_stream(filename)
rstream.on('error', function(err) {
if (err && err.code === 'ENOENT') {
stream.emit('error', new UError({
status: 404,
msg: 'no such file available',
}));
}))
} else {
stream.emit('error', err);
stream.emit('error', err)
}
});
})
rstream.on('content-length', function(v) {
stream.emit('content-length', v)
})
rstream.on('open', function() {
// re-emitting open because it's handled in storage.js
stream.emit('open');
rstream.pipe(stream);
});
return stream;
stream.emit('open')
rstream.pipe(stream)
})
return stream
}
Storage.prototype.get_package = function(name, callback) {
this.storage.read_json(name + '/' + info_file, function(err, result) {
if (err && err.code === 'ENOENT') {
return callback(new UError({
status: 404,
msg: 'no such package available'
}));
Storage.prototype.get_package = function(name, options, callback) {
if (typeof(options) === 'function') callback = options, options = {}
var self = this
self.storage(name).read_json(info_file, function(err, result) {
if (err) {
if (err.code === 'ENOENT') {
return callback(new UError({
status: 404,
msg: 'no such package available'
}))
} else {
return callback(self._internal_error(err, info_file, 'error reading'))
}
}
callback.apply(null, arguments);
});
self._normalize_package(result)
callback(err, result)
})
}
module.exports = Storage;
//
// This function allows the package to be updated thread-safely
//
// Arguments:
// - name - package name
// - updateFn - function(package, cb) - update function
// - callback - callback that gets invoked after it's all updated
//
// Algorithm:
// 1. lock package.json for writing
// 2. read package.json
// 3. updateFn(pkg, cb), and wait for cb
// 4. write package.json.tmp
// 5. move package.json.tmp to package.json
// 6. callback(err?)
//
Storage.prototype.update_package = function(name, updateFn, _callback) {
var self = this
self.storage(name).lock_and_read_json(info_file, function(err, fd, json) {
function callback() {
var _args = arguments
if (fd) {
fs.close(fd, function(err) {
if (err) return _callback(err)
_callback.apply(null, _args)
})
} else {
_callback.apply(null, _args)
}
}
if (err) {
if (err.code === 'EAGAIN') {
return callback(new UError({
status: 503,
msg: 'resource temporarily unavailable'
}))
} else if (err.code === 'ENOENT') {
return callback(new UError({
status: 404,
msg: 'no such package available',
}))
} else {
return callback(err)
}
}
self._normalize_package(json)
updateFn(json, function(err) {
if (err) return callback(err)
self._write_package(name, json, callback)
})
})
}
Storage.prototype._normalize_package = function(pkg) {
;['versions', 'dist-tags', '_distfiles', '_attachments', '_uplinks'].forEach(function(key) {
if (!utils.is_object(pkg[key])) pkg[key] = {}
})
if (typeof(pkg._rev) !== 'string') pkg._rev = '0-0000000000000000'
}
Storage.prototype._write_package = function(name, json, callback) {
// calculate revision a la couchdb
if (typeof(json._rev) !== 'string') json._rev = '0-0000000000000000'
var rev = json._rev.split('-')
json._rev = ((+rev[0] || 0) + 1) + '-' + crypto.pseudoRandomBytes(8).toString('hex')
this.storage(name).write_json(info_file, json, callback)
}
Storage.prototype.storage = function(package) {
return new Path_Wrapper(
Path.join(
Path.resolve(
Path.dirname(this.config.self_path),
this.config.get_package_setting(package, 'storage') || this.config.storage
),
package
)
)
}
var Path_Wrapper = (function() {
// a wrapper adding paths to fs_storage methods
function Wrapper(path) {
this.path = path
}
for (var i in fs_storage) {
if (fs_storage.hasOwnProperty(i)) {
Wrapper.prototype[i] = wrapper(i)
}
}
function wrapper(method) {
return function(/*...*/) {
var args = Array.prototype.slice.apply(arguments)
args[0] = Path.join(this.path, args[0] || '')
return fs_storage[method].apply(null, args)
}
}
return Wrapper
})()
module.exports = Storage
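
Given the six-step algorithm above, an updater function only ever sees the
parsed, normalized package and signals completion through its callback; a
sketch (instance wiring, package name, and the change itself are hypothetical):

// assuming `storage` is an instance of the Storage class above,
// constructed with a config the same way index.js does it
storage.update_package('some-package', function updater(pkg, cb) {
  pkg['dist-tags']['beta'] = '1.2.3'  // hypothetical change; written back with a bumped _rev
  cb()
}, function(err) {
  if (err) console.error('update failed:', err.msg || err.message)
})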

lib/logger.js (new file, 149 lines)

@@ -0,0 +1,149 @@
var Logger = require('bunyan')
, Stream = require('stream')
, utils = require('./utils')
function getlvl(x) {
switch(true) {
case x < 15: return 'trace'
case x < 25: return 'debug'
case x < 35: return 'info'
case x == 35: return 'http'
case x < 45: return 'warn'
case x < 55: return 'error'
default: return 'fatal'
}
}
module.exports.setup = function(logs) {
var streams = []
if (logs == null) logs = [{ type: 'stdout', format: 'pretty', level: 'http' }]
logs.forEach(function(target) {
var stream = new Stream()
stream.writable = true
if (target.type === 'stdout' || target.type === 'stderr') {
// destination stream
var dest = target.type === 'stdout' ? process.stdout : process.stderr
if (target.format === 'pretty') {
// making a fake stream for pretty-printing
stream.write = function(obj) {
dest.write(print(obj.level, obj.msg, obj, dest.isTTY) + '\n')
}
} else {
stream.write = function(obj) {
dest.write(JSON.stringify(obj, Logger.safeCycles()) + '\n')
}
}
} else if (target.type === 'file') {
var dest = require('fs').createWriteStream(target.path, {flags: 'a', encoding: 'utf8'})
dest.on('error', function (err) {
Logger.emit('error', err)
})
stream.write = function(obj) {
dest.write(JSON.stringify(obj, Logger.safeCycles()) + '\n')
}
} else {
throw new Error('wrong target type for a log')
}
if (target.level === 'http') target.level = 35
streams.push({
type: 'raw',
level: target.level || 35,
stream: stream,
})
})
var logger = new Logger({
name: 'sinopia',
streams: streams,
serializers: {
err: Logger.stdSerializers.err,
req: Logger.stdSerializers.req,
res: Logger.stdSerializers.res,
},
})
module.exports.logger = logger
}
// adapted from socket.io
// this part was converted to coffee-script and back again over the years,
// so it might look weird
// level to color
var levels = {
fatal: 31,
error: 31,
warn: 33,
http: 35,
info: 36,
debug: 90,
trace: 90,
}
var max = 0
for (var l in levels) {
max = Math.max(max, l.length)
}
function pad(str) {
if (str.length < max) return str + new Array(max - str.length + 1).join(' ')
return str
}
var subsystems = [{
in: '\033[32m<--\033[39m',
out: '\033[33m-->\033[39m',
fs: '\033[90m-=-\033[39m',
default: '\033[34m---\033[39m',
}, {
in: '<--',
out: '-->',
fs: '-=-',
default: '---',
}]
function print(type, msg, obj, colors) {
if (typeof type === 'number') type = getlvl(type)
var finalmsg = msg.replace(/@{(!?[$A-Za-z_][$0-9A-Za-z\._]*)}/g, function(_, name) {
var str = obj, is_error
if (name[0] === '!') {
name = name.substr(1)
is_error = true
}
var _ref = name.split('.')
for (var _i = 0; _i < _ref.length; _i++) {
var id = _ref[_i]
if (utils.is_object(str) || Array.isArray(str)) {
str = str[id]
} else {
str = undefined
}
}
if (typeof(str) === 'string') {
if (!colors || ~str.indexOf('\n')) {
return str
} else if (is_error) {
return '\033[31m' + str + '\033[39m'
} else {
return '\033[32m' + str + '\033[39m'
}
} else {
return require('util').inspect(str, void 0, void 0, colors)
}
})
var sub = subsystems[+!colors][obj.sub] || subsystems[+!colors].default
// ^^--- black magic... kidding, just "colors ? 0 : 1"
if (colors) {
return ' \033[' + levels[type] + 'm' + (pad(type)) + '\033[39m ' + sub + ' ' + finalmsg
} else {
return ' ' + (pad(type)) + ' ' + sub + ' ' + finalmsg
}
}
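
A sketch of the template syntax print() implements: @{name} interpolates a
field from the log record, dotted names walk nested objects, and a leading !
renders the value in the error color (pretty format, after setup() has run;
the require path is an assumption):

var Logger = require('./lib/logger')  // hypothetical path
Logger.setup([{ type: 'stdout', format: 'pretty', level: 'trace' }])
Logger.logger.warn({ user: 'admin', req: { url: '/foo' } },
                   'user @{user} requested @{!req.url}')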


@@ -1,18 +1,21 @@
var crypto = require('crypto');
var utils = require('./utils');
var UError = require('./error').UserError;
var crypto = require('crypto')
, utils = require('./utils')
, UError = require('./error').UserError
, Logger = require('./logger')
module.exports.validate_name = function validate_name(req, res, next, value, name) {
if (utils.validate_name(req.params.package)) {
req.params.package = String(req.params.package);
next();
if (value.charAt(0) === '-') {
// special case in couchdb usually
next('route')
} else if (utils.validate_name(value)) {
next()
} else {
next(new UError({
status: 403,
msg: 'invalid package name',
}));
msg: 'invalid ' + name,
}))
}
};
}
module.exports.media = function media(expect) {
return function(req, res, next) {
@@ -20,88 +23,180 @@ module.exports.media = function media(expect) {
next(new UError({
status: 415,
msg: 'wrong content-type, expect: '+expect+', got: '+req.headers['content-type'],
}));
}))
} else {
next();
next()
}
}
}
module.exports.expect_json = function expect_json(req, res, next) {
if (typeof(req.body) !== 'object') {
if (!utils.is_object(req.body)) {
return next({
status: 400,
msg: 'can\'t parse incoming json',
});
})
}
next();
next()
}
module.exports.basic_auth = function basic_auth(callback) {
return function(req, res, next) {
var authorization = req.headers.authorization;
return function(req, res, _next) {
function next(err) {
// uncomment this to reject users with bad auth headers
//return _next.apply(null, arguments)
if (req.user) return next();
if (authorization == null) {
req.user = req.remoteUser = 'anonymous';
return next();
// swallow error, user remains unauthorized
// set remoteUserError to indicate that user was attempting authentication
if (err) req.remoteUserError = err.msg
return _next()
}
var parts = authorization.split(' ');
var authorization = req.headers.authorization
if (req.remoteUser != null) return next()
if (authorization == null) return next()
var parts = authorization.split(' ')
if (parts.length !== 2) return next({
status: 400,
msg: 'bad authorization header',
});
})
var scheme = parts[0]
, credentials = new Buffer(parts[1], 'base64').toString()
, index = credentials.indexOf(':');
, index = credentials.indexOf(':')
if ('Basic' != scheme || index < 0) return next({
if (scheme !== 'Basic' || index < 0) return next({
status: 400,
msg: 'bad authorization header',
});
})
var user = credentials.slice(0, index)
, pass = credentials.slice(index + 1);
, pass = credentials.slice(index + 1)
if (callback(user, pass)) {
req.user = req.remoteUser = user;
next();
req.remoteUser = user
next()
} else {
next({
status: 403,
msg: 'bad username/password, access denied',
});
})
}
}
};
}
module.exports.anti_loop = function(config) {
return function(req, res, next) {
if (req.headers.via != null) {
var arr = req.headers.via.split(',')
for (var i=0; i<arr.length; i++) {
var m = arr[i].match(/\s*(\S+)\s+(\S+)/)
if (m && m[2] === config.server_id) {
return next(new UError({
status: 508,
msg: 'loop detected',
}))
}
}
}
next()
}
}
// express doesn't do etags with requests <= 1024b
// we use md5 here; it works well on 1k+ bytes, but poorly with less data
// could improve performance using crc32 after benchmarks
function md5sum(data) {
return crypto.createHash('md5').update(data).digest('hex');
return crypto.createHash('md5').update(data).digest('hex')
}
// using it for json only right now
module.exports.etagify = function(req, res, next) {
var _send = res.send;
module.exports.log_and_etagify = function(req, res, next) {
// logger
req.log = Logger.logger.child({sub: 'in'})
var _auth = req.headers.authorization
if (_auth) req.headers.authorization = '<Classified>'
req.log.info({req: req, ip: req.ip}, '@{ip} requested \'@{req.method} @{req.url}\'')
if (_auth) req.headers.authorization = _auth
var bytesin = 0
req.on('data', function(chunk){ bytesin += chunk.length })
var _send = res.send
res.send = function(body) {
if (typeof(body) === 'string' || typeof(body) === 'object') {
res.header('Content-type', 'application/json');
try {
if (typeof(body) === 'string' || typeof(body) === 'object') {
res.header('Content-type', 'application/json')
if (typeof(body) === 'object') {
body = JSON.stringify(body, undefined, '\t');
if (typeof(body) === 'object' && body != null) {
if (body.error) {
res._sinopia_error = body.error
}
body = JSON.stringify(body, undefined, '\t') + '\n'
}
// don't send etags with errors
if (!res.statusCode || (res.statusCode >= 200 && res.statusCode < 300)) {
res.header('ETag', '"' + md5sum(body) + '"')
}
} else {
// send(null), send(204), etc.
}
} catch(err) {
// if sinopia sends headers first, and then calls res.send()
// as an error handler, we can't report the error properly,
// and should just close the socket
if (err.message.match(/set headers after they are sent/)) {
return res.socket.destroy()
} else {
throw err
}
res.header('ETag', '"' + md5sum(body) + '"');
} else {
// send(null), send(204), etc.
}
_send.call(res, body);
};
next();
res.send = _send
res.send(body)
}
var bytesout = 0
, _write = res.write
res.write = function(buf) {
bytesout += buf.length
_write.apply(res, arguments)
}
function log() {
var msg = '@{status}, user: @{user}, req: \'@{request.method} @{request.url}\''
if (res._sinopia_error) {
msg += ', error: @{!error}'
} else {
msg += ', bytes: @{bytes.in}/@{bytes.out}'
}
req.log.warn({
request: {method: req.method, url: req.url},
level: 35, // http
user: req.remoteUser,
status: res.statusCode,
error: res._sinopia_error,
bytes: {
in: bytesin,
out: bytesout,
}
}, msg)
}
req.on('close', function() {
log(true)
})
var _end = res.end
res.end = function(buf) {
if (buf) bytesout += buf.length
_end.apply(res, arguments)
log()
}
next()
}
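
anti_loop relies on the standard Via header: each comma-separated entry is
'<protocol> <pseudonym>', and a pseudonym equal to config.server_id means the
request already passed through this server. A made-up header that would
trigger the 508, assuming config.server_id is 'f2ca1bb6':

Via: 1.1 some-upstream-proxy, 1.1 f2ca1bb6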


@@ -1,16 +0,0 @@
/*
module.exports = function() {
local_store()
for each uplink
status = uplink_store()
if (status == 201) {
success.push(uplink)
} else {
bail()
}
bail:
local_revert()
uplink_revert()
};
*/


@@ -1,62 +0,0 @@
//
// Get this thingy from `npmconf` package if it ever changes...
//
module.exports = // the npm CA certificate.
[ "-----BEGIN CERTIFICATE-----\n"+
"MIIChzCCAfACCQDauvz/KHp8ejANBgkqhkiG9w0BAQUFADCBhzELMAkGA1UEBhMC\n"+
"VVMxCzAJBgNVBAgTAkNBMRAwDgYDVQQHEwdPYWtsYW5kMQwwCgYDVQQKEwNucG0x\n"+
"IjAgBgNVBAsTGW5wbSBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxDjAMBgNVBAMTBW5w\n"+
"bUNBMRcwFQYJKoZIhvcNAQkBFghpQGl6cy5tZTAeFw0xMTA5MDUwMTQ3MTdaFw0y\n"+
"MTA5MDIwMTQ3MTdaMIGHMQswCQYDVQQGEwJVUzELMAkGA1UECBMCQ0ExEDAOBgNV\n"+
"BAcTB09ha2xhbmQxDDAKBgNVBAoTA25wbTEiMCAGA1UECxMZbnBtIENlcnRpZmlj\n"+
"YXRlIEF1dGhvcml0eTEOMAwGA1UEAxMFbnBtQ0ExFzAVBgkqhkiG9w0BCQEWCGlA\n"+
"aXpzLm1lMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDLI4tIqPpRW+ACw9GE\n"+
"OgBlJZwK5f8nnKCLK629Pv5yJpQKs3DENExAyOgDcyaF0HD0zk8zTp+ZsLaNdKOz\n"+
"Gn2U181KGprGKAXP6DU6ByOJDWmTlY6+Ad1laYT0m64fERSpHw/hjD3D+iX4aMOl\n"+
"y0HdbT5m1ZGh6SJz3ZqxavhHLQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAC4ySDbC\n"+
"l7W1WpLmtLGEQ/yuMLUf6Jy/vr+CRp4h+UzL+IQpCv8FfxsYE7dhf/bmWTEupBkv\n"+
"yNL18lipt2jSvR3v6oAHAReotvdjqhxddpe5Holns6EQd1/xEZ7sB1YhQKJtvUrl\n"+
"ZNufy1Jf1r0ldEGeA+0ISck7s+xSh9rQD2Op\n"+
"-----END CERTIFICATE-----\n",
// "GlobalSign Root CA"
"-----BEGIN CERTIFICATE-----\n"+
"MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkGA1UEBhMCQkUx\n"+
"GTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jvb3QgQ0ExGzAZBgNVBAMTEkds\n"+
"b2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAwMDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNV\n"+
"BAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYD\n"+
"VQQDExJHbG9iYWxTaWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDa\n"+
"DuaZjc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavpxy0Sy6sc\n"+
"THAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp1Wrjsok6Vjk4bwY8iGlb\n"+
"Kk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdGsnUOhugZitVtbNV4FpWi6cgKOOvyJBNP\n"+
"c1STE4U6G7weNLWLBYy5d4ux2x8gkasJU26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrX\n"+
"gzT/LCrBbBlDSgeF59N89iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV\n"+
"HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0BAQUF\n"+
"AAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOzyj1hTdNGCbM+w6Dj\n"+
"Y1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE38NflNUVyRRBnMRddWQVDf9VMOyG\n"+
"j/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymPAbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhH\n"+
"hm4qxFYxldBniYUr+WymXUadDKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveC\n"+
"X4XSQRjbgbMEHMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==\n"+
"-----END CERTIFICATE-----\n",
// "GlobalSign Root CA - R2"
"-----BEGIN CERTIFICATE-----\n"+
"MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4GA1UECxMXR2xv\n"+
"YmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2Jh\n"+
"bFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxT\n"+
"aWduIFJvb3QgQ0EgLSBSMjETMBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2ln\n"+
"bjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6\n"+
"ErPLv4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8eoLrvozp\n"+
"s6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklqtTleiDTsvHgMCJiEbKjN\n"+
"S7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzdC9XZzPnqJworc5HGnRusyMvo4KD0L5CL\n"+
"TfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pazq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6C\n"+
"ygPCm48CAwEAAaOBnDCBmTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E\n"+
"FgQUm+IHV2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5nbG9i\n"+
"YWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG3lm0mi3f3BmGLjAN\n"+
"BgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4GsJ0/WwbgcQ3izDJr86iw8bmEbTUsp\n"+
"9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu\n"+
"01yiPqFbQfXf5WRDLenVOavSot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG7\n"+
"9G+dwfCMNYxdAfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7\n"+
"TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg==\n"+
"-----END CERTIFICATE-----\n" ]

lib/status-cats.js (new file, 74 lines)

@@ -0,0 +1,74 @@
// see https://secure.flickr.com/photos/girliemac/sets/72157628409467125
var images = {
100: 'aVvDhR', // '6512768893', // 100 - Continue
101: 'aXXExP', // '6540479029', // 101 - Switching Protocols
200: 'aVuVsF', // '6512628175', // 200 - OK
201: 'aXWm1Z', // '6540221577', // 201 - Created
202: 'aXXEyF', // '6540479079', // 202 - Accepted
204: 'aYyJ7B', // '6547319943', // 204 - No Content
206: 'aVEnUP', // '6514473163', // 206 - Partial Content
207: 'aVEnRD', // '6514472979', // 207 - Multi-Status
300: 'aW7mac', // '6519540181', // 300 - Multiple Choices
301: 'aW7mb4', // '6519540231', // 301 - Moved Permanently
302: 'aV6jKp', // '6508023829', // 302 - Found
303: 'aVxtaK', // '6513125065', // 303 - See Other
304: 'aXY3dH', // '6540551929', // 304 - Not Modified
305: 'aXX5LK', // '6540365403', // 305 - Use Proxy
307: 'aVwQnk', // '6513001269', // 307 - Temporary Redirect
400: 'aXYDeT', // '6540669737', // 400 - Bad Request
401: 'aV6jwe', // '6508023065', // 401 - Unauthorized
402: 'aVwQoe', // '6513001321', // 402 - Payment Required
403: 'aV6jFK', // '6508023617', // 403 - Forbidden
404: 'aV6juR', // '6508022985', // 404 - Not Found
405: 'aV6jE8', // '6508023523', // 405 - Method Not Allowed
406: 'aV6jxa', // '6508023119', // 406 - Not Acceptable
408: 'aV6jyc', // '6508023179', // 408 - Request Timeout
409: 'aV6jzz', // '6508023259', // 409 - Conflict
410: 'aVES2H', // '6514567755', // 410 - Gone
411: 'aXYVpT', // '6540724141', // 411 - Length Required
413: 'aV6jHZ', // '6508023747', // 413 - Request Entity Too Large
414: 'aV6jBa', // '6508023351', // 414 - Request-URI Too Long
416: 'aVxQvr', // '6513196851', // 416 - Requested Range Not Satisfiable
417: 'aV6jGP', // '6508023679', // 417 - Expectation Failed
418: 'aV6J7c', // '6508102407', // 418 - I'm a teapot
422: 'aVEnTt', // '6514473085', // 422 - Unprocessable Entity
423: 'aVEyVZ', // '6514510235', // 423 - Locked
424: 'aVEWZ6', // '6514584423', // 424 - Failed Dependency
425: 'aXYdzH', // '6540586787', // 425 - Unordered Collection
426: 'aVdo4M', // '6509400771', // 426 - Upgrade Required
429: 'aVdo8F', // '6509400997', // 429 - Too Many Requests
431: 'aVdo3n', // '6509400689', // 431 - Request Header Fields Too Large
444: 'aVdo1P', // '6509400599', // 444 - No Response
450: 'aVxtbK', // '6513125123', // 450 - Blocked by Windows Parental Controls
451: 'eTiGQd', // '9113233540', // 451 - Unavailable for Legal Reasons
500: 'aVdo6e', // '6509400855', // 500 - Internal Server Error
502: 'aV6jCv', // '6508023429', // 502 - Bad Gateway
503: 'aXYvop', // '6540643319', // 503 - Service Unavailable
506: 'aXYvnH', // '6540643279', // 506 - Variant Also Negotiates
507: 'aVdnZa', // '6509400503', // 507 - Insufficient Storage
508: 'aVdnYa', // '6509400445', // 508 - Loop Detected
509: 'aXXg1V', // '6540399865', // 509 - Bandwidth Limit Exceeded
599: 'aVdo7v', // '6509400929', // 599 - Network connect timeout error
}
module.exports.get_image = function(status) {
if (status in images) {
return 'http://flic.kr/p/'+images[status]
//return 'https://secure.flickr.com/photos/girliemac/'+images[status]+'/in/set-72157628409467125/lightbox/'
}
}
module.exports.middleware = function(req, res, next) {
var _writeHead = res.writeHead
res.writeHead = function(status) {
if (status in images) {
res.setHeader('X-Status-Cat', module.exports.get_image(status))
}
_writeHead.apply(res, arguments)
}
next()
}
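
The middleware above wraps writeHead so any mapped status code gains a header
pointing at the matching photo; per the table, a 404 response would carry:

X-Status-Cat: http://flic.kr/p/aV6juR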


@@ -1,134 +1,165 @@
var async = require('async');
var semver = require('semver');
var UError = require('./error').UserError;
var Local = require('./local-storage');
var Proxy = require('./up-storage');
var mystreams = require('./streams');
var utils = require('./utils');
var async = require('async')
, assert = require('assert')
, UError = require('./error').UserError
, Local = require('./local-storage')
, Proxy = require('./up-storage')
, mystreams = require('./streams')
, utils = require('./utils')
, Logger = require('./logger')
//
// Implements Storage interface
// (same for storage.js, local-storage.js, up-storage.js)
//
function Storage(config) {
if (!(this instanceof Storage)) return new Storage(config);
if (!(this instanceof Storage)) return new Storage(config)
this.config = config;
this.config = config
// we support a number of uplinks, but only one local storage
// Proxy and Local classes should have similar API interfaces
this.uplinks = {};
this.uplinks = {}
for (var p in config.uplinks) {
this.uplinks[p] = new Proxy(config.uplinks[p], config);
this.uplinks[p] = new Proxy(config.uplinks[p], config)
this.uplinks[p].upname = p
}
this.local = new Local(config);
this.local = new Local(config)
this.logger = Logger.logger.child()
return this;
return this
}
//
// Add a {name} package to a system
//
// Function checks if package with the same name is available from uplinks.
// If it isn't, we create package metadata locally and send requests to do
// the same to all uplinks with write access. If all actions succeeded, we
// report success, if just one uplink fails, we abort.
// If it isn't, we create package locally
//
// TODO: if a package is uploaded to uplink1, but upload to uplink2 fails,
// we report failure, but package is not removed from uplink1. This might
// require manual intervention.
//
// Used storages: local (write) && uplinks (proxy_access, r/o) &&
// uplinks (proxy_publish, write)
// Used storages: local (write) && uplinks
//
Storage.prototype.add_package = function(name, metadata, callback) {
var self = this;
var self = this
var uplinks = [];
for (var i in self.uplinks) {
if (self.config.proxy_access(name, i)) {
uplinks.push(self.uplinks[i]);
}
// NOTE:
// - when we check whether a package exists, we ask ALL uplinks
// - when we publish a package, we only publish it to some of them
// so all requests are necessary
check_package_local(function(err) {
if (err) return callback(err)
check_package_remote(function(err) {
if (err) return callback(err)
publish_package(function(err) {
if (err) return callback(err)
callback()
})
})
})
function check_package_local(cb) {
self.local.get_package(name, {}, function(err, results) {
if (err && err.status !== 404) return cb(err)
if (results) {
return cb(new UError({
status: 409,
msg: 'this package is already present'
}))
}
cb()
})
}
async.map(uplinks, function(up, cb) {
up.get_package(name, function(err, res) {
cb(null, [err, res]);
});
}, function(err, results) {
for (var i=0; i<results.length; i++) {
// checking error
// if uplink fails with a status other than 404, we report failure
if (results[i][0] != null) {
if (results[i][0].status !== 404) {
return callback(new UError({
status: 503,
msg: 'one of the uplinks is down, refuse to publish'
}));
function check_package_remote(cb) {
self._sync_package_with_uplinks(name, null, {}, function(err, results, err_results) {
// something weird
if (err && err.status !== 404) return cb(err)
// checking package
if (results) {
return cb(new UError({
status: 409,
msg: 'this package is already present'
}))
}
for (var i=0; i<err_results.length; i++) {
// checking error
// if uplink fails with a status other than 404, we report failure
if (err_results[i][0] != null) {
if (err_results[i][0].status !== 404) {
return cb(new UError({
status: 503,
msg: 'one of the uplinks is down, refuse to publish'
}))
}
}
}
// checking package
if (results[i][1] != null) {
return callback(new UError({
status: 409,
msg: 'this package is already present'
}));
}
}
uplinks = [];
for (var i in self.uplinks) {
if (self.config.proxy_publish(name, i)) {
uplinks.push(self.uplinks[i]);
}
}
async.map(uplinks, function(up, cb) {
up.add_package(name, metadata, cb);
}, function(err, results) {
if (err) {
return callback(new UError({
status: 503,
msg: 'can\'t upload to one of the uplinks, refuse to publish'
}));
}
self.local.add_package(name, metadata, callback);
});
});
return cb()
})
}
function publish_package(cb) {
self.local.add_package(name, metadata, callback)
}
}
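A hedged sketch of how the waterfall above surfaces to a caller; `config` and `metadata` are assumed to be already parsed, and the error status values are the ones set by the UError calls above:

var Storage = require('./storage')
var storage = Storage(config) // config: parsed server configuration (assumed)

storage.add_package('my-pkg', metadata, function(err) {
  if (err) {
    // err.status is 409 if the package already exists (locally or on an uplink),
    // 503 if one of the uplinks is down
    return console.error(err)
  }
  console.log('created new package')
})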
//
// Add a new version of package {name} to a system
//
// Function uploads a new package version to all uplinks with write access
// and if everything succeeded it adds it locally.
//
// TODO: if a package is uploaded to uplink1, but upload to uplink2 fails,
// we report failure, but the package is not removed from uplink1. This might
// require manual intervention.
//
// Used storages: local (write) && uplinks (proxy_publish, write)
// Used storages: local (write)
//
Storage.prototype.add_version = function(name, version, metadata, tag, callback) {
var self = this;
return this.local.add_version(name, version, metadata, tag, callback)
}
var uplinks = [];
for (var i in self.uplinks) {
if (self.config.proxy_publish(name, i)) {
uplinks.push(self.uplinks[i]);
}
}
async.map(uplinks, function(up, cb) {
up.add_version(name, version, metadata, tag, cb);
}, function(err, results) {
if (err) {
return callback(new UError({
status: 503,
msg: 'can\'t upload to one of the uplinks, refuse to publish'
}));
}
self.local.add_version(name, version, metadata, tag, callback);
});
//
// Tags a package version with a provided tag
//
// Used storages: local (write)
//
Storage.prototype.add_tag = function(name, version, tag, callback) {
return this.local.add_tag(name, version, tag, callback)
}
//
// Change an existing package (i.e. unpublish one version)
//
// Function changes package info in local storage and in all uplinks with
// write access.
//
// Used storages: local (write)
//
Storage.prototype.change_package = function(name, metadata, revision, callback) {
return this.local.change_package(name, metadata, revision, callback)
}
//
// Remove a package from a system
//
// Function removes a package from local storage
//
// Used storages: local (write)
//
Storage.prototype.remove_package = function(name, callback) {
return this.local.remove_package(name, callback)
}
//
// Remove a tarball from a system
//
// Function removes a tarball from local storage.
// The tarball in question must not be referenced by any existing
// versions, i.e. the package version should be unpublished first.
//
// Used storages: local (write)
//
Storage.prototype.remove_tarball = function(name, filename, revision, callback) {
return this.local.remove_tarball(name, filename, revision, callback)
}
//
@@ -136,70 +167,10 @@ Storage.prototype.add_version = function(name, version, metadata, tag, callback)
//
// Function is synchronous and returns a WritableStream
//
// Function uploads a tarball to all uplinks with write access and to
// local storage in parallel with a speed of a slowest pipe. It reports
// success if all uploads succeed.
//
// Used storages: local (write) && uplinks (proxy_publish, write)
// Used storages: local (write)
//
Storage.prototype.add_tarball = function(name, filename) {
var stream = new mystreams.UploadTarballStream();
var self = this;
var upstreams = [];
upstreams.push(self.local.add_tarball(name, filename));
for (var i in self.uplinks) {
if (self.config.proxy_publish(name, i)) {
upstreams.push(self.uplinks[i].add_tarball(name, filename));
}
}
function bail(err) {
upstreams.forEach(function(upstream) {
upstream.abort();
});
}
upstreams.forEach(function(upstream) {
stream.pipe(upstream);
upstream.on('error', function(err) {
if (err.code === 'EEXISTS') {
stream.emit('error', new UError({
status: 409,
msg: 'this tarball is already present'
}));
} else if (!stream.status && upstream != self.local) {
stream.emit('error', new UError({
status: 503,
msg: 'one or more uplinks are unreachable'
}));
} else {
stream.emit('error', err);
}
bail(err);
});
upstream.on('success', function() {
upstream._sinopia_success = true;
if (upstreams.filter(function(upstream) {
return !upstream._sinopia_success;
}).length == 0) {
stream.emit('success');
}
});
});
stream.abort = function() {
bail();
};
stream.done = function() {
upstreams.forEach(function(upstream) {
upstream.done();
});
};
return stream;
return this.local.add_tarball(name, filename)
}
//
@@ -213,71 +184,94 @@ Storage.prototype.add_tarball = function(name, filename) {
// Used storages: local || uplink (just one)
//
Storage.prototype.get_tarball = function(name, filename) {
var stream = new mystreams.ReadTarballStream();
stream.abort = function() {};
var self = this;
var stream = new mystreams.ReadTarballStream()
stream.abort = function() {}
var self = this
// if someone is requesting a tarball, we should already have some
// information about it, so fetching package info is unnecessary
// trying local first
var rstream = self.local.get_tarball(name, filename);
var is_open = false;
var rstream = self.local.get_tarball(name, filename)
var is_open = false
rstream.on('error', function(err) {
if (is_open || err.status !== 404) {
return stream.emit('error', err);
return stream.emit('error', err)
}
// local reported 404
var err404 = err;
var uplink = null;
rstream.abort();
rstream = null; // gc
var err404 = err
var uplink = null
rstream.abort()
rstream = null // gc
self.local.get_package(name, function(err, info) {
if (err) return stream.emit('error', err);
if (!err && info._distfiles && info._distfiles[filename] != null) {
// information about this file exists locally
serve_file(info._distfiles[filename])
if (info._distfiles[filename] == null) {
return stream.emit('error', err404);
} else {
// we know nothing about this file, trying to get information elsewhere
self._sync_package_with_uplinks(name, info, {}, function(err, info) {
if (err) return stream.emit('error', err)
if (!info._distfiles || info._distfiles[filename] == null) {
return stream.emit('error', err404)
}
serve_file(info._distfiles[filename])
})
}
var file = info._distfiles[filename];
var uplink = null;
for (var p in self.uplinks) {
if (self.uplinks[p].can_fetch_url(file.url)) {
uplink = self.uplinks[p];
}
}
if (uplink == null) {
uplink = new Proxy({
url: file.url,
_autogenerated: true,
}, self.config);
}
var savestream = self.local.add_tarball(name, filename);
savestream.on('error', function(err) {
savestream.abort();
stream.emit('error', err);
});
savestream.on('open', function() {
var rstream2 = uplink.get_url(file.url);
rstream2.on('error', function(err) {
stream.emit('error', err);
});
// XXX: check, what would happen if client disconnects?
rstream2.pipe(stream);
rstream2.pipe(savestream);
});
});
});
})
})
rstream.on('content-length', function(v) {
stream.emit('content-length', v)
})
rstream.on('open', function() {
is_open = true;
rstream.pipe(stream);
});
return stream;
is_open = true
rstream.pipe(stream)
})
return stream
function serve_file(file) {
var uplink = null
for (var p in self.uplinks) {
if (self.uplinks[p].can_fetch_url(file.url)) {
uplink = self.uplinks[p]
}
}
if (uplink == null) {
uplink = new Proxy({
url: file.url,
_autogenerated: true,
}, self.config)
}
var savestream = self.local.add_tarball(name, filename)
savestream.on('error', function(err) {
savestream.abort()
stream.emit('error', err)
})
savestream.on('open', function() {
var rstream2 = uplink.get_url(file.url)
rstream2.on('error', function(err) {
savestream.abort()
stream.emit('error', err)
})
rstream2.on('end', function() {
savestream.done()
})
rstream2.on('content-length', function(v) {
stream.emit('content-length', v)
savestream.emit('content-length', v)
})
rstream2.pipe(stream)
rstream2.pipe(savestream)
})
}
}
//
@@ -289,71 +283,149 @@ Storage.prototype.get_tarball = function(name, filename) {
//
// Used storages: local && uplink (proxy_access)
//
Storage.prototype.get_package = function(name, callback) {
var self = this;
var uplinks = [this.local];
for (var i in this.uplinks) {
if (this.config.proxy_access(name, i)) {
uplinks.push(this.uplinks[i]);
Storage.prototype.get_package = function(name, options, callback) {
if (typeof(options) === 'function') callback = options, options = {}
var self = this
self.local.get_package(name, options, function(err, data) {
if (err && (!err.status || err.status >= 500)) {
// report internal errors right away
return callback(err)
}
self._sync_package_with_uplinks(name, data, options, function(err, result, uplink_errors) {
if (err) return callback(err)
var whitelist = ['_rev', 'name', 'versions', 'dist-tags']
for (var i in result) {
if (whitelist.indexOf(i) === -1) delete result[i]
}
if (self.config.ignore_latest_tag) {
result['dist-tags'].latest = utils.semver_sort(Object.keys(result.versions))
}
for (var i in result['dist-tags']) {
if (Array.isArray(result['dist-tags'][i])) {
result['dist-tags'][i] = result['dist-tags'][i][result['dist-tags'][i].length-1]
if (result['dist-tags'][i] == null) delete result['dist-tags'][i]
}
}
// npm can throw if this field doesn't exist
result._attachments = {}
callback(null, result, uplink_errors)
})
})
}
// function fetches package information from uplinks and synchronizes it with local data
// if the package is available locally, it MUST be provided in pkginfo
// returns callback(err, result, uplink_errors)
Storage.prototype._sync_package_with_uplinks = function(name, pkginfo, options, callback) {
var self = this
if (!pkginfo) {
var exists = false
pkginfo = {
name: name,
versions: {},
'dist-tags': {},
_uplinks: {},
}
} else {
var exists = true
}
var uplinks = []
for (var i in self.uplinks) {
if (self.config.proxy_access(name, i)) {
uplinks.push(self.uplinks[i])
}
}
var result = {
name: name,
versions: {},
'dist-tags': {},
};
var exists = false;
var latest;
async.map(uplinks, function(up, cb) {
up.get_package(name, function(err, up_res) {
if (err) return cb();
var _options = Object.create(options)
if (utils.is_object(pkginfo._uplinks[up.upname])) {
var fetched = pkginfo._uplinks[up.upname].fetched
if (fetched && fetched > (Date.now() - up.maxage)) {
return cb()
}
if (up === self.local) {
// file exists in local repo
exists = true;
_options.etag = pkginfo._uplinks[up.upname].etag
}
up.get_package(name, _options, function(err, up_res, etag) {
if (err && err.status === 304)
pkginfo._uplinks[up.upname].fetched = Date.now()
if (err || !up_res) return cb(null, [err || new Error('no data')])
try {
utils.validate_metadata(up_res, name)
} catch(err) {
self.logger.error({
sub: 'out',
err: err,
}, 'package.json validating error @{!err.message}\n@{err.stack}')
return cb(null, [err])
}
pkginfo._uplinks[up.upname] = {
etag: etag,
fetched: Date.now()
}
try {
utils.validate_metadata(up_res, name);
Storage._merge_versions(pkginfo, up_res, self.config)
} catch(err) {
return cb();
self.logger.error({
sub: 'out',
err: err,
}, 'package.json parsing error @{!err.message}\n@{err.stack}')
return cb(null, [err])
}
var this_version = up_res['dist-tags'].latest;
if (latest == null
|| (!semver.gt(latest, this_version) && this_version)) {
latest = this_version;
var is_latest = true;
}
['versions', 'dist-tags'].forEach(function(key) {
for (var i in up_res[key]) {
if (!result[key][i] || is_latest) {
result[key][i] = up_res[key][i];
}
}
});
// if we got to this point, assume that the correct package exists
// on the uplink
exists = true;
cb();
});
}, function(err) {
if (err) return callback(err);
exists = true
cb()
})
}, function(err, uplink_errors) {
assert(!err && Array.isArray(uplink_errors))
if (!exists) {
return callback(new UError({
status: 404,
msg: 'no such package available'
}));
}), null, uplink_errors)
}
callback(null, result);
self.local.update_versions(name, result, function(){});
});
self.local.update_versions(name, pkginfo, function(err, pkginfo) {
if (err) return callback(err)
return callback(null, pkginfo, uplink_errors)
})
})
}
module.exports = Storage;
// function takes local package info and info from an uplink and tries to merge them
// exported for unit tests only
Storage._merge_versions = function(local, up, config) {
// copy new versions to a cache
// NOTE: if a certain version was updated, we can't refresh it reliably
for (var i in up.versions) {
if (local.versions[i] == null) {
local.versions[i] = up.versions[i]
}
}
// refresh dist-tags
for (var i in up['dist-tags']) {
utils.tag_version(local, up['dist-tags'][i], i, config || {})
}
}
module.exports = Storage


@@ -1,66 +1,53 @@
var stream = require('stream');
var util = require('util');
var stream = require('stream')
, util = require('util')
//
// This stream is used to read tarballs from the repository
//
function ReadTarball(options) {
stream.PassThrough.call(this, options);
stream.PassThrough.call(this, options)
// called when data is not needed anymore
add_abstract_method(this, 'abort');
add_abstract_method(this, 'abort')
}
util.inherits(ReadTarball, stream.PassThrough);
module.exports.ReadTarballStream = ReadTarball;
util.inherits(ReadTarball, stream.PassThrough)
module.exports.ReadTarballStream = ReadTarball
//
// This stream is used to upload tarballs to a repository
//
function UploadTarball(options) {
stream.PassThrough.call(this, options);
stream.PassThrough.call(this, options)
// called when the user closes the connection before the upload finishes
add_abstract_method(this, 'abort');
add_abstract_method(this, 'abort')
// called when upload finishes successfully
add_abstract_method(this, 'done');
add_abstract_method(this, 'done')
}
util.inherits(UploadTarball, stream.PassThrough);
module.exports.UploadTarballStream = UploadTarball;
util.inherits(UploadTarball, stream.PassThrough)
module.exports.UploadTarballStream = UploadTarball
//
// This function intercepts calls to abstract methods and replays them later,
// allowing us to attach the real implementations once we are ready to do so
//
function add_abstract_method(self, name) {
self._called_methods = self._called_methods || {};
self._called_methods = self._called_methods || {}
self.__defineGetter__(name, function() {
return function() {
self._called_methods[name] = true;
self._called_methods[name] = true
}
});
})
self.__defineSetter__(name, function(fn) {
delete self[name];
self[name] = fn;
delete self[name]
self[name] = fn
if (self._called_methods && self._called_methods[name]) {
delete self._called_methods[name];
self[name]();
delete self._called_methods[name]
self[name]()
}
});
}
function __test() {
var test = new ReadTarball();
test.abort();
setTimeout(function() {
test.abort = function() {
console.log('ok');
};
test.abort = function() {
throw 'fail';
};
}, 100);
})
}
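The removed __test function hints at the replay contract; a standalone sketch of the same behaviour (assuming the module above is saved as ./streams):

var ReadTarball = require('./streams').ReadTarballStream

var t = new ReadTarball()
t.abort() // no real handler attached yet, so the call is recorded

setTimeout(function() {
  t.abort = function() { console.log('ok') }       // replayed immediately on assignment
  t.abort = function() { throw new Error('fail') } // flag already cleared, never replayed
}, 100)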


@@ -1,201 +1,350 @@
var URL = require('url');
var request = require('request');
var UError = require('./error').UserError;
var mystreams = require('./streams');
var URL = require('url')
, request = require('request')
, Stream = require('stream')
, zlib = require('zlib')
, UError = require('./error').UserError
, mystreams = require('./streams')
, Logger = require('./logger')
, utils = require('./utils')
, parse_interval = require('./config').parse_interval
, encode = encodeURIComponent
//
// Implements Storage interface
// (same for storage.js, local-storage.js, up-storage.js)
//
function Storage(config, mainconfig) {
if (!(this instanceof Storage)) return new Storage(config);
this.config = config;
this.is_alive = false;
this.userAgent = mainconfig.user_agent;
this.ca;
if (!(this instanceof Storage)) return new Storage(config)
this.config = config
this.failed_requests = 0
this.userAgent = mainconfig.user_agent
this.ca = config.ca
this.logger = Logger.logger.child({sub: 'out'})
this.server_id = mainconfig.server_id
this.url = URL.parse(this.config.url);
this.url = URL.parse(this.config.url)
if (this.url.hostname === 'registry.npmjs.org') {
this.ca = require('./npmsslkeys');
// npm registry is too slow working with ssl :(
/*if (this.config._autogenerated) {
// encrypt all the things!
this.url.protocol = 'https';
this.config.url = URL.format(this.url);
this.url.protocol = 'https'
this.config.url = URL.format(this.url)
}*/
}
this.config.url = this.config.url.replace(/\/$/, '');
return this;
_setupProxy.call(this, this.url.hostname, config, mainconfig, this.url.protocol === 'https:')
this.config.url = this.config.url.replace(/\/$/, '')
if (Number(this.config.timeout) >= 1000) {
this.logger.warn('Too big timeout value: ' + this.config.timeout + '\nWe changed time format to nginx-like one\n(see http://wiki.nginx.org/ConfigNotation)\nso please update your config accordingly')
}
// a bunch of different configurable timers
this.maxage = parse_interval(config_get('maxage' , '2m' ))
this.timeout = parse_interval(config_get('timeout' , '30s'))
this.max_fails = Number(config_get('max_fails' , 2 ))
this.fail_timeout = parse_interval(config_get('fail_timeout', '5m' ))
return this
// just a helper (`config[key] || default` doesn't work because of zeroes)
function config_get(key, def) {
return config[key] != null ? config[key] : def
}
}
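The defaults above map onto an uplink configuration like this hypothetical fragment, using the nginx-style interval notation:

uplinks:
  npmjs:
    url: https://registry.npmjs.org/
    maxage: 2m        # how long cached metadata is considered fresh
    timeout: 30s      # per-request timeout
    max_fails: 2      # failed requests before the uplink is considered offline
    fail_timeout: 5m  # how long it stays offline after max_fails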
function _setupProxy(hostname, config, mainconfig, isHTTPS) {
var no_proxy
var proxy_key = isHTTPS ? 'https_proxy' : 'http_proxy'
// get http_proxy and no_proxy configs
if (proxy_key in config) {
this.proxy = config[proxy_key]
} else if (proxy_key in mainconfig) {
this.proxy = mainconfig[proxy_key]
}
if ('no_proxy' in config) {
no_proxy = config.no_proxy
} else if ('no_proxy' in mainconfig) {
no_proxy = mainconfig.no_proxy
}
// use a wget-like algorithm to decide whether the proxy should be bypassed
if (hostname[0] !== '.') hostname = '.' + hostname
if (typeof(no_proxy) === 'string' && no_proxy.length) {
no_proxy = no_proxy.split(',')
}
if (Array.isArray(no_proxy)) {
for (var i=0; i<no_proxy.length; i++) {
var no_proxy_item = no_proxy[i]
if (no_proxy_item[0] !== '.') no_proxy_item = '.' + no_proxy_item
if (hostname.lastIndexOf(no_proxy_item) === hostname.length - no_proxy_item.length) {
if (this.proxy) {
this.logger.debug({url: this.url.href, rule: no_proxy_item},
'not using proxy for @{url}, excluded by @{rule} rule')
this.proxy = false
}
break
}
}
}
// if it's not a string (e.g. `false`), don't use it
if (typeof(this.proxy) !== 'string') {
delete this.proxy
} else {
this.logger.debug({url: this.url.href, proxy: this.proxy},
'using proxy @{proxy} for @{url}')
}
}
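A hypothetical config fragment exercising the wget-like exclusion logic above (hostnames are illustrative):

http_proxy: http://proxy.corp.example:3128
no_proxy: localhost,.internal.example

uplinks:
  npmjs:
    url: http://registry.npmjs.org/         # goes through the proxy
  mirror:
    url: http://registry.internal.example/  # excluded by the .internal.example rule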
Storage.prototype.request = function(options, cb) {
var self = this;
var headers = options.headers || {};
headers.accept = headers.accept || 'application/json';
headers['user-agent'] = headers['user-agent'] || this.userAgent;
var req = request({
url: this.config.url + options.uri,
method: options.method || 'GET',
if (!this.status_check()) {
var req = new Stream.Readable()
process.nextTick(function() {
if (typeof(cb) === 'function') cb(new Error('uplink is offline'))
req.emit('error', new Error('uplink is offline'))
})
// preventing 'Uncaught, unspecified "error" event'
req.on('error', function(){})
return req
}
var self = this
, headers = options.headers || {}
headers['Accept'] = headers['Accept'] || 'application/json'
headers['Accept-Encoding'] = headers['Accept-Encoding'] || 'gzip'
headers['User-Agent'] = headers['User-Agent'] || this.userAgent
var method = options.method || 'GET'
, uri = options.uri_full || (this.config.url + options.uri)
self.logger.info({
method: method,
headers: headers,
json: options.json || true,
uri: uri,
}, "making request: '@{method} @{uri}'")
if (utils.is_object(options.json)) {
var json = JSON.stringify(options.json)
headers['Content-Type'] = headers['Content-Type'] || 'application/json'
}
var req = request({
url: uri,
method: method,
headers: headers,
body: json,
ca: this.ca,
}, function(err) {
if (cb) cb.apply(self, arguments);
});
req.on('response', function() {
self.status_check(true);
});
req.on('error', function() {
self.status_check(false);
});
return req;
proxy: this.proxy,
encoding: null,
timeout: this.timeout,
}, function(err, res, body) {
var error
var res_length = err ? 0 : body.length
do_gunzip(function() {
do_decode()
do_log()
if (cb) cb(err, res, body)
})
function do_gunzip(cb) {
if (err) return cb()
if (res.headers['content-encoding'] !== 'gzip') return cb()
zlib.gunzip(body, function(er, buf) {
if (er) err = er
body = buf
return cb()
})
}
function do_decode() {
if (err) return error = err.message
if (options.json && res.statusCode < 300) {
try {
body = JSON.parse(body.toString('utf8'))
} catch(_err) {
body = {}
err = _err
error = err.message
}
}
if (!err && utils.is_object(body)) {
if (body.error) {
error = body.error
}
}
}
function do_log() {
var msg = '@{!status}, req: \'@{request.method} @{request.url}\''
if (error) {
msg += ', error: @{!error}'
} else {
msg += ', bytes: @{bytes.in}/@{bytes.out}'
}
self.logger.warn({
err: err,
request: {method: method, url: uri},
level: 35, // http
status: res != null ? res.statusCode : 'ERR',
error: error,
bytes: {
in: json ? json.length : 0,
out: res_length || 0,
}
}, msg)
}
})
var status_called = false
req.on('response', function(res) {
if (!req._sinopia_aborted && !status_called) {
status_called = true
self.status_check(true)
}
})
req.on('error', function(err) {
if (!req._sinopia_aborted && !status_called) {
status_called = true
self.status_check(false)
}
})
return req
}
Storage.prototype.status_check = function(alive) {
if (arguments.length === 0) {
if (!this.is_alive && Math.abs(Date.now() - this.is_alive_time()) > 60*1000) {
return false;
if (this.failed_requests >= this.max_fails && Math.abs(Date.now() - this.last_request_time) < this.fail_timeout) {
return false
} else {
return true;
return true
}
} else {
this.is_alive = alive;
this.is_alive_time = Date.now();
if (alive) {
if (this.failed_requests >= this.max_fails) {
this.logger.warn({host: this.url.host}, 'host @{host} is back online')
}
this.failed_requests = 0
} else {
this.failed_requests++
if (this.failed_requests === this.max_fails) {
this.logger.warn({host: this.url.host}, 'host @{host} is now offline')
}
}
this.last_request_time = Date.now()
}
}
Storage.prototype.can_fetch_url = function(url) {
url = URL.parse(url);
url = URL.parse(url)
return url.protocol === this.url.protocol
&& url.host === this.url.host
&& url.path.indexOf(this.url.path) === 0
}
Storage.prototype.add_package = function(name, metadata, callback) {
Storage.prototype.get_package = function(name, options, callback) {
if (typeof(options) === 'function') callback = options, options = {}
var headers = {}
if (options.etag) {
headers['If-None-Match'] = options.etag
headers['Accept'] = 'application/octet-stream'
}
this._add_proxy_headers(options.req, headers)
this.request({
uri: '/' + escape(name),
method: 'PUT',
json: metadata,
}, function(err, res, body) {
if (err) return callback(err);
if (!(res.statusCode >= 200 && res.statusCode < 300)) {
return callback(new Error('bad status code: ' + res.statusCode));
}
callback(null, body);
});
}
Storage.prototype.add_version = function(name, version, metadata, tag, callback) {
this.request({
uri: '/' + escape(name) + '/' + escape(version) + '/-tag/' + escape(tag),
method: 'PUT',
json: metadata,
}, function(err, res, body) {
if (err) return callback(err);
if (!(res.statusCode >= 200 && res.statusCode < 300)) {
return callback(new Error('bad status code: ' + res.statusCode));
}
callback(null, body);
});
}
Storage.prototype.add_tarball = function(name, filename) {
var stream = new mystreams.UploadTarballStream();
var self = this;
var wstream = this.request({
uri: '/' + escape(name) + '/-/' + escape(filename) + '/whatever',
method: 'PUT',
headers: {
'content-type': 'application/octet-stream'
},
});
wstream.on('response', function(res) {
if (!(res.statusCode >= 200 && res.statusCode < 300)) {
return stream.emit('error', new UError({
msg: 'bad uplink status code: ' + res.statusCode,
status: 500,
}));
}
stream.emit('success');
});
wstream.on('error', function(err) {
stream.emit('error', err);
});
stream.abort = function() {
process.nextTick(function() {
if (wstream.req) {
wstream.req.abort();
}
});
};
stream.done = function() {};
stream.pipe(wstream);
return stream;
}
Storage.prototype.get_package = function(name, callback) {
this.request({
uri: '/' + escape(name),
uri: '/' + encode(name),
json: true,
headers: headers,
}, function(err, res, body) {
if (err) return callback(err);
if (err) return callback(err)
if (res.statusCode === 404) {
return callback(new UError({
msg: 'package doesn\'t exist on uplink',
status: 404,
}));
}))
}
if (!(res.statusCode >= 200 && res.statusCode < 300)) {
return callback(new Error('bad status code: ' + res.statusCode));
var error = new Error('bad status code: ' + res.statusCode)
error.status = res.statusCode
return callback(error)
}
callback(null, body);
});
callback(null, body, res.headers.etag)
})
}
Storage.prototype.get_tarball = function(name, filename) {
return this.get_url(this.config.url + '/' + name + '/-/' + filename);
Storage.prototype.get_tarball = function(name, options, filename) {
if (!options) options = {}
return this.get_url(this.config.url + '/' + name + '/-/' + filename)
}
Storage.prototype.get_url = function(url) {
url = URL.parse(url);
var stream = new mystreams.ReadTarballStream();
stream.abort = function() {};
var stream = new mystreams.ReadTarballStream()
stream.abort = function() {}
var current_length = 0, expected_length
var rstream = this.request({
uri: url.path,
uri_full: url,
encoding: null,
});
headers: {
Accept: 'application/octet-stream',
},
})
rstream.on('response', function(res) {
if (res.statusCode === 404) {
return stream.emit('error', new UError({
msg: 'file doesn\'t exist on uplink',
status: 404,
}));
}))
}
if (!(res.statusCode >= 200 && res.statusCode < 300)) {
return stream.emit('error', new UError({
msg: 'bad uplink status code: ' + res.statusCode,
status: 500,
}));
}))
}
if (res.headers['content-length']) {
expected_length = res.headers['content-length']
stream.emit('content-length', res.headers['content-length'])
}
rstream.pipe(stream);
});
rstream.pipe(stream)
})
rstream.on('error', function(err) {
stream.emit('error', err);
});
return stream;
stream.emit('error', err)
})
rstream.on('data', function(d) {
current_length += d.length
})
rstream.on('end', function(d) {
if (d) current_length += d.length
if (expected_length && current_length != expected_length)
stream.emit('error', new Error('content length mismatch'))
})
return stream
}
module.exports = Storage;
Storage.prototype._add_proxy_headers = function(req, headers) {
if (req) {
headers['X-Forwarded-For'] = (
(req && req.headers['x-forwarded-for']) ?
req.headers['x-forwarded-for'] + ', ' :
''
) + req.connection.remoteAddress
}
// always attach Via header to avoid loops, even if we're not proxying
headers['Via'] =
(req && req.headers['via']) ?
req.headers['via'] + ', ' :
''
headers['Via'] += '1.1 ' + this.server_id + ' (Sinopia)'
}
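For a request that already passed through one proxy, the headers built above would look roughly like this (addresses and server_id are hypothetical):

// X-Forwarded-For: 203.0.113.7, 10.0.0.5       (upstream chain + req.connection.remoteAddress)
// Via: 1.1 corp-proxy, 1.1 f3a1b2c4 (Sinopia)  (upstream Via + this server's entry)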
module.exports = Storage


@@ -1,53 +1,58 @@
var assert = require('assert');
var URL = require('url');
var assert = require('assert')
, semver = require('semver')
, Logger = require('./logger')
, URL = require('url')
// from normalize-package-data/lib/fixer.js
module.exports.validate_name = function(name) {
name = name.toLowerCase();
if (typeof(name) !== 'string') return false
name = name.toLowerCase()
if (
name.charAt(0) === "." || // ".bin", etc.
name.charAt(0) === '.' || // ".bin", etc.
name.charAt(0) === '-' || // "-" is reserved by couchdb
name.match(/[\/@\s\+%:]/) ||
name !== encodeURIComponent(name) ||
name.toLowerCase() === "node_modules" ||
name.toLowerCase() === "__proto__" ||
name.toLowerCase() === "favicon.ico"
name === 'node_modules' ||
name === '__proto__' ||
name === 'package.json' ||
name === 'favicon.ico'
) {
return false;
return false
} else {
return true;
return true
}
}
function is_object(obj) {
return typeof(obj) === 'object' && !Array.isArray(obj);
module.exports.is_object = function(obj) {
return typeof(obj) === 'object' && obj !== null && !Array.isArray(obj)
}
module.exports.validate_metadata = function(object, name) {
assert(is_object(object));
assert.equal(object.name, name);
if (!is_object(object['dist-tags'])) {
object['dist-tags'] = {};
assert(module.exports.is_object(object), 'not a json object')
assert.equal(object.name, name)
if (!module.exports.is_object(object['dist-tags'])) {
object['dist-tags'] = {}
}
if (!is_object(object['versions'])) {
object['versions'] = {};
if (!module.exports.is_object(object['versions'])) {
object['versions'] = {}
}
return object;
return object
}
module.exports.parse_tarball_url = function(_url) {
var url = URL.parse(_url);
var url = URL.parse(_url)
var path = url.path.replace(/^\//, '').split('/');
var path = url.path.replace(/^\//, '').split('/')
if (path.length >= 3 && path[path.length-2] === '-') {
var filename = path.pop();
var pkgpath = '/' + filename; // tarball name
pkgpath = '/' + path.pop() + pkgpath; // "-"
pkgpath = '/' + path.pop() + pkgpath; // package.name
var filename = path.pop()
, pkgpath = '/' + filename // tarball name
pkgpath = '/' + path.pop() + pkgpath // "-"
pkgpath = '/' + path.pop() + pkgpath // package.name
} else {
return null;
return null
}
return {
@@ -56,33 +61,87 @@ module.exports.parse_tarball_url = function(_url) {
prepath: '/' + path.join('/'),
pkgpath: pkgpath,
filename: filename,
};
}
}
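For example (URL is hypothetical; only the fields visible above are shown):

// parse_tarball_url('http://localhost:4873/prefix/mypkg/-/mypkg-1.0.0.tgz')
//   → { prepath: '/prefix', pkgpath: '/mypkg/-/mypkg-1.0.0.tgz', filename: 'mypkg-1.0.0.tgz', ... }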
module.exports.filter_tarball_urls = function(pkg, req, config) {
function filter(_url) {
if (!req.headers.host) return _url;
if (!req.headers.host) return _url
var url = module.exports.parse_tarball_url(_url);
var url = module.exports.parse_tarball_url(_url)
// weird url, just return it
if (url == null) return _url;
if (url == null) return _url
if (config.url_prefix != null) {
var result = config.url_prefix.replace(/\/$/, '');
var result = config.url_prefix.replace(/\/$/, '')
} else {
var result = req.protocol + '://' + req.headers.host;
var result = req.protocol + '://' + req.headers.host
}
return result + url.pkgpath;
return result + url.pkgpath
}
for (var ver in pkg.versions) {
if (pkg.versions[ver].dist != null
&& pkg.versions[ver].dist.tarball != null) {
pkg.versions[ver].dist.__sinopia_orig_tarball = pkg.versions[ver].dist.tarball;
pkg.versions[ver].dist.tarball = filter(pkg.versions[ver].dist.tarball);
var dist = pkg.versions[ver].dist
if (dist != null && dist.tarball != null) {
dist.__sinopia_orig_tarball = dist.tarball
dist.tarball = filter(dist.tarball)
}
}
return pkg;
return pkg
}
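A sketch of the rewrite above with config.url_prefix set (values hypothetical; req must carry a Host header, otherwise the URL is returned unchanged):

// before: pkg.versions['1.0.0'].dist.tarball === 'http://localhost:4873/foo/-/foo-1.0.0.tgz'
// after filter_tarball_urls(pkg, req, { url_prefix: 'https://npm.example.com' }):
//   dist.tarball                === 'https://npm.example.com/foo/-/foo-1.0.0.tgz'
//   dist.__sinopia_orig_tarball === 'http://localhost:4873/foo/-/foo-1.0.0.tgz'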
function can_add_tag(tag, config) {
if (!tag) return false
if (tag === 'latest' && config.ignore_latest_tag) return false
return true
}
module.exports.tag_version = function(data, version, tag, config) {
if (!can_add_tag(tag, config)) return
switch(typeof(data['dist-tags'][tag])) {
case 'string':
data['dist-tags'][tag] = [data['dist-tags'][tag]]
break
case 'object': // array
break
default:
data['dist-tags'][tag] = []
}
if (data['dist-tags'][tag].indexOf(version) === -1) {
data['dist-tags'][tag].push(version)
data['dist-tags'][tag] = module.exports.semver_sort(data['dist-tags'][tag])
}
}
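A small sketch of tag accumulation with the functions above (assumes the module is saved as ./utils):

var utils = require('./utils')

var data = { 'dist-tags': {} }
utils.tag_version(data, '1.0.0', 'latest', {})
utils.tag_version(data, '0.9.0', 'latest', {})
console.log(data['dist-tags'].latest) // [ '0.9.0', '1.0.0' ] (semver-sorted array)
utils.tag_version(data, '2.0.0', 'latest', { ignore_latest_tag: true }) // no-op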
// gets version from a package object taking into account semver weirdness
module.exports.get_version = function(object, version) {
if (object.versions[version] != null) return object.versions[version]
try {
version = semver.parse(version, true)
for (var k in object.versions) {
if (version.compare(semver.parse(k, true)) === 0) {
return object.versions[k]
}
}
} catch(err) {
return undefined
}
}
// function filters out bad semver versions and sorts the array
module.exports.semver_sort = function semver_sort(array) {
return array
.filter(function(x) {
if (!semver.parse(x, true)) {
Logger.logger.warn({ver: x}, 'ignoring bad version @{ver}')
return false
}
return true
})
.sort(semver.compareLoose)
.map(String)
}


@@ -1,13 +1,13 @@
# use "yapm install ." if you're installing this from git repository
name: sinopia
version: 0.4.2
version: 0.8.0
description: Private npm repository server
author:
name: Alex Kocharin
email: alex@kocharin.ru
repository:
type: git
url: git://github.com/rlidwka/sinopia
@@ -18,18 +18,30 @@ bin:
sinopia: ./bin/sinopia
dependencies:
express: '>= 3.2.5'
commander: '>= 1.1.1'
js-yaml: '>= 2.0.5'
cookies: '>= 0.3.6'
request: '*'
async: '*'
semver: '*'
minimatch: '*'
express: '>= 3.4.7 < 4.0.0-0'
commander: '>= 2.1.0'
js-yaml: '>= 3.0.1'
cookies: '>= 0.3.8'
request: '>= 2.31.0'
async: '>= 0.2.9'
semver: '>= 2.2.1'
minimatch: '>= 0.2.14'
bunyan: '>= 0.22.1'
mkdirp: '>= 0.3.5'
optionalDependencies:
fs-ext: '>= 0.3.2'
devDependencies:
rimraf: '*'
mocha: '*'
rimraf: '>= 2.2.5'
mocha: '>= 1.17.0'
# linting tools
eslint: '>= 0.4.2'
# for debugging memory leaks, it'll be require()'d if
# installed, but I don't want it to be installed every time
#heapdump: '*'
keywords:
- private
@@ -41,7 +53,8 @@ keywords:
- server
scripts:
test: ./test/start.sh
test: mocha ./test/functional ./test/unit
lint: eslint -c ./.eslint.yaml ./lib
# we depend on streams2 stuff
# it can be replaced with isaacs/readable-stream, ask if you need to use 0.8
@@ -49,5 +62,7 @@ engines:
node: '>=0.10'
preferGlobal: true
license: BSD
# http://www.wtfpl.net/txt/copying/
license: WTFPL

test/README.md (new file, 8 lines)

@@ -0,0 +1,8 @@
All tests are split into three folders:

- `unit` - Tests that cover functions that transform data in a non-trivial way. These tests simply require() a few files and run the code in there, so they are very fast.
- `functional` - Tests that launch a sinopia instance and perform a series of requests to it over http. They are slower than unit tests.
- `integration` - Tests that launch a sinopia instance and make requests to it using npm. They are really slow and can hit a real npm registry.

Unit and functional tests are executed automatically with `yapm test`. Integration tests are supposed to be executed manually from time to time.


@@ -1,82 +0,0 @@
var assert = require('assert');
var readfile = require('fs').readFileSync;
var ex = module.exports;
var server = process.server;
var server2 = process.server2;
ex['trying to fetch non-existent package'] = function(cb) {
server.get_package('testpkg', function(res, body) {
// shouldn't exist yet
assert.equal(res.statusCode, 404);
assert(~body.error.indexOf('no such package'));
cb();
});
};
ex['creating new package'] = function(cb) {
server.put_package('testpkg', require('./lib/package')('testpkg'), function(res, body) {
assert.equal(res.statusCode, 201);
assert(~body.ok.indexOf('created new package'));
cb();
});
};
ex['downloading non-existent tarball'] = function(cb) {
server.get_tarball('testpkg', 'blahblah', function(res, body) {
assert.equal(res.statusCode, 404);
assert(~body.error.indexOf('no such file'));
cb();
});
};
ex['uploading incomplete tarball'] = function(cb) {
server.put_tarball_incomplete('testpkg', 'blahblah1', readfile('fixtures/binary'), 3000, function(res, body) {
cb();
});
};
ex['uploading new tarball'] = function(cb) {
server.put_tarball('testpkg', 'blahblah', readfile('fixtures/binary'), function(res, body) {
assert.equal(res.statusCode, 201);
assert(body.ok);
cb();
});
};
ex['downloading newly created tarball'] = function(cb) {
server.get_tarball('testpkg', 'blahblah', function(res, body) {
assert.equal(res.statusCode, 200);
assert.deepEqual(body, readfile('fixtures/binary').toString('utf8'));
cb();
});
};
ex['uploading new package version'] = function(cb) {
server.put_version('testpkg', '0.0.1', require('./lib/package')('testpkg'), function(res, body) {
assert.equal(res.statusCode, 201);
assert(~body.ok.indexOf('published'));
cb();
});
};
ex['downloading newly created package'] = function(cb) {
server.get_package('testpkg', function(res, body) {
assert.equal(res.statusCode, 200);
assert.equal(body.name, 'testpkg');
assert.equal(body.versions['0.0.1'].name, 'testpkg');
assert.equal(body.versions['0.0.1'].dist.tarball, 'http://localhost:55551/testpkg/-/blahblah');
assert.deepEqual(body['dist-tags'], {latest: '0.0.1'});
cb();
});
};
ex['downloading package via server2'] = function(cb) {
server2.get_package('testpkg', function(res, body) {
assert.equal(res.statusCode, 200);
assert.equal(body.name, 'testpkg');
assert.equal(body.versions['0.0.1'].name, 'testpkg');
assert.equal(body.versions['0.0.1'].dist.tarball, 'http://localhost:55552/testpkg/-/blahblah');
assert.deepEqual(body['dist-tags'], {latest: '0.0.1'});
cb();
});
};


@@ -1 +0,0 @@
stuff used for stress-testing; using it against real servers is inadvisable


@@ -1,41 +0,0 @@
#!/usr/bin/env node
var async = require('async');
var assert = require('assert');
var Server = require('../lib/server');
var readfile = require('fs').readFileSync;
var binary = readfile('../fixtures/binary');
var count = 10000;
var server = new Server('http://localhost:55551/');
async.series([
function(cb) {
server.auth('test', 'test', function(res, body) {
cb();
});
},
function(cb) {
server.put_package('testpkg', readfile('../fixtures/test-package.json'), function(res, body) {
cb();
});
},
function(cb) {
server.put_tarball('testpkg', 'blahblah', binary, function(res, body) {
cb();
});
},
function dos(cb) {
server.get_tarball('testpkg', 'blahblah', function(res, body) {
assert(res.statusCode === 200);
assert.deepEqual(body, binary.toString('utf8'));
if (count-- > 0) {
dos(cb);
} else {
cb();
}
});
},
], function() {
process.exit();
});

test/functional/addtag.js (new file, 58 lines)

@@ -0,0 +1,58 @@
var assert = require('assert')
function readfile(x) {
return require('fs').readFileSync(__dirname + '/' + x)
}
module.exports = function() {
var server = process.server
it('add tag - 404', function(cb) {
server.add_tag('testpkg-tag', 'tagtagtag', '0.0.1', function(res, body) {
assert.equal(res.statusCode, 404)
assert(~body.error.indexOf('no such package'))
cb()
})
})
describe('addtag', function() {
before(function(cb) {
server.put_package('testpkg-tag', eval(
'(' + readfile('fixtures/publish.json5')
.toString('utf8')
.replace(/__NAME__/g, 'testpkg-tag')
.replace(/__VERSION__/g, '0.0.1')
+ ')'
), function(res, body) {
assert.equal(res.statusCode, 201)
cb()
})
})
it('add testpkg-tag', function(){})
it('add tag - bad ver', function(cb) {
server.add_tag('testpkg-tag', 'tagtagtag', '0.0.1-x', function(res, body) {
assert.equal(res.statusCode, 404)
assert(~body.error.indexOf('version doesn\'t exist'))
cb()
})
})
it('add tag - bad tag', function(cb) {
server.add_tag('testpkg-tag', 'tag/tag/tag', '0.0.1-x', function(res, body) {
assert.equal(res.statusCode, 403)
assert(~body.error.indexOf('invalid tag'))
cb()
})
})
it('add tag - good', function(cb) {
server.add_tag('testpkg-tag', 'tagtagtag', '0.0.1', function(res, body) {
assert.equal(res.statusCode, 201)
assert(~body.ok.indexOf('tagged'))
cb()
})
})
})
}

test/functional/basic.js (new file, 133 lines)

@@ -0,0 +1,133 @@
require('./lib/startup')
var assert = require('assert')
, async = require('async')
, crypto = require('crypto')
function readfile(x) {
return require('fs').readFileSync(__dirname + '/' + x)
}
module.exports = function() {
var server = process.server
var server2 = process.server2
it('trying to fetch non-existent package', function(cb) {
server.get_package('testpkg', function(res, body) {
assert.equal(res.statusCode, 404)
assert(~body.error.indexOf('no such package'))
cb()
})
})
describe('testpkg', function() {
before(server.add_package.bind(server, 'testpkg'))
it('creating new package', function(){/* test for before() */})
it('downloading non-existent tarball', function(cb) {
server.get_tarball('testpkg', 'blahblah', function(res, body) {
assert.equal(res.statusCode, 404)
assert(~body.error.indexOf('no such file'))
cb()
})
})
it('uploading incomplete tarball', function(cb) {
server.put_tarball_incomplete('testpkg', 'blahblah1', readfile('fixtures/binary'), 3000, function(res, body) {
cb()
})
})
describe('tarball', function() {
before(function(cb) {
server.put_tarball('testpkg', 'blahblah', readfile('fixtures/binary'), function(res, body) {
assert.equal(res.statusCode, 201)
assert(body.ok)
cb()
})
})
it('uploading new tarball', function(){/* test for before() */})
it('downloading newly created tarball', function(cb) {
server.get_tarball('testpkg', 'blahblah', function(res, body) {
assert.equal(res.statusCode, 200)
assert.deepEqual(body, readfile('fixtures/binary').toString('utf8'))
cb()
})
})
it('uploading new package version (bad sha)', function(cb) {
var pkg = require('./lib/package')('testpkg')
pkg.dist.shasum = crypto.createHash('sha1').update('fake').digest('hex')
server.put_version('testpkg', '0.0.1', pkg, function(res, body) {
assert.equal(res.statusCode, 400)
assert(~body.error.indexOf('shasum error'))
cb()
})
})
describe('version', function() {
before(function(cb) {
var pkg = require('./lib/package')('testpkg')
pkg.dist.shasum = crypto.createHash('sha1').update(readfile('fixtures/binary')).digest('hex')
server.put_version('testpkg', '0.0.1', pkg, function(res, body) {
assert.equal(res.statusCode, 201)
assert(~body.ok.indexOf('published'))
cb()
})
})
it('uploading new package version', function(){/* test for before() */})
it('downloading newly created package', function(cb) {
server.get_package('testpkg', function(res, body) {
assert.equal(res.statusCode, 200)
assert.equal(body.name, 'testpkg')
assert.equal(body.versions['0.0.1'].name, 'testpkg')
assert.equal(body.versions['0.0.1'].dist.tarball, 'http://localhost:55551/testpkg/-/blahblah')
assert.deepEqual(body['dist-tags'], {latest: '0.0.1'})
cb()
})
})
it('downloading package via server2', function(cb) {
server2.get_package('testpkg', function(res, body) {
assert.equal(res.statusCode, 200)
assert.equal(body.name, 'testpkg')
assert.equal(body.versions['0.0.1'].name, 'testpkg')
assert.equal(body.versions['0.0.1'].dist.tarball, 'http://localhost:55552/testpkg/-/blahblah')
assert.deepEqual(body['dist-tags'], {latest: '0.0.1'})
cb()
})
})
})
})
})
it('uploading new package version for bad pkg', function(cb) {
server.put_version('testpxg', '0.0.1', require('./lib/package')('testpxg'), function(res, body) {
assert.equal(res.statusCode, 404)
assert(~body.error.indexOf('no such package'))
cb()
})
})
it('doubleerr test', function(cb) {
server.put_tarball('testfwd2', 'blahblah', readfile('fixtures/binary'), function(res, body) {
assert.equal(res.statusCode, 404)
assert(body.error)
cb()
})
})
it('publishing package / bad ro uplink', function(cb) {
server.put_package('baduplink', require('./lib/package')('baduplink'), function(res, body) {
assert.equal(res.statusCode, 503)
assert(~body.error.indexOf('one of the uplinks is down, refuse to publish'))
cb()
})
})
}


@@ -5,25 +5,43 @@ users:
password: a94a8fe5ccb19ba61c4c0873d391e987982fbbd3
uplinks:
express:
url: http://localhost:55550/
timeout: 100ms
server2:
url: http://localhost:55552/
baduplink:
url: http://localhost:55666/
logs:
- {type: stdout, format: pretty, level: trace}
packages:
'testfwd':
'testfwd*':
allow_access: all
allow_publish: all
proxy_access: server2
proxy_publish: server2
'testloop':
allow_access: all
allow_publish: all
proxy_access: server2
proxy_publish: server2
'testexp*':
allow_access: all
allow_publish: all
proxy_access: express
'baduplink':
allow_access: all
allow_publish: all
proxy_access: baduplink
'*':
allow_access: test anonymous
allow_publish: test anonymous
allow_access: test undefined
allow_publish: test undefined
# this should not matter
testpkg:


@@ -7,19 +7,23 @@ users:
uplinks:
server1:
url: http://localhost:55551/
maxage: 0
logs:
- {type: stdout, format: pretty, level: trace}
packages:
'testfwd':
allow_access: all
allow_publish: all
'testloop':
allow_access: all
allow_publish: all
proxy_access: server1
proxy_publish: server1
testpkg:
'testpkg*':
allow_access: test anonymous
allow_publish: test anonymous
proxy_access: server1


@@ -0,0 +1,29 @@
{ _id: '__NAME__',
name: '__NAME__',
description: '',
'dist-tags': { latest: '__VERSION__' },
versions:
{ '__VERSION__':
{ name: '__NAME__',
version: '__VERSION__',
description: '',
main: 'index.js',
scripts: { test: 'echo "Error: no test specified" && exit 1' },
author: '',
license: 'ISC',
readme: 'ERROR: No README data found!',
_id: '__NAME__@__VERSION__',
dist:
{ shasum: '071c8dd9fd775bf3ebc0d5108431110f5f857ce3',
tarball: 'http://localhost:4873/__NAME__/-/__NAME__-__VERSION__.tgz' },
_from: '.',
_npmVersion: '1.3.21',
_npmUser: { name: 'rlidwka', email: 'alex@kocharin.ru' },
maintainers: [ { name: 'rlidwka', email: 'alex@kocharin.ru' } ] } },
readme: 'ERROR: No README data found!',
maintainers: [ { name: 'rlidwka', email: 'alex@kocharin.ru' } ],
_attachments:
{ '__NAME__-__VERSION__.tgz':
{ content_type: 'application/octet-stream',
data: 'H4sIAAAAAAAAA+2SP2vDMBDFPftTHDdkah3Zim3IGjJ0zppFyNdE+SMJSSmBkO8eWS6mQ7cGSsC/5cG9p7uTkBXyKHY0t4MWB2909mQYY81iAVHLtmY/NcGrCrKyquq25Q1vm4yVnPEm+s9e5DcuPggXV/lrn+EuMOqLcMsBUIsz4RIwkA/v9rjDt1iN4Bc5r4zuPVawok4GduSlUzZ8O2P6LFQqKN3RNf6kIT1kfTRuKZem9DGSewNbXDtn3BK0gd4Ab0mqT0XdFmE2A7qqACXGk/fUTVzC3rhxLJ6UJO3T9h+bFeb3/L9fdGJiYuI1eACk8AYWAAgAAA==',
length: 250 } } }


@@ -0,0 +1,50 @@
{
"name": "testexp_tags",
"versions": {
"0.1.0": {
"name": "testexp_tags",
"version": "0.1.0",
"dist": {
"shasum": "fake",
"tarball": "http://localhost:55551/testexp_tags/-/blahblah"
}
},
"0.1.1alpha": {
"name": "testexp_tags",
"version": "0.1.1alpha",
"dist": {
"shasum": "fake",
"tarball": "http://localhost:55551/testexp_tags/-/blahblah"
}
},
"0.1.2": {
"name": "testexp_tags",
"version": "0.1.2",
"dist": {
"shasum": "fake",
"tarball": "http://localhost:55551/testexp_tags/-/blahblah"
}
},
"0.1.3alpha": {
"name": "testexp_tags",
"version": "0.1.3alpha",
"dist": {
"shasum": "fake",
"tarball": "http://localhost:55551/testexp_tags/-/blahblah"
}
},
"1.1": {
"name": "testexp_tags",
"version": "1.1",
"dist": {
"shasum": "fake",
"tarball": "http://localhost:55551/testexp_tags/-/blahblah"
}
}
},
"dist-tags": {
"latest": "5.4.3",
"something": "0.1.1alpha",
"bad": "1.1"
}
}

test/functional/gh29.js (new file, 75 lines)

@@ -0,0 +1,75 @@
var assert = require('assert')
, crypto = require('crypto')
, ex = module.exports
function readfile(x) {
return require('fs').readFileSync(__dirname + '/' + x)
}
module.exports = function() {
var server = process.server
var server2 = process.server2
it('downloading non-existent tarball #1 / srv2', function(cb) {
server2.get_tarball('testpkg-gh29', 'blahblah', function(res, body) {
assert.equal(res.statusCode, 404)
assert(~body.error.indexOf('no such package'))
cb()
})
})
describe('pkg-gh29', function() {
before(function(cb) {
server.put_package('testpkg-gh29', require('./lib/package')('testpkg-gh29'), function(res, body) {
assert.equal(res.statusCode, 201)
assert(~body.ok.indexOf('created new package'))
cb()
})
})
it('creating new package / srv1', function(){})
it('downloading non-existent tarball #2 / srv2', function(cb) {
server2.get_tarball('testpkg-gh29', 'blahblah', function(res, body) {
assert.equal(res.statusCode, 404)
assert(~body.error.indexOf('no such file'))
cb()
})
})
describe('tarball', function() {
before(function(cb) {
server.put_tarball('testpkg-gh29', 'blahblah', readfile('fixtures/binary'), function(res, body) {
assert.equal(res.statusCode, 201)
assert(body.ok)
cb()
})
})
it('uploading new tarball / srv1', function(){})
describe('pkg version', function() {
before(function(cb) {
var pkg = require('./lib/package')('testpkg-gh29')
pkg.dist.shasum = crypto.createHash('sha1').update(readfile('fixtures/binary')).digest('hex')
server.put_version('testpkg-gh29', '0.0.1', pkg, function(res, body) {
assert.equal(res.statusCode, 201)
assert(~body.ok.indexOf('published'))
cb()
})
})
it('uploading new package version / srv1', function(){})
it('downloading newly created tarball / srv2', function(cb) {
server2.get_tarball('testpkg-gh29', 'blahblah', function(res, body) {
assert.equal(res.statusCode, 200)
assert.deepEqual(body, readfile('fixtures/binary').toString('utf8'))
cb()
})
})
})
})
})
}

test/functional/gzip.js (new file, 93 lines)

@@ -0,0 +1,93 @@
require('./lib/startup')
var assert = require('assert')
, async = require('async')
, crypto = require('crypto')
function readfile(x) {
return require('fs').readFileSync(__dirname + '/' + x)
}
module.exports = function() {
var server = process.server
var express = process.express
describe('testexp_gzip', function() {
before(function() {
express.get('/testexp_gzip', function(req, res) {
var x = eval(
'(' + readfile('fixtures/publish.json5')
.toString('utf8')
.replace(/__NAME__/g, 'testexp_gzip')
.replace(/__VERSION__/g, '0.0.1')
+ ')'
)
// overcoming compress threshold
x.versions['0.0.2'] = x.versions['0.0.1']
x.versions['0.0.3'] = x.versions['0.0.1']
x.versions['0.0.4'] = x.versions['0.0.1']
x.versions['0.0.5'] = x.versions['0.0.1']
x.versions['0.0.6'] = x.versions['0.0.1']
x.versions['0.0.7'] = x.versions['0.0.1']
x.versions['0.0.8'] = x.versions['0.0.1']
x.versions['0.0.9'] = x.versions['0.0.1']
require('zlib').gzip(JSON.stringify(x), function(err, buf) {
assert(!err)
assert.equal(req.headers['accept-encoding'], 'gzip')
res.header('content-encoding', 'gzip')
res.send(buf)
})
})
express.get('/testexp_baddata', function(req, res) {
assert.equal(req.headers['accept-encoding'], 'gzip')
res.header('content-encoding', 'gzip')
res.send(new Buffer([1,2,3,4,5,6,7,7,6,5,4,3,2,1]))
})
})
it('should not fail on bad gzip', function(cb) {
server.get_package('testexp_baddata', function(res, body) {
assert.equal(res.statusCode, 404)
cb()
})
})
it('should understand gzipped data from uplink', function(cb) {
server.get_package('testexp_gzip', function(res, body) {
assert.equal(res.statusCode, 200)
assert.equal(res.headers['content-encoding'], undefined)
assert.equal(body.name, 'testexp_gzip')
assert.equal(Object.keys(body.versions).length, 9)
cb()
})
})
it('should serve gzipped data', function(cb) {
server.request({
uri: '/testexp_gzip',
encoding: null,
headers: {
'Accept-encoding': 'gzip',
},
json: false,
}, function(err, res, body) {
assert.equal(res.statusCode, 200)
assert.equal(res.headers['content-encoding'], 'gzip')
assert.throws(function() {
JSON.parse(body.toString('utf8'))
})
require('zlib').gunzip(body, function(err, buf) {
assert(!err)
body = JSON.parse(buf)
assert.equal(body.name, 'testexp_gzip')
assert.equal(Object.keys(body.versions).length, 9)
cb()
})
})
})
})
}


@@ -0,0 +1,67 @@
var assert = require('assert')
, ex = module.exports
module.exports = function() {
var server = process.server
var express = process.express
describe('Incomplete', function() {
var on_tarball
before(function() {
express.get('/testexp-incomplete', function(_, res) {
res.send({
"name": "testexp-incomplete",
"versions": {
"0.1.0": {
"name": "testexp_tags",
"version": "0.1.0",
"dist": {
"shasum": "fake",
"tarball": "http://localhost:55550/testexp-incomplete/-/content-length.tar.gz"
}
},
"0.1.1": {
"name": "testexp_tags",
"version": "0.1.1",
"dist": {
"shasum": "fake",
"tarball": "http://localhost:55550/testexp-incomplete/-/chunked.tar.gz"
}
}
}
})
})
})
;['content-length', 'chunked'].forEach(function(type) {
it('should not store tarballs / ' + type, function(_cb) {
var called
express.get('/testexp-incomplete/-/'+type+'.tar.gz', function(_, res) {
if (called) return res.socket.destroy()
called = true
if (type !== 'chunked') res.header('content-length', 1e6)
res.write('test test test\n')
setTimeout(function() {
res.socket.write('200\nsss\n')
res.socket.destroy()
cb()
}, 10)
})
server.request({uri:'/testexp-incomplete/-/'+type+'.tar.gz'}, function(err, res, body) {
if (type !== 'chunked') assert.equal(res.headers['content-length'], 1e6)
assert(body.match(/test test test/))
})
function cb() {
server.request({uri:'/testexp-incomplete/-/'+type+'.tar.gz'}, function(err, res, body) {
assert.equal(body.error, 'internal server error')
_cb()
})
}
})
})
})
}

test/functional/index.js (new file, 50 lines)

@@ -0,0 +1,50 @@
require('./lib/startup')
var assert = require('assert')
, async = require('async')
, crypto = require('crypto')
, ex = module.exports
function readfile(x) {
return require('fs').readFileSync(__dirname + '/' + x)
}
describe('Func', function() {
var server = process.server
var server2 = process.server2
before(function(cb) {
async.parallel([
function(cb) {
require('./lib/startup').start('./test-storage', './config-1.yaml', cb)
},
function(cb) {
require('./lib/startup').start('./test-storage2', './config-2.yaml', cb)
},
], cb)
})
before(function auth(cb) {
async.map([server, server2], function(server, cb) {
server.auth('test', 'test', function(res, body) {
assert.equal(res.statusCode, 201)
assert.notEqual(body.ok.indexOf('"test"'), -1)
cb()
})
}, cb)
})
it('authenticate', function(){/* test for before() */})
require('./basic')()
require('./gh29')()
require('./tags')()
require('./gzip')()
require('./incomplete')()
require('./mirror')()
require('./race')()
require('./racycrash')()
require('./security')()
require('./addtag')()
})


@@ -1,12 +1,12 @@
module.exports = function(name) {
module.exports = function(name, version) {
return {
"name": name,
"version": "0.0.0",
"version": version || "0.0.0",
"dist": {
"shasum": "fake",
"tarball": "http://localhost:55551/"+escape(name)+"/-/blahblah"
}
};
}
}


@@ -0,0 +1,128 @@
var request = require('request')
, assert = require('assert')
function Server(url) {
if (!(this instanceof Server)) return new Server(url)
this.url = url.replace(/\/$/, '')
this.userAgent = 'node/v0.10.8 linux x64'
this.authstr = 'Basic '+(new Buffer('test:test')).toString('base64')
}
function prep(cb) {
return function(err, res, body) {
if (err) throw err
cb(res, body)
}
}
Server.prototype.request = function(options, cb) {
assert(options.uri)
var headers = options.headers || {}
headers.accept = headers.accept || 'application/json'
headers['user-agent'] = headers['user-agent'] || this.userAgent
headers.authorization = headers.authorization || this.authstr
return request({
url: this.url + options.uri,
method: options.method || 'GET',
headers: headers,
encoding: options.encoding,
json: options.json != null ? options.json : true,
}, cb)
}
Server.prototype.auth = function(user, pass, cb) {
this.authstr = 'Basic '+(new Buffer(user+':'+pass)).toString('base64')
this.request({
uri: '/-/user/org.couchdb.user:'+encodeURIComponent(user)+'/-rev/undefined',
method: 'PUT',
json: {
content: "doesn't matter, 'cause sinopia uses info from Authorization header anywayz",
}
}, prep(cb))
}
Server.prototype.get_package = function(name, cb) {
this.request({
uri: '/'+name,
method: 'GET',
}, prep(cb))
}
Server.prototype.put_package = function(name, data, cb) {
if (typeof(data) === 'object' && !Buffer.isBuffer(data)) data = JSON.stringify(data)
this.request({
uri: '/'+encodeURIComponent(name),
method: 'PUT',
headers: {
'content-type': 'application/json'
},
}, prep(cb)).end(data)
}
Server.prototype.put_version = function(name, version, data, cb) {
if (typeof(data) === 'object' && !Buffer.isBuffer(data)) data = JSON.stringify(data)
this.request({
uri: '/'+encodeURIComponent(name)+'/'+encodeURIComponent(version)+'/-tag/latest',
method: 'PUT',
headers: {
'content-type': 'application/json'
},
}, prep(cb)).end(data)
}
Server.prototype.get_tarball = function(name, filename, cb) {
this.request({
uri: '/'+encodeURIComponent(name)+'/-/'+encodeURIComponent(filename),
method: 'GET',
}, prep(cb))
}
Server.prototype.put_tarball = function(name, filename, data, cb) {
this.request({
uri: '/'+encodeURIComponent(name)+'/-/'+encodeURIComponent(filename)+'/whatever',
method: 'PUT',
headers: {
'content-type': 'application/octet-stream'
},
}, prep(cb)).end(data)
}
Server.prototype.add_tag = function(name, tag, version, cb) {
this.request({
uri: '/'+encodeURIComponent(name)+'/'+encodeURIComponent(tag),
method: 'PUT',
headers: {
'content-type': 'application/json'
},
}, prep(cb)).end(JSON.stringify(version))
}
Server.prototype.put_tarball_incomplete = function(name, filename, data, size, cb) {
var req = this.request({
uri: '/'+encodeURIComponent(name)+'/-/'+encodeURIComponent(filename)+'/whatever',
method: 'PUT',
headers: {
'content-type': 'application/octet-stream',
'content-length': size,
},
timeout: 1000,
}, function(err) {
assert(err)
cb()
})
req.write(data)
setTimeout(function() {
req.req.abort()
}, 20)
}
Server.prototype.add_package = function(name, cb) {
this.put_package(name, require('./package')(name), function(res, body) {
assert.equal(res.statusCode, 201)
assert(~body.ok.indexOf('created new package'))
cb()
})
}
module.exports = Server
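A minimal usage sketch for this helper, assuming a sinopia instance is already listening on 55551 with the test/test user from the suite's config (the package name is made up):

var Server = require('./server')
var server = Server('http://localhost:55551/')   // `new` is optional, see the guard above
server.auth('test', 'test', function(res, body) {
  // credentials travel in the Authorization header that auth() just set
  server.add_package('example-pkg', function() {
    server.get_package('example-pkg', function(res, body) {
      console.log(res.statusCode, body.name)   // expected: 200 'example-pkg'
    })
  })
})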

@@ -0,0 +1,33 @@
var rimraf = require('rimraf')
, fork = require('child_process').fork
, assert = require('assert')
, express = require('express')
, readfile = require('fs').readFileSync
, Server = require('./server')
var forks = process.forks = []
process.server = new Server('http://localhost:55551/')
process.server2 = new Server('http://localhost:55552/')
process.express = express()
process.express.listen(55550)
module.exports.start = function start(dir, conf, cb) {
rimraf(__dirname + '/../' + dir, function() {
var f = fork(__dirname + '/../../../bin/sinopia'
, ['-c', __dirname + '/../' + conf]
, {silent: true}
)
forks.push(f)
f.on('message', function(msg) {
if ('sinopia_started' in msg) {
cb()
}
})
})
}
process.on('exit', function() {
if (forks[0]) forks[0].kill()
if (forks[1]) forks[1].kill()
})
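start() resolves only after the forked bin/sinopia reports readiness over the child-process IPC channel. The child side of that handshake is assumed to amount to something like:

// sketch of the sender (an assumption about bin/sinopia, which must call
// process.send once its HTTP server is actually listening)
server.listen(config.listen, function() {
  if (process.send) process.send({ sinopia_started: config.listen })
})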

75 test/functional/mirror.js Normal file

@@ -0,0 +1,75 @@
var assert = require('assert')
, ex = module.exports
function readfile(x) {
return require('fs').readFileSync(__dirname + '/' + x)
}
module.exports = function() {
var server = process.server
var server2 = process.server2
it('testing anti-loop', function(cb) {
server2.get_package('testloop', function(res, body) {
assert.equal(res.statusCode, 404)
assert(~body.error.indexOf('no such package'))
cb()
})
})
;['fwd', /*'loop'*/].forEach(function(pkg) {
var prefix = pkg + ': '
pkg = 'test' + pkg
describe(pkg, function() {
before(function(cb) {
server.put_package(pkg, require('./lib/package')(pkg), function(res, body) {
assert.equal(res.statusCode, 201)
assert(~body.ok.indexOf('created new package'))
cb()
})
})
it(prefix+'creating new package', function(){})
describe(pkg, function() {
before(function(cb) {
server.put_version(pkg, '0.1.1', require('./lib/package')(pkg), function(res, body) {
assert.equal(res.statusCode, 201)
assert(~body.ok.indexOf('published'))
cb()
})
})
it(prefix+'uploading new package version', function(){})
it(prefix+'uploading incomplete tarball', function(cb) {
server.put_tarball_incomplete(pkg, pkg+'.bad', readfile('fixtures/binary'), 3000, function(res, body) {
cb()
})
})
describe('tarball', function() {
before(function(cb) {
server.put_tarball(pkg, pkg+'.file', readfile('fixtures/binary'), function(res, body) {
assert.equal(res.statusCode, 201)
assert(body.ok)
cb()
})
})
it(prefix+'uploading new tarball', function(){})
it(prefix+'downloading tarball from server1', function(cb) {
server.get_tarball(pkg, pkg+'.file', function(res, body) {
assert.equal(res.statusCode, 200)
assert.deepEqual(body, readfile('fixtures/binary').toString('utf8'))
cb()
})
})
})
})
})
})
}
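The anti-loop case works because the two test registries are configured as each other's uplink for 'testloop', so a naive proxy would bounce the request back and forth forever. A plausible reading of the guard -- hypothetical, the real mechanism lives in sinopia's uplink code -- is a Via-style header check:

var my_id = 'sinopia on localhost:55552'   // made-up identifier format
function is_request_loop(headers) {
  // if our own id already appears in the forwarding chain, answer 404
  // ('no such package') instead of proxying the request onward again
  return (headers.via || '').indexOf(my_id) !== -1
}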

94 test/functional/race.js Normal file

@@ -0,0 +1,94 @@
var assert = require('assert')
, readfile = require('fs').readFileSync
, ex = module.exports
, async = require('async')
, _oksum = 0
module.exports = function() {
var server = process.server
var server2 = process.server2
describe('race', function() {
before(function(cb) {
server.put_package('race', require('./lib/package')('race'), function(res, body) {
assert.equal(res.statusCode, 201)
assert(~body.ok.indexOf('created new package'))
cb()
})
})
it('creating new package', function(){})
it('uploading 10 same versions', function(cb) {
var fns = []
for (var i=0; i<10; i++) {
fns.push(function(cb_) {
var data = require('./lib/package')('race')
data.rand = Math.random()
server.put_version('race', '0.0.1', data, function(res, body) {
cb_(null, res, body)
})
})
}
async.parallel(fns, function(err, res) {
var okcount = 0
, failcount = 0
res.forEach(function(arr) {
var resp = arr[0]
, body = arr[1]
if (resp.statusCode === 201 && ~body.ok.indexOf('published')) okcount++
if (resp.statusCode === 409 && ~body.error.indexOf('already present')) failcount++
if (resp.statusCode === 503 && ~body.error.indexOf('unavailable')) failcount++
})
assert.equal(okcount + failcount, 10)
assert.equal(okcount, 1)
_oksum += okcount
cb()
})
})
it('uploading 10 diff versions', function(cb) {
var fns = []
for (var i=0; i<10; i++) {
;(function(i) {
fns.push(function(cb_) {
server.put_version('race', '0.1.'+String(i), require('./lib/package')('race'), function(res, body) {
cb_(null, res, body)
})
})
})(i)
}
async.parallel(fns, function(err, res) {
var okcount = 0
, failcount = 0
res.forEach(function(arr) {
var resp = arr[0]
, body = arr[1]
if (resp.statusCode === 201 && ~body.ok.indexOf('published')) okcount++
if (resp.statusCode === 409 && ~body.error.indexOf('already present')) failcount++
if (resp.statusCode === 503 && ~body.error.indexOf('unavailable')) failcount++
})
assert.equal(okcount + failcount, 10)
_oksum += okcount
cb()
})
})
// XXX: this should run after everything else, but mocha gives no way to enforce that ordering
it('downloading package', function(cb) {
server.get_package('race', function(res, body) {
assert.equal(res.statusCode, 200)
assert.equal(Object.keys(body.versions).length, _oksum)
cb()
})
})
})
}

@@ -0,0 +1,69 @@
var assert = require('assert')
, ex = module.exports
module.exports = function() {
var server = process.server
var express = process.express
describe('Racy', function() {
var on_tarball
before(function() {
express.get('/testexp-racycrash', function(_, res) {
res.send({
"name": "testexp-racycrash",
"versions": {
"0.1.0": {
"name": "testexp_tags",
"version": "0.1.0",
"dist": {
"shasum": "fake",
"tarball": "http://localhost:55550/testexp-racycrash/-/test.tar.gz"
}
}
}
})
})
express.get('/testexp-racycrash/-/test.tar.gz', function(_, res) {
on_tarball(res)
})
})
it('should not crash on error if client disconnects', function(_cb) {
on_tarball = function(res) {
res.header('content-length', 1e6)
res.write('test test test\n')
setTimeout(function() {
res.write('test test test\n')
res.socket.destroy()
cb()
}, 200)
}
server.request({uri:'/testexp-racycrash/-/test.tar.gz'}, function(err, res, body) {
assert.equal(body, 'test test test\n')
})
function cb() {
// test for NOT crashing
server.request({uri:'/testexp-racycrash'}, function(err, res, body) {
assert.equal(res.statusCode, 200)
_cb()
})
}
})
it('should not store tarball', function(cb) {
on_tarball = function(res) {
res.socket.destroy()
}
server.request({uri:'/testexp-racycrash/-/test.tar.gz'}, function(err, res, body) {
assert.equal(body.error, 'internal server error')
cb()
})
})
})
}
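Both cases pin down one invariant: a tarball coming from an uplink may only be cached once every advertised byte has arrived; otherwise the client gets an error and nothing is stored. A sketch of that rule (assumed behaviour, not sinopia's actual storage code):

function on_uplink_end(temp_stream, received, expected) {
  if (expected != null && received !== expected) {
    temp_stream.abort()   // throw away the incomplete download
  } else {
    temp_stream.done()    // commit the temp file into package storage
  }
}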

@@ -0,0 +1,88 @@
var assert = require('assert')
, ex = module.exports
module.exports = function() {
var server = process.server
var server2 = process.server2
describe('Security', function() {
before(server.add_package.bind(server, 'testpkg-sec'))
it('bad pkg #1', function(cb) {
server.get_package('package.json', function(res, body) {
assert.equal(res.statusCode, 403)
assert(~body.error.indexOf('invalid package'))
cb()
})
})
it('bad pkg #2', function(cb) {
server.get_package('__proto__', function(res, body) {
assert.equal(res.statusCode, 403)
assert(~body.error.indexOf('invalid package'))
cb()
})
})
it('__proto__, connect stuff', function(cb) {
server.request({uri:'/testpkg-sec?__proto__=1'}, function(err, res, body) {
// test for NOT outputting stack trace
assert(!body || typeof(body) === 'object' || body.indexOf('node_modules') === -1)
// test for NOT crashing
server.request({uri:'/testpkg-sec'}, function(err, res, body) {
assert.equal(res.statusCode, 200)
cb()
})
})
})
it('do not return package.json as an attachment', function(cb) {
server.request({uri:'/testpkg-sec/-/package.json'}, function(err, res, body) {
assert.equal(res.statusCode, 403)
assert(body.error.match(/invalid filename/))
cb()
})
})
it('silly things - reading #1', function(cb) {
server.request({uri:'/testpkg-sec/-/../../../../../../../../etc/passwd'}, function(err, res, body) {
assert.equal(res.statusCode, 404)
cb()
})
})
it('silly things - reading #2', function(cb) {
server.request({uri:'/testpkg-sec/-/%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2fetc%2fpasswd'}, function(err, res, body) {
assert.equal(res.statusCode, 403)
assert(body.error.match(/invalid filename/))
cb()
})
})
it('silly things - writing #1', function(cb) {
server.put_tarball('testpkg-sec', 'package.json', '{}', function(res, body) {
assert.equal(res.statusCode, 403)
assert(body.error.match(/invalid filename/))
cb()
})
})
it('silly things - writing #3', function(cb) {
server.put_tarball('testpkg-sec', 'node_modules', '{}', function(res, body) {
assert.equal(res.statusCode, 403)
assert(body.error.match(/invalid filename/))
cb()
})
})
it('silly things - writing #4', function(cb) {
server.put_tarball('testpkg-sec', '../testpkg.tgz', '{}', function(res, body) {
assert.equal(res.statusCode, 403)
assert(body.error.match(/invalid filename/))
cb()
})
})
})
}

51 test/functional/tags.js Normal file

@@ -0,0 +1,51 @@
var assert = require('assert')
, ex = module.exports
function readfile(x) {
return require('fs').readFileSync(__dirname + '/' + x)
}
module.exports = function() {
var server = process.server
var express = process.express
it('tags - testing for 404', function(cb) {
server.get_package('testexp_tags', function(res, body) {
// shouldn't exist yet
assert.equal(res.statusCode, 404)
assert(~body.error.indexOf('no such package'))
cb()
})
})
describe('tags', function() {
before(function(cb) {
express.get('/testexp_tags', function(req, res) {
res.send(JSON.parse(readfile('fixtures/tags.json')))
})
cb()
})
it('fetching package again', function(cb) {
server.get_package('testexp_tags', function(res, body) {
assert.equal(res.statusCode, 200)
assert.equal(typeof(body.versions['1.1']), 'object')
assert.equal(body['dist-tags'].something, '0.1.1alpha')
// note: '5.4.3' is an invalid tag and '0.1.3alpha' is the highest semver, but ignore_latest_tag defaults to false, so the uplink's latest is kept as-is
assert.equal(body['dist-tags'].latest, '5.4.3')
assert.equal(body['dist-tags'].bad, null)
cb()
})
})
;['0.1.1alpha', '0.1.1-alpha', '0000.00001.001-alpha'].forEach(function(ver) {
it('fetching '+ver, function(cb) {
server.request({uri:'/testexp_tags/'+ver}, function(err, res, body) {
assert.equal(res.statusCode, 200)
assert.equal(body.version, '0.1.1alpha')
cb()
})
})
})
})
}
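The three fetches at the end exercise loose version lookup: an exact key wins, otherwise keys are compared as loose semver, so '0.1.1-alpha' and '0000.00001.001-alpha' both resolve to the stored '0.1.1alpha'. A sketch of such a lookup using the semver module (hypothetical helper, not sinopia's exact code):

var semver = require('semver')
function get_version(pkg, ver) {
  if (pkg.versions[ver] != null) return pkg.versions[ver]
  for (var k in pkg.versions) {
    try {
      // third argument turns on loose parsing, accepting '0.1.1alpha' & co.
      if (semver.eq(k, ver, true)) return pkg.versions[k]
    } catch (err) { /* skip keys that are not semver at all */ }
  }
}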

@@ -0,0 +1,25 @@
storage: ./.sinopia_test_env/test-storage
users:
test:
password: a94a8fe5ccb19ba61c4c0873d391e987982fbbd3
uplinks:
npmjs:
url: https://registry.npmjs.org/
logs:
- {type: stdout, format: pretty, level: trace}
packages:
jju:
allow_access: all
allow_publish: all
proxy_access: npmjs
'*':
allow_access: all
allow_publish: all
listen: 55501

Binary file not shown.

50 test/integration/test.pl Executable file

@@ -0,0 +1,50 @@
#!/usr/bin/perl
# note to readers: 'use strict' is useful in perl; in javascript it isn't
use strict;
# setting up working environment && chdir there
use Cwd 'abs_path';
use File::Basename;
$ENV{HOME} = dirname(abs_path( __FILE__ )) . '/.sinopia_test_env';
system('rm -rf .sinopia_test_env ; mkdir .sinopia_test_env') and quit('fail');
chdir $ENV{HOME};
use Data::Dumper;
my $pid;
sub quit {
print $_[0]."\n";
exec("kill $pid ; exit 1");
}
# run sinopia in a child process
if (($pid = fork()) == 0) {
exec "../../../bin/sinopia ../config.yaml";
die "exec failed";
}
system('mkdir node_modules') and quit('fail');
system('npm set sinopia_test_config 12345') and quit('fail');
if (`cat .npmrc` !~ /sinopia_test_config/) {
quit "npm is using wrong config";
}
system('npm set registry http://localhost:55501') and quit('fail');
system(q{/bin/echo -e 'test\ntest\ns@s.s\n' | npm adduser}) and quit('fail');
system('npm install jju') and quit('fail');
(`node -e 'console.log(require("jju").parse("{qwerty:123}").qwerty+456)'` =~ /579/) or quit('fail');
system('npm publish ../sinopia-test-1.2.3.tgz') and quit('fail');
system('npm tag sinopia-test@1.2.3 meow') and quit('fail');
system('npm install sinopia-test@meow') and quit('fail');
(`node -e 'require("sinopia-test")'` =~ /w==w/) or quit('fail');
quit("
==================================================================
All tests seem to be executed successfully, nothing is broken yet.
==================================================================");

@@ -1,107 +0,0 @@
var request = require('request');
var assert = require('assert');
function Server(url) {
if (!(this instanceof Server)) return new Server(url);
this.url = url.replace(/\/$/, '');
this.userAgent = 'node/v0.10.8 linux x64';
}
function prep(cb) {
return function(err, res, body) {
if (err) throw err;
cb(res, body);
};
}
Server.prototype.request = function(options, cb) {
var headers = options.headers || {};
headers.accept = headers.accept || 'application/json';
headers['user-agent'] = headers['user-agent'] || this.userAgent;
headers.authorization = headers.authorization || this.auth;
return request({
url: this.url + options.uri,
method: options.method || 'GET',
headers: headers,
json: options.json || true,
}, cb);
}
Server.prototype.auth = function(user, pass, cb) {
this.auth = 'Basic '+(new Buffer(user+':'+pass)).toString('base64');
this.request({
uri: '/-/user/org.couchdb.user:'+escape(user)+'/-rev/undefined',
method: 'PUT',
json: {
content: "doesn't matter, 'cause sinopia uses info from Authorization header anywayz",
}
}, prep(cb));
}
Server.prototype.get_package = function(name, cb) {
this.request({
uri: '/'+name,
method: 'GET',
}, prep(cb));
}
Server.prototype.put_package = function(name, data, cb) {
if (typeof(data) === 'object' && !Buffer.isBuffer(data)) data = JSON.stringify(data);
this.request({
uri: '/'+escape(name),
method: 'PUT',
headers: {
'content-type': 'application/json'
},
}, prep(cb)).end(data);
}
Server.prototype.put_version = function(name, version, data, cb) {
if (typeof(data) === 'object' && !Buffer.isBuffer(data)) data = JSON.stringify(data);
this.request({
uri: '/'+escape(name)+'/'+escape(version)+'/-tag/latest',
method: 'PUT',
headers: {
'content-type': 'application/json'
},
}, prep(cb)).end(data);
}
Server.prototype.get_tarball = function(name, filename, cb) {
this.request({
uri: '/'+escape(name)+'/-/'+escape(filename),
method: 'GET',
}, prep(cb));
}
Server.prototype.put_tarball = function(name, filename, data, cb) {
this.request({
uri: '/'+escape(name)+'/-/'+escape(filename)+'/whatever',
method: 'PUT',
headers: {
'content-type': 'application/octet-stream'
},
}, prep(cb)).end(data);
}
Server.prototype.put_tarball_incomplete = function(name, filename, data, size, cb) {
var req = this.request({
uri: '/'+escape(name)+'/-/'+escape(filename)+'/whatever',
method: 'PUT',
headers: {
'content-type': 'application/octet-stream',
'content-length': size,
},
timeout: 1000,
}, function(err) {
assert(err);
cb();
});
req.write(data);
setTimeout(function() {
req.req.abort();
}, 20);
}
module.exports = Server;

@@ -1,67 +0,0 @@
var assert = require('assert');
var readfile = require('fs').readFileSync;
var ex = module.exports;
var server = process.server;
var server2 = process.server2;
['fwd', 'loop'].forEach(function(pkg) {
var prefix = pkg+': ';
pkg = 'test'+pkg;
ex[prefix+'creating new package'] = function(cb) {
server.put_package(pkg, require('./lib/package')(pkg), function(res, body) {
assert.equal(res.statusCode, 201);
assert(~body.ok.indexOf('created new package'));
cb();
});
};
ex[prefix+'uploading new package version'] = function(cb) {
server.put_version(pkg, '0.1.1', require('./lib/package')(pkg), function(res, body) {
assert.equal(res.statusCode, 201);
assert(~body.ok.indexOf('published'));
cb();
});
};
ex[prefix+'downloading package via server2'] = function(cb) {
server2.get_package(pkg, function(res, body) {
assert.equal(res.statusCode, 200);
assert.equal(body.name, pkg);
assert.equal(body.versions['0.1.1'].name, pkg);
assert.equal(body.versions['0.1.1'].dist.tarball, 'http://localhost:55552/'+pkg+'/-/blahblah');
cb();
});
};
ex[prefix+'uploading incomplete tarball'] = function(cb) {
server.put_tarball_incomplete(pkg, pkg+'.bad', readfile('fixtures/binary'), 3000, function(res, body) {
cb();
});
};
ex[prefix+'uploading new tarball'] = function(cb) {
server.put_tarball(pkg, pkg+'.file', readfile('fixtures/binary'), function(res, body) {
assert.equal(res.statusCode, 201);
assert(body.ok);
cb();
});
};
ex[prefix+'downloading tarball from server1'] = function(cb) {
server.get_tarball(pkg, pkg+'.file', function(res, body) {
assert.equal(res.statusCode, 200);
assert.deepEqual(body, readfile('fixtures/binary').toString('utf8'));
cb();
});
};
ex[prefix+'downloading tarball from server2'] = function(cb) {
server2.get_tarball(pkg, pkg+'.file', function(res, body) {
assert.equal(res.statusCode, 200);
assert.deepEqual(body, readfile('fixtures/binary').toString('utf8'));
cb();
});
};
});

@@ -1,8 +1,10 @@
#!/bin/sh
CWD=$(pwd)
PATH='../node_modules/.bin':$PATH
TESTDIR=$(dirname $0)
cd $TESTDIR
../node_modules/mocha/bin/mocha -R list --ui exports ./tests.js
mocha ./functional ./unit
TESTRES=$?
cd $CWD
exit $TESTRES

@@ -1,39 +0,0 @@
var rimraf = require('rimraf');
var fork = require('child_process').fork;
var assert = require('assert');
var readfile = require('fs').readFileSync;
var ex = module.exports;
var server = process.server;
var server2 = process.server2;
var forks = process.forks;
ex['starting servers'] = function(cb) {
var count = 0;
function start(dir, conf) {
count++;
rimraf(dir, function() {
var f = fork('../bin/sinopia', ['-c', conf], {silent: true});
forks.push(f);
f.on('message', function(msg) {
if ('sinopia_started' in msg) {
if (!--count) cb();
}
});
});
};
start('./test-storage', './config-1.yaml', cb);
start('./test-storage2', './config-2.yaml', cb);
};
ex['authentication to servers'] = function(cb) {
var count = 0;
[server, server2].forEach(function(server) {
count++;
server.auth('test', 'test', function(res, body) {
assert.equal(res.statusCode, 201);
assert.notEqual(body.ok.indexOf('"test"'), -1);
if (!--count) cb();
});
});
};

@@ -1,20 +0,0 @@
var fs = require('fs');
var async = require('async');
var assert = require('assert');
var Server = require('./lib/server');
var readfile = require('fs').readFileSync;
var ex = module.exports;
var forks = process.forks = [];
process.server = new Server('http://localhost:55551/');
process.server2 = new Server('http://localhost:55552/');
ex['Startup:'] = require('./startup');
ex['Basic:'] = require('./basic');
ex['Mirror:'] = require('./mirror');
process.on('exit', function() {
if (forks[0]) forks[0].kill();
if (forks[1]) forks[1].kill();
});

17 test/unit/mystreams.js Normal file

@@ -0,0 +1,17 @@
var ReadTarball = require('../../lib/streams').ReadTarballStream
describe('mystreams', function() {
it('should delay events', function(cb) {
var test = new ReadTarball()
test.abort()
setTimeout(function() {
test.abort = function() {
cb()
}
test.abort = function() {
throw new Error('fail')
}
}, 10)
})
})

88 test/unit/no_proxy.js Normal file

@@ -0,0 +1,88 @@
var assert = require('assert')
, Storage = require('../../lib/up-storage')
require('../../lib/logger').setup([])
function setup(host, config, mainconfig) {
config.url = host
return new Storage(config, mainconfig)
}
describe('Use proxy', function() {
it('should work fine without proxy', function() {
var x = setup('http://x/x', {}, {})
assert.equal(x.proxy, null)
})
it('local config should take priority', function() {
var x = setup('http://x/x', {http_proxy: '123'}, {http_proxy: '456'})
assert.equal(x.proxy, '123')
})
it('no_proxy is invalid', function() {
var x = setup('http://x/x', {http_proxy: '123', no_proxy: false}, {})
assert.equal(x.proxy, '123')
var x = setup('http://x/x', {http_proxy: '123', no_proxy: null}, {})
assert.equal(x.proxy, '123')
var x = setup('http://x/x', {http_proxy: '123', no_proxy: []}, {})
assert.equal(x.proxy, '123')
var x = setup('http://x/x', {http_proxy: '123', no_proxy: ''}, {})
assert.equal(x.proxy, '123')
})
it('no_proxy - simple/include', function() {
var x = setup('http://localhost', {http_proxy: '123'}, {no_proxy: 'localhost'})
assert.equal(x.proxy, undefined)
})
it('no_proxy - simple/not', function() {
var x = setup('http://localhost', {http_proxy: '123'}, {no_proxy: 'blah'})
assert.equal(x.proxy, '123')
})
it('no_proxy - various, single string', function() {
var x = setup('http://blahblah', {http_proxy: '123'}, {no_proxy: 'blah'})
assert.equal(x.proxy, '123')
var x = setup('http://blah.blah', {}, {http_proxy: '123', no_proxy: 'blah'})
assert.equal(x.proxy, null)
var x = setup('http://blahblah', {}, {http_proxy: '123', no_proxy: '.blah'})
assert.equal(x.proxy, '123')
var x = setup('http://blah.blah', {http_proxy: '123', no_proxy: '.blah'}, {})
assert.equal(x.proxy, null)
var x = setup('http://blah', {http_proxy: '123', no_proxy: '.blah'}, {})
assert.equal(x.proxy, null)
var x = setup('http://blahh', {http_proxy: '123', no_proxy: 'blah'}, {})
assert.equal(x.proxy, '123')
})
it('no_proxy - various, array', function() {
var x = setup('http://blahblah', {http_proxy: '123'}, {no_proxy: 'foo,bar,blah'})
assert.equal(x.proxy, '123')
var x = setup('http://blah.blah', {http_proxy: '123'}, {no_proxy: 'foo,bar,blah'})
assert.equal(x.proxy, null)
var x = setup('http://blah.foo', {http_proxy: '123'}, {no_proxy: 'foo,bar,blah'})
assert.equal(x.proxy, null)
var x = setup('http://foo.baz', {http_proxy: '123'}, {no_proxy: 'foo,bar,blah'})
assert.equal(x.proxy, '123')
var x = setup('http://blahblah', {http_proxy: '123'}, {no_proxy: ['foo','bar','blah']})
assert.equal(x.proxy, '123')
var x = setup('http://blah.blah', {http_proxy: '123'}, {no_proxy: ['foo','bar','blah']})
assert.equal(x.proxy, null)
})
it('no_proxy - hostport', function() {
var x = setup('http://localhost:80', {http_proxy: '123'}, {no_proxy: 'localhost'})
assert.equal(x.proxy, null)
var x = setup('http://localhost:8080', {http_proxy: '123'}, {no_proxy: 'localhost'})
assert.equal(x.proxy, null)
})
it('no_proxy - secure', function() {
var x = setup('https://something', {http_proxy: '123'}, {})
assert.equal(x.proxy, null)
var x = setup('https://something', {https_proxy: '123'}, {})
assert.equal(x.proxy, '123')
var x = setup('https://something', {http_proxy: '456', https_proxy: '123'}, {})
assert.equal(x.proxy, '123')
})
})
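Taken together the cases encode a suffix rule: an entry matches the hostname itself or any subdomain of it, a leading dot is equivalent, ports are ignored, and false/null/''/[] disable the list. A compact sketch consistent with every case above (illustrative, not lib/up-storage.js itself):

var url = require('url')
function matches_no_proxy(uplink_url, no_proxy) {
  if (!no_proxy) return false                          // false/null/'' disable it
  if (typeof no_proxy === 'string') no_proxy = no_proxy.split(',')
  if (no_proxy.length === 0) return false
  var hostname = url.parse(uplink_url).hostname        // port is dropped here
  return no_proxy.some(function(entry) {
    if (entry[0] !== '.') entry = '.' + entry
    return hostname === entry.slice(1) ||              // exact host
           hostname.slice(-entry.length) === entry     // dot-boundary suffix
  })
}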

@@ -0,0 +1,34 @@
var assert = require('assert')
, parse_interval = require('../../lib/config').parse_interval
describe('Parse interval', function() {
function add_test(str, res) {
it('parse ' + str, function() {
if (res === null) {
assert.throws(function() {
console.log(parse_interval(str))
})
} else {
assert.strictEqual(parse_interval(str), res)
}
})
}
add_test(12345, 12345000)
add_test('1000', 1000000)
add_test('1.5s', 1500)
add_test('25ms', 25)
add_test('2m', 2*1000*60)
add_test('3h', 3*1000*60*60)
add_test('0.5d', 0.5*1000*60*60*24)
add_test('0.5w', 0.5*1000*60*60*24*7)
add_test('1M', 1000*60*60*24*30)
add_test('5s 20ms', 5020)
add_test('1y', 1000*60*60*24*365)
add_test('1y 5', null)
add_test('1m 1m', null)
add_test('1m 1y', null)
add_test('1y 1M 1w 1d 1h 1m 1s 1ms', 34822861001)
add_test(' 5s 25ms ', 5025)
})
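These cases determine the grammar fairly tightly: a bare number means seconds, suffixed terms add up, units must strictly decrease left to right, and a suffix-less term is only legal on its own. A sketch that satisfies every case above (the real implementation is parse_interval in lib/config.js):

var units = { ms: 1, s: 1000, m: 60000, h: 3600000, d: 86400000,
              w: 7 * 86400000, M: 30 * 86400000, y: 365 * 86400000 }
function parse_interval(str) {
  if (typeof str === 'number') return str * 1000        // bare numbers are seconds
  var result = 0, last = Infinity
  str.split(/\s+/).forEach(function(token) {
    if (!token) return                                  // tolerate surrounding spaces
    var m = token.match(/^(\d+(?:\.\d+)?)(ms|s|m|h|d|w|M|y)?$/)
    if (!m || (!m[2] && last !== Infinity))
      throw new Error('invalid interval: ' + token)     // rejects '1y 5'
    var unit = m[2] ? units[m[2]] : 1000
    if (unit >= last) throw new Error('units must decrease: ' + token)
    last = unit
    result += parseFloat(m[1]) * unit
  })
  return result
}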

53 test/unit/st_merge.js Normal file

@@ -0,0 +1,53 @@
var assert = require('assert')
, semver_sort = require('../../lib/utils').semver_sort
, merge = require('../../lib/storage')._merge_versions
require('../../lib/logger').setup([])
describe('Merge', function() {
it('simple', function() {
var x = {
versions: {a:1,b:1,c:1},
'dist-tags': {},
}
merge(x, {versions: {a:2,q:2}})
assert.deepEqual(x, {
versions: {a:1,b:1,c:1,q:2},
'dist-tags': {},
})
})
it('dist-tags - compat', function() {
var x = {
versions: {},
'dist-tags': {q:'1.1.1',w:['2.2.2']},
}
merge(x, {'dist-tags':{q:'2.2.2',w:'3.3.3',t:'4.4.4'}})
assert.deepEqual(x, {
versions: {},
'dist-tags': {q:['1.1.1','2.2.2'],w:['2.2.2','3.3.3'],t:['4.4.4']},
})
})
it('dist-tags - sort', function() {
var x = {
versions: {},
'dist-tags': {w:['2.2.2','1.1.1','12.2.2','2.2.2-rc2']},
}
merge(x, {'dist-tags':{w:'3.3.3'}})
assert.deepEqual(x, {
versions: {},
'dist-tags': {w:["1.1.1","2.2.2-rc2","2.2.2","3.3.3","12.2.2"]},
})
})
it('semver_sort', function() {
assert.deepEqual(semver_sort(['1.2.3','1.2','1.2.3a','1.2.3c','1.2.3-b']),
[ '1.2.3a',
'1.2.3-b',
'1.2.3c',
'1.2.3' ]
)
})
})
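The compat cases pin down how scalar dist-tags written by older versions get folded into the array form while staying semver-sorted. Roughly (an illustrative sketch; the real logic is _merge_versions):

// fold one incoming tag value into the local entry
function merge_tag(local, remote) {
  if (!Array.isArray(local)) local = local == null ? [] : [local]
  if (local.indexOf(remote) === -1) local.push(remote)
  return semver_sort(local)   // same helper the 'semver_sort' case checks
}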

42 test/unit/utils.js Normal file

@@ -0,0 +1,42 @@
var assert = require('assert')
, validate = require('../../lib/utils').validate_name
describe('Validate', function() {
it('good ones', function() {
assert(validate('sinopia'))
assert(validate('some.weird.package-zzz'))
})
it('uppercase', function() {
assert(validate('EVE'))
assert(validate('JSONStream'))
})
it('no package.json', function() {
assert(!validate('package.json'))
})
it('no path seps', function() {
assert(!validate('some/thing'))
assert(!validate('some\\thing'))
})
it('no hidden', function() {
assert(!validate('.bin'))
})
it('no reserved', function() {
assert(!validate('favicon.ico'))
assert(!validate('node_modules'))
assert(!validate('__proto__'))
})
it('other', function() {
assert(!validate('pkg@'))
assert(!validate('pk g'))
assert(!validate('pk\tg'))
assert(!validate('pk%20g'))
assert(!validate('pk+g'))
assert(!validate('pk:g'))
})
})
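One way to read these cases as a single rule -- a sketch that passes every assertion above, though lib/utils.js may well phrase it differently:

function validate_name(name) {
  if (typeof name !== 'string' || name === '') return false
  var reserved = ['package.json', 'favicon.ico', 'node_modules', '__proto__']
  return name[0] !== '.' &&                           // no hidden files
         reserved.indexOf(name.toLowerCase()) === -1 &&
         encodeURIComponent(name) === name            // bans / \ % @ + : and whitespace
}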

33 test/unit/validate_all.js Normal file

@@ -0,0 +1,33 @@
// ensure that all arguments are validated
var assert = require('assert')
describe('index.js app', function() {
var source = require('fs').readFileSync(__dirname + '/../../lib/index.js', 'utf8')
var very_scary_regexp = /\n\s*app\.(\w+)\s*\(\s*(("[^"]*")|('[^']*'))\s*,/g
var m
var params = {}
while ((m = very_scary_regexp.exec(source)) != null) {
if (m[1] === 'set') continue
var inner = m[2].slice(1, m[2].length-1)
var t
inner.split('/').forEach(function(x) {
if (m[1] === 'param') {
params[x] = 'ok'
} else if (t = x.match(/^:([^?:]*)\??$/)) {
params[t[1]] = params[t[1]] || m[0].trim()
}
})
}
Object.keys(params).forEach(function(param) {
it('should validate ":'+param+'"', function() {
assert.equal(params[param], 'ok')
})
})
})
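In effect the scan asserts that every ':param' appearing in a route registered on app has a matching app.param() validator. Hypothetical lines in lib/index.js that would satisfy it (validate_name_middleware is a made-up name):

app.param('package', validate_name_middleware)   // marks ':package' as validated
app.get('/:package', function(req, res, next) { /* ... */ })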