// verdaccio/lib/local-storage.js
// (mirror of https://github.com/verdaccio/verdaccio.git, synced 2024-11-13 03:35:52 +01:00)

// 2013-10-18 22:46:13 +02:00
var fs = require('fs')
, semver = require('semver')
, Path = require('path')
// 2013-10-22 09:00:04 +02:00
, crypto = require('crypto')
// 2013-10-18 22:46:13 +02:00
, fs_storage = require('./local-fs')
, UError = require('./error').UserError
, utils = require('./utils')
, mystreams = require('./streams')
, Logger = require('./logger')
, info_file = 'package.json'
// 2013-06-08 03:16:28 +02:00
// 2013-09-25 11:12:33 +02:00
//
// Implements Storage interface
// (same for storage.js, local-storage.js, up-storage.js)
//
// 2013-06-13 16:21:14 +02:00
function Storage(config) {
2013-10-18 22:46:13 +02:00
if (!(this instanceof Storage)) return new Storage(config)
this.config = config
var path = Path.resolve(Path.dirname(this.config.self_path), this.config.storage)
this.storage = new fs_storage(path)
this.logger = Logger.logger.child({sub: 'fs'})
return this
2013-06-13 16:21:14 +02:00
}
2013-06-18 20:14:55 +02:00
// Build the skeleton of a package metadata document (the minimal
// package file we store on disk before anything is published).
function get_boilerplate(name) {
  var pkg = {}

  // fields defined by the npm registry format
  pkg.name = name
  pkg.versions = {}
  pkg['dist-tags'] = {}

  // our own private bookkeeping objects
  pkg['_distfiles'] = {}
  pkg['_attachments'] = {}

  return pkg
}
// Log a low-level fs error and hand back a generic 500 for the client;
// the real cause stays in the logs only.
Storage.prototype._internal_error = function(err, file, msg) {
  var context = {
    err: err,
    file: this.storage.path_to(file),
  }
  this.logger.error(context, msg + ' @{file}: @{!err.message}')

  return new UError({
    status: 500,
    msg: 'internal server error',
  })
}
// Create the package.json for a brand new package.
// Fails with 409 if the package already exists on disk.
// `metadata` is currently unused here (versions are added later via add_version).
Storage.prototype.add_package = function(name, metadata, callback) {
  this.storage.create_json(name + '/' + info_file, get_boilerplate(name), function(err) {
    if (err && err.code === 'EEXISTS') {
      return callback(new UError({
        status: 409,
        msg: 'this package is already present'
      }));
    }
    // previously any other error was silently swallowed and success
    // reported; propagate it so callers know the package was not created
    if (err) return callback(err);
    callback();
  });
}
// Delete a package: remove its package.json, then try to remove the
// (hopefully empty) package directory.
Storage.prototype.remove_package = function(name, callback) {
  var self = this
  self.storage.unlink(name + '/' + info_file, function(err) {
    if (err) {
      if (err.code === 'ENOENT') {
        return callback(new UError({
          status: 404,
          msg: 'no such package available',
        }))
      }
      // previously other unlink errors were silently ignored; report them
      return callback(err)
    }
    // try to unlink the directory, but ignore errors because it can fail
    // (e.g. tarballs still present)
    self.storage.rmdir(name, function(err) {
      callback()
    })
  })
}
// Read a package file from disk, falling back to fresh boilerplate
// metadata (in memory only) when the package doesn't exist yet.
// Used by update_versions when syncing from an uplink.
Storage.prototype._read_create_package = function(name, callback) {
  var self = this
  var file = name + '/' + info_file

  self.storage.read_json(file, function(err, data) {
    // TODO: race condition
    if (err && err.code === 'ENOENT') {
      // missing package: start from an empty skeleton
      data = get_boilerplate(name)
    } else if (err) {
      return callback(self._internal_error(err, file, 'error reading'))
    }

    self._normalize_package(data)
    callback(null, data)
  })
}
// synchronize remote package info with the local one
// (newdata comes from an uplink; only writes to disk when something changed)
// TODO: readfile called twice
Storage.prototype.update_versions = function(name, newdata, callback) {
  var self = this;
  self._read_create_package(name, function(err, data) {
    if (err) return callback(err);
    var change = false;

    // import versions we don't have locally yet; existing local records
    // are never overwritten
    for (var ver in newdata.versions) {
      if (data.versions[ver] == null) {
        var verdata = newdata.versions[ver];
        // why does anyone need to keep that in database?
        delete verdata.readme;
        change = true;
        data.versions[ver] = verdata;

        // remember where this version's tarball lives upstream so we can
        // proxy it later; prefer the original (pre-rewrite) tarball url
        if (verdata.dist && verdata.dist.tarball) {
          var url = utils.parse_tarball_url(
            verdata.dist.__sinopia_orig_tarball || verdata.dist.tarball
          );
          // we do NOT overwrite any existing records
          if (url != null && data._distfiles[url.filename] == null) {
            data._distfiles[url.filename] = {
              url: verdata.dist.__sinopia_orig_tarball || verdata.dist.tarball,
              sha: verdata.dist.shasum,
            };
          }
        }
      }
    }

    for (var tag in newdata['dist-tags']) {
      // if tag is updated to reference latter version, that's fine
      // NOTE(review): as written, the local tag is overwritten when it is
      // missing OR when the remote value is NOT >= the local one — i.e. it
      // keeps the local tag when the remote moved it forward and replaces it
      // when the remote is older.  That looks inverted relative to the
      // comment above; confirm intended semantics before changing.
      var need_change =
        (data['dist-tags'][tag] == null) ||
        (!semver.gte(newdata['dist-tags'][tag], data['dist-tags'][tag]));

      if (need_change) {
        change = true;
        data['dist-tags'][tag] = newdata['dist-tags'][tag];
      }
    }

    if (change) {
      self._write_package(name, data, callback)
    } else {
      callback();
    }
  });
}
// Register a freshly published `version` in the package metadata and point
// `tag` (usually "latest") at it.  Runs under update_package's lock.
Storage.prototype.add_version = function(name, version, metadata, tag, callback) {
  var self = this
  self.update_package(name, function(pkg, done) {
    // why does anyone need to keep that in database?
    delete metadata.readme

    if (pkg.versions[version] != null) {
      return done(new UError({
        status: 409,
        msg: 'this version already present'
      }))
    }

    // if uploaded tarball has a different shasum, it's very likely that we have some kind of error
    if (utils.is_object(metadata.dist) && typeof(metadata.dist.tarball) === 'string') {
      // tarball name = last path component of the dist url
      var tarball = metadata.dist.tarball.replace(/.*\//, '')
      var attachment = pkg._attachments[tarball]

      if (utils.is_object(attachment)) {
        var stored_sha = attachment.shasum
        var uploaded_sha = metadata.dist.shasum

        if (stored_sha != null && uploaded_sha != null && stored_sha != uploaded_sha) {
          return done(new UError({
            status: 400,
            msg: 'shasum error, ' + stored_sha + ' != ' + uploaded_sha,
          }))
        }
        // link the stored attachment to this version
        attachment.version = version
      }
    }

    pkg.versions[version] = metadata
    pkg['dist-tags'][tag] = version
    done()
  }, callback)
}
//
// Accept an uploaded tarball for package `name`.
//
// Returns an UploadTarballStream the caller pipes the request body into.
// While data flows through we compute its sha1 and byte length; on success
// the attachment (with its shasum) is recorded in package.json under lock.
//
Storage.prototype.add_tarball = function(name, filename) {
  var stream = new mystreams.UploadTarballStream()
    , _transform = stream._transform
    , length = 0
    , shasum = crypto.createHash('sha1')

  // wrap _transform so every chunk updates the running sha1 and length
  stream._transform = function(data) {
    shasum.update(data)
    length += data.length
    _transform.apply(stream, arguments)
  }

  var self = this

  // refuse names that would collide with package metadata or pollute the
  // prototype; `filename` is what actually ends up on disk next to
  // package.json, so it must be validated as well as `name`
  if (name === info_file || name === '__proto__'
      || filename === info_file || filename === '__proto__') {
    // defer the emit so the caller has a chance to attach 'error' listeners
    process.nextTick(function() {
      stream.emit('error', new UError({
        status: 403,
        msg: 'can\'t use this filename'
      }))
    })
    // previously execution fell through here and still opened a write stream
    return stream
  }

  var wstream = this.storage.write_stream(name + '/' + filename);
  wstream.on('error', function(err) {
    if (err.code === 'EEXISTS') {
      stream.emit('error', new UError({
        status: 409,
        msg: 'this tarball is already present'
      }));
    } else if (err.code === 'ENOENT') {
      // check if package exists to throw an appropriate message
      self.get_package(name, function(_err, res) {
        if (_err) {
          stream.emit('error', _err);
        } else {
          stream.emit('error', err);
        }
      });
    } else {
      stream.emit('error', err);
    }
  });

  wstream.on('open', function() {
    // re-emitting open because it's handled in storage.js
    stream.emit('open');
  })

  wstream.on('success', function() {
    // tarball is safely on disk; now record its checksum in package.json
    self.update_package(name, function updater(data, cb) {
      data._attachments[filename] = {
        shasum: shasum.digest('hex'),
      }
      cb()
    }, function(err) {
      if (err) {
        stream.emit('error', err)
      } else {
        stream.emit('success')
      }
    })
  })

  stream.abort = function() {
    wstream.abort()
  }

  stream.done = function() {
    if (!length) {
      stream.emit('error', new UError({
        status: 422,
        msg: 'refusing to accept zero-length file'
      }));
      wstream.abort();
    } else {
      wstream.done();
    }
  };

  stream.pipe(wstream);

  return stream;
}
// Stream a stored tarball back to the client.
// A missing file is reported as a 404 UError; other fs errors pass through.
Storage.prototype.get_tarball = function(name, filename, callback) {
  var stream = new mystreams.ReadTarballStream()
  var rstream = this.storage.read_stream(name + '/' + filename)

  stream.abort = function() {
    rstream.close()
  }

  rstream.on('error', function(err) {
    var not_found = err && err.code === 'ENOENT'
    if (not_found) {
      stream.emit('error', new UError({
        status: 404,
        msg: 'no such file available',
      }))
    } else {
      stream.emit('error', err)
    }
  })

  rstream.on('open', function() {
    // re-emitting open because it's handled in storage.js
    stream.emit('open')
    rstream.pipe(stream)
  })

  return stream
}
// Load package metadata from disk.
// ENOENT maps to a 404; anything else is logged and surfaced as a 500.
Storage.prototype.get_package = function(name, callback) {
  var self = this
  var file = name + '/' + info_file

  self.storage.read_json(file, function(err, result) {
    if (err && err.code === 'ENOENT') {
      return callback(new UError({
        status: 404,
        msg: 'no such package available'
      }))
    }
    if (err) {
      return callback(self._internal_error(err, file, 'error reading'))
    }

    self._normalize_package(result)
    callback(err, result)
  })
}
//
// This function allows to update the package thread-safely
//
// Arguments:
// - name - package name
// - updateFn - function(package, cb) - update function
// - callback - callback that gets invoked after it's all updated
//
// Algorithm:
// 1. lock package.json for writing
// 2. read package.json
// 3. updateFn(pkg, cb), and wait for cb
// 4. write package.json.tmp
// 5. move package.json.tmp package.json
// 6. callback(err?)
//
Storage.prototype.update_package = function(name, updateFn, _callback) {
  var self = this
    , file = name + '/' + info_file
  self.storage.lock_and_read_json(file, function(err, fd, json) {
    self.logger.debug({file: file}, 'locking @{file}')

    // wrapper around _callback that always closes the lock fd (releasing
    // the lock) before notifying the caller, whatever the outcome
    function callback() {
      self.logger.debug({file: file}, 'unlocking @{file}')
      var _args = arguments
      if (fd) {
        fs.close(fd, function(err) {
          if (err) return _callback(err)
          _callback.apply(null, _args)
        })
      } else {
        _callback.apply(null, _args)
      }
    }

    if (err) {
      if (err.code === 'EAGAIN') {
        // somebody else is holding the lock right now
        return callback(new UError({
          status: 503,
          msg: 'resource temporarily unavailable'
        }))
      } else if (err.code === 'ENOENT') {
        return callback(new UError({
          status: 404,
          msg: 'no such package available',
        }))
      } else {
        return callback(err)
      }
    }

    // ensure all expected sections exist before handing to updateFn
    self._normalize_package(json)
    updateFn(json, function(err) {
      if (err) return callback(err)

      // persist (bumps _rev) and release the lock via callback
      self._write_package(name, json, callback)
    })
  })
}
// Make sure a package object read from disk has every section and a _rev
// string that the rest of this module relies on.
Storage.prototype._normalize_package = function(pkg) {
  var sections = ['versions', 'dist-tags', '_distfiles', '_attachments']
  for (var i = 0; i < sections.length; i++) {
    if (!utils.is_object(pkg[sections[i]])) {
      pkg[sections[i]] = {}
    }
  }
  if (typeof(pkg._rev) !== 'string') {
    pkg._rev = '0-0000000000000000'
  }
}
// Persist package metadata to disk, bumping its couchdb-style revision
// ("<sequence>-<random hex>") beforehand.
Storage.prototype._write_package = function(name, json, callback) {
  // calculate revision a la couchdb
  if (typeof(json._rev) !== 'string') json._rev = '0-0000000000000000'

  var seq = +json._rev.split('-')[0] || 0
  json._rev = (seq + 1) + '-' + crypto.pseudoRandomBytes(16).toString('hex')

  this.storage.write_json(name + '/' + info_file, json, callback)
}
module.exports = Storage;