var assert = require('assert')
var Crypto = require('crypto')
var fs = require('fs')
var Error = require('http-errors')
var Path = require('path')
var URL = require('url')
var fs_storage = require('./local-fs')
var Logger = require('./logger')
var Search = require('./search')
var MyStreams = require('./streams')
var Utils = require('./utils')

// name of the metadata file kept in every package's storage directory
var info_file = 'package.json'

//
// Implements Storage interface
// (same for storage.js, local-storage.js, up-storage.js)
//
function Storage(config) {
  var self = Object.create(Storage.prototype)
  self.config = config
  self.logger = Logger.logger.child({ sub: 'fs' })
  return self
}

// returns the minimal package file
function get_boilerplate(name) {
  return {
    // standard things
    name: name,
    versions: {},
    'dist-tags': {},

    // our own object
    '_distfiles': {},
    '_attachments': {},
    '_uplinks': {},
  }
}

// Log an unexpected fs-level error and convert it into an opaque HTTP 500,
// so internal paths/messages don't leak to the client.
Storage.prototype._internal_error = function(err, file, message) {
  this.logger.error( { err: err, file: file }
                   , message + ' @{file}: @{!err.message}' )
  return Error[500]()
}

// Create a brand-new package on disk.
//  - 404 when this package has no storage configured
//  - 409 when the package already exists
// On success, indexes the 'latest' version (if present) in Search.
Storage.prototype.add_package = function(name, info, callback) {
  // note: removed an unused `var self = this` — this function only uses `this` once
  var storage = this.storage(name)
  if (!storage) return callback( Error[404]('this package cannot be added') )

  storage.create_json(info_file, get_boilerplate(name), function(err) {
    // NOTE(review): 'EEXISTS' (with trailing S) is presumably a custom code
    // raised by ./local-fs rather than the core fs 'EEXIST' — confirm there
    if (err && err.code === 'EEXISTS') {
      return callback( Error[409]('this package is already present') )
    }

    var latest = info['dist-tags'].latest
    if (latest && info.versions[latest]) {
      Search.add(info.versions[latest])
    }
    callback()
  })
}

// Unpublish the entire package: remove package.json, then every attachment,
// then (best-effort) the directory itself. The search index and local list
// are updated immediately, without waiting for the fs operations to finish.
Storage.prototype.remove_package = function(name, callback) {
  var self = this
  self.logger.info( { name: name }
                  , 'unpublishing @{name} (all)' )

  var storage = self.storage(name)
  if (!storage) return callback( Error[404]('no such package available') )

  storage.read_json(info_file, function(err, data) {
    if (err) {
      if (err.code === 'ENOENT') {
        return callback( Error[404]('no such package available') )
      } else {
        return callback(err)
      }
    }
    self._normalize_package(data)

    storage.unlink(info_file, function(err) {
      if (err) return callback(err)

      var files = Object.keys(data._attachments)

      // remove attachments one at a time; individual unlink errors are
      // deliberately ignored so one bad file doesn't abort the unpublish
      function unlinkNext(cb) {
        if (files.length === 0) return cb()

        var file = files.shift()
        storage.unlink(file, function() {
          unlinkNext(cb)
        })
      }

      unlinkNext(function() {
        // try to unlink the directory, but ignore errors because it can fail
        storage.rmdir('.', function(err) {
          callback(err)
        })
      })
    })
  })

  // runs synchronously, before the async removal above completes
  Search.remove(name)
  self.config.localList.remove(name)
}

// Read package metadata from disk; when the package does not exist yet
// (no storage configured, or no package.json on disk) a fresh boilerplate
// record is returned instead of an error.
Storage.prototype._read_create_package = function(name, callback) {
  var self = this
  var storage = self.storage(name)
  if (!storage) {
    var data = get_boilerplate(name)
    self._normalize_package(data)
    return callback(null, data)
  }

  storage.read_json(info_file, function(err, data) {
    // TODO: race condition
    if (err) {
      if (err.code === 'ENOENT') {
        // if package doesn't exist, we create it here
        data = get_boilerplate(name)
      } else {
        return callback(self._internal_error(err, info_file, 'error reading'))
      }
    }
    self._normalize_package(data)
    callback(null, data)
  })
}
https://github.com/rlidwka/sinopia/issues/166 var tarball_url = URL.parse(hash.url) var uplink_url = URL.parse(self.config.uplinks[verdata._sinopia_uplink].url) if (uplink_url.host === tarball_url.host) { tarball_url.protocol = uplink_url.protocol hash.registry = verdata._sinopia_uplink hash.url = URL.format(tarball_url) } } } } } } for (var tag in newdata['dist-tags']) { if (!Array.isArray(data['dist-tags'][tag]) || data['dist-tags'][tag].length != newdata['dist-tags'][tag].length) { // backward compat var need_change = true } else { for (var i=0; i startkey && Utils.validate_name(file)) { list.push({ time: stats.mtime, name: file }) } if (++i !== filesL) { return false } return callback(null, list) }) }) }) } // // This function allows to update the package thread-safely // // Arguments: // - name - package name // - updateFn - function(package, cb) - update function // - callback - callback that gets invoked after it's all updated // // Algorithm: // 1. lock package.json for writing // 2. read package.json // 3. updateFn(pkg, cb), and wait for cb // 4. write package.json.tmp // 5. move package.json.tmp package.json // 6. callback(err?) 
// Storage.prototype.update_package = function(name, updateFn, _callback) { var self = this var storage = self.storage(name) if (!storage) return _callback( Error[404]('no such package available') ) storage.lock_and_read_json(info_file, function(err, fd, json) { function callback() { var _args = arguments if (fd) { fs.close(fd, function(err) { if (err) return _callback(err) _callback.apply(null, _args) }) } else { _callback.apply(null, _args) } } if (err) { if (err.code === 'EAGAIN') { return callback( Error[503]('resource temporarily unavailable') ) } else if (err.code === 'ENOENT') { return callback( Error[404]('no such package available') ) } else { return callback(err) } } self._normalize_package(json) updateFn(json, function(err) { if (err) return callback(err) self._write_package(name, json, callback) }) }) } Storage.prototype._normalize_package = function(pkg) { ;['versions', 'dist-tags', '_distfiles', '_attachments', '_uplinks'].forEach(function(key) { if (!Utils.is_object(pkg[key])) pkg[key] = {} }) if (typeof(pkg._rev) !== 'string') pkg._rev = '0-0000000000000000' } Storage.prototype._write_package = function(name, json, callback) { // calculate revision a la couchdb if (typeof(json._rev) !== 'string') json._rev = '0-0000000000000000' var rev = json._rev.split('-') json._rev = ((+rev[0] || 0) + 1) + '-' + Crypto.pseudoRandomBytes(8).toString('hex') var storage = this.storage(name) if (!storage) return callback() storage.write_json(info_file, json, callback) } Storage.prototype.storage = function(package) { var path = this.config.get_package_setting(package, 'storage') if (path == null) path = this.config.storage if (path == null || path === false) { this.logger.debug( { name: package } , 'this package has no storage defined: @{name}' ) return null } return Path_Wrapper( Path.join( Path.resolve(Path.dirname(this.config.self_path), path), package ) ) } var Path_Wrapper = (function() { // a wrapper adding paths to fs_storage methods function Wrapper(path) { 
var self = Object.create(Wrapper.prototype) self.path = path return self } for (var i in fs_storage) { if (fs_storage.hasOwnProperty(i)) { Wrapper.prototype[i] = wrapper(i) } } function wrapper(method) { return function(/*...*/) { var args = Array.prototype.slice.apply(arguments) args[0] = Path.join(this.path, args[0] || '') return fs_storage[method].apply(null, args) } } return Wrapper })() module.exports = Storage