mirror of https://github.com/verdaccio/verdaccio.git
synced 2024-11-13 03:35:52 +01:00

Migrate storages to classes

This commit is contained in:
parent d79f12d45a
commit 7970e52068
@@ -11,10 +11,10 @@ var Storage = require('./storage')
module.exports = function(config_hash) {
Logger.setup(config_hash.logs)

var config = Config(config_hash)
var storage = Storage(config)
var auth = Auth(config)
var app = express()
var config = Config(config_hash);
var storage = new Storage(config);
var auth = Auth(config);
var app = express();

// run in production mode by default, just in case
// it shouldn't make any difference anyway
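
Note on the hunk above: the only behavioural change in this file is that the local storage handle is now created with `new`, because lib/local-storage.js now exports an ES6 class instead of a factory function. A minimal sketch of the difference (the `config` variable is the one from the hunk; the comments are editorial):

  // Before this commit: Storage() was a factory that built its instance with
  // Object.create(Storage.prototype), so calling it without `new` worked.
  var storage = Storage(config)

  // After this commit: Storage is a class; calling it without `new` throws a
  // TypeError, so the call site has to be updated as shown in the diff.
  var storage = new Storage(config);
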
@@ -1,30 +1,19 @@
"use strict";

var assert = require('assert')
var async = require('async')
var Crypto = require('crypto')
var fs = require('fs')
var Error = require('http-errors')
var Path = require('path')
var Stream = require('readable-stream')
var URL = require('url')
var fs_storage = require('./local-fs')
var Logger = require('./logger')
var Search = require('./search')
var MyStreams = require('./streams')
var Utils = require('./utils')
var info_file = 'package.json'

//
// Implements Storage interface
// (same for storage.js, local-storage.js, up-storage.js)
//
class Storage {
constructor(config) {
this.config = config
this.logger = Logger.logger.child({ sub: 'fs' })
}
}
const assert = require('assert');
const async = require('async');
const Crypto = require('crypto');
const fs = require('fs');
const Error = require('http-errors');
const Path = require('path');
const Stream = require('readable-stream');
const URL = require('url');
const fs_storage = require('./local-fs');
const Logger = require('./logger');
const Search = require('./search');
const MyStreams = require('./streams');
const Utils = require('./utils');
const info_file = 'package.json';

// returns the minimal package file
function get_boilerplate(name) {
@@ -39,15 +28,37 @@ function get_boilerplate(name) {
'_attachments': {},
'_uplinks': {},
}
}
};

Storage.prototype._internal_error = function(err, file, message) {
//
// Implements Storage interface
// (same for storage.js, local-storage.js, up-storage.js)
//
class Storage {
constructor(config) {
this.config = config
this.logger = Logger.logger.child({ sub: 'fs' })
}

/**
*
* @param {*} err
* @param {*} file
* @param {*} message
*/
_internal_error(err, file, message) {
this.logger.error( { err: err, file: file }
, message + ' @{file}: @{!err.message}' )
return Error[500]()
}
}

Storage.prototype.add_package = function(name, info, callback) {
/**
*
* @param {*} name
* @param {*} info
* @param {*} callback
*/
add_package(name, info, callback) {
var storage = this.storage(name)
if (!storage) return callback( Error[404]('this package cannot be added') )

@@ -62,9 +73,14 @@ Storage.prototype.add_package = function(name, info, callback) {
}
callback()
})
}
}

Storage.prototype.remove_package = function(name, callback) {
/**
*
* @param {*} name
* @param {*} callback
*/
remove_package(name, callback) {
this.logger.info( { name: name }
, 'unpublishing @{name} (all)')

@@ -106,9 +122,14 @@ Storage.prototype.remove_package = function(name, callback) {

Search.remove(name)
this.config.localList.remove(name)
}
}

Storage.prototype._read_create_package = function(name, callback) {
/**
*
* @param {*} name
* @param {*} callback
*/
_read_create_package(name, callback) {
var storage = this.storage(name)
if (!storage) {
var data = get_boilerplate(name)
@@ -128,11 +149,15 @@ Storage.prototype._read_create_package = function(name, callback) {
this._normalize_package(data)
callback(null, data)
})
}
}

// synchronize remote package info with the local one
// TODO: readfile called twice
Storage.prototype.update_versions = function(name, newdata, callback) {
/**
* Synchronize remote package info with the local one
* @param {*} name
* @param {*} newdata
* @param {*} callback
*/
update_versions(name, newdata, callback) {
this._read_create_package(name, (err, data) => {
if (err) return callback(err)

@@ -156,7 +181,7 @@ Storage.prototype.update_versions = function(name, newdata, callback) {
url: verdata.dist.tarball,
sha: verdata.dist.shasum,
}

// if (verdata[Symbol('_verdaccio_uplink')]) {
if (verdata._verdaccio_uplink) {
// if we got this information from a known registry,
// use the same protocol for the tarball
@@ -204,9 +229,17 @@ Storage.prototype.update_versions = function(name, newdata, callback) {
callback(null, data)
}
})
}
}

Storage.prototype.add_version = function(name, version, metadata, tag, callback) {
/**
*
* @param {*} name
* @param {*} version
* @param {*} metadata
* @param {*} tag
* @param {*} callback
*/
add_version(name, version, metadata, tag, callback) {
this.update_package(name, (data, cb) => {
// keep only one readme per package
data.readme = metadata.readme
@@ -227,7 +260,6 @@ Storage.prototype.add_version = function(name, version, metadata, tag, callback)
+ ' != ' + metadata.dist.shasum) )
}
}

data._attachments[tarball].version = version
}
}
@@ -237,9 +269,15 @@ Storage.prototype.add_version = function(name, version, metadata, tag, callback)
this.config.localList.add(name)
cb()
}, callback)
}
}

Storage.prototype.merge_tags = function(name, tags, callback) {
/**
*
* @param {*} name
* @param {*} tags
* @param {*} callback
*/
merge_tags(name, tags, callback) {
this.update_package(name, function updater(data, cb) {
for (let t in tags) {
if (tags[t] === null) {
@@ -255,9 +293,15 @@ Storage.prototype.merge_tags = function(name, tags, callback) {
}
cb()
}, callback)
}
}

Storage.prototype.replace_tags = function(name, tags, callback) {
/**
*
* @param {*} name
* @param {*} tags
* @param {*} callback
*/
replace_tags(name, tags, callback) {
this.update_package(name, function updater(data, cb) {
data['dist-tags'] = {}

@@ -275,10 +319,16 @@ Storage.prototype.replace_tags = function(name, tags, callback) {
}
cb()
}, callback)
}
}

// currently supports unpublishing only
Storage.prototype.change_package = function(name, metadata, revision, callback) {
/**
* Currently supports unpublishing only
* @param {*} name
* @param {*} metadata
* @param {*} revision
* @param {*} callback
*/
change_package(name, metadata, revision, callback) {

if (!Utils.is_object(metadata.versions) || !Utils.is_object(metadata['dist-tags'])) {
return callback( Error[422]('bad data') )
@@ -304,9 +354,16 @@ Storage.prototype.change_package = function(name, metadata, revision, callback)
if (err) return callback(err)
callback()
})
}
}

Storage.prototype.remove_tarball = function(name, filename, revision, callback) {
/**
*
* @param {*} name
* @param {*} filename
* @param {*} revision
* @param {*} callback
*/
remove_tarball(name, filename, revision, callback) {
assert(Utils.validate_name(filename))

this.update_package(name, (data, cb) => {
@@ -321,9 +378,14 @@ Storage.prototype.remove_tarball = function(name, filename, revision, callback)
var storage = this.storage(name)
if (storage) storage.unlink(filename, callback)
})
}
}

Storage.prototype.add_tarball = function(name, filename) {
/**
*
* @param {*} name
* @param {*} filename
*/
add_tarball(name, filename) {
assert(Utils.validate_name(filename))

var stream = MyStreams.UploadTarballStream()
@@ -405,9 +467,15 @@ Storage.prototype.add_tarball = function(name, filename) {
stream.pipe(wstream)

return stream
}
}

Storage.prototype.get_tarball = function(name, filename, callback) {
/**
*
* @param {*} name
* @param {*} filename
* @param {*} callback
*/
get_tarball(name, filename, callback) {
assert(Utils.validate_name(filename))
var self = this

@@ -441,16 +509,21 @@ Storage.prototype.get_tarball = function(name, filename, callback) {
rstream.pipe(stream)
})
return stream
}
}

Storage.prototype.get_package = function(name, options, callback) {
/**
*
* @param {*} name
* @param {*} options
* @param {*} callback
*/
get_package(name, options, callback) {
if (typeof(options) === 'function') {
callback = options, options = {};
}

var storage = this.storage(name)
if (!storage) return callback( Error[404]('no such package available') )

storage.read_json(info_file, (err, result) => {
if (err) {
if (err.code === 'ENOENT') {
@@ -462,22 +535,25 @@ Storage.prototype.get_package = function(name, options, callback) {
this._normalize_package(result)
callback(err, result)
})
}
}

// walks through each package and calls `on_package` on them
Storage.prototype._each_package = function (on_package, on_end) {
/**
* Walks through each package and calls `on_package` on them
* @param {*} on_package
* @param {*} on_end
*/
_each_package(on_package, on_end) {
var storages = {}

storages[this.config.storage] = true;

if (this.config.packages) {
Object.keys(this.config.packages || {}).map(function (pkg) {
Object.keys(this.packages || {}).map( pkg => {
if (this.config.packages[pkg].storage) {
storages[this.config.packages[pkg].storage] = true
}
})
}

const base = Path.dirname(this.config.self_path);

async.eachSeries(Object.keys(storages), function (storage, cb) {
@@ -511,26 +587,23 @@ Storage.prototype._each_package = function (on_package, on_end) {
}
}, cb)
})
}, on_end)
}
}, on_end);
}

//
// This function allows to update the package thread-safely
//
// Arguments:
// - name - package name
// - updateFn - function(package, cb) - update function
// - callback - callback that gets invoked after it's all updated
//
// Algorithm:
// 1. lock package.json for writing
// 2. read package.json
// 3. updateFn(pkg, cb), and wait for cb
// 4. write package.json.tmp
// 5. move package.json.tmp package.json
// 6. callback(err?)
//
Storage.prototype.update_package = function(name, updateFn, _callback) {
/**
* This function allows to update the package thread-safely
Algorithm:
1. lock package.json for writing
2. read package.json
3. updateFn(pkg, cb), and wait for cb
4. write package.json.tmp
5. move package.json.tmp package.json
6. callback(err?)
* @param {*} name package name
* @param {*} updateFn function(package, cb) - update function
* @param {*} _callback callback that gets invoked after it's all updated
*/
update_package(name, updateFn, _callback) {
var self = this
var storage = self.storage(name)
if (!storage) return _callback( Error[404]('no such package available') )
@@ -571,8 +644,53 @@ Storage.prototype.update_package = function(name, updateFn, _callback) {
self._write_package(name, json, callback)
})
})
}
}

/**
*
* @param {*} startkey
* @param {*} options
*/
search(startkey, options) {
const stream = new Stream.PassThrough({ objectMode: true });

this._each_package((item, cb) => {
fs.stat(item.path, (err, stats) => {
if (err) {
return cb(err);
}

if (stats.mtime > startkey) {
this.get_package(item.name, options, function(err, data) {
if (err) {
return cb(err);
}

var versions = Utils.semver_sort(Object.keys(data.versions))
var latest = data['dist-tags'] && data['dist-tags'].latest ? data['dist-tags'].latest : versions.pop()

if (data.versions[latest]) {
stream.push({
name : data.versions[latest].name,
description : data.versions[latest].description,
'dist-tags' : { latest: latest },
maintainers : data.versions[latest].maintainers ||
[ data.versions[latest]._npmUser ].filter(Boolean),
author : data.versions[latest].author,
repository : data.versions[latest].repository,
readmeFilename : data.versions[latest].readmeFilename || '',
homepage : data.versions[latest].homepage,
keywords : data.versions[latest].keywords,
bugs : data.versions[latest].bugs,
license : data.versions[latest].license,
time : {
modified: item.time ? new Date(item.time).toISOString() : undefined
},
versions : {},
})
}

<<<<<<< HEAD
Storage.prototype.search = function(startkey, options) {
var stream = new Stream.PassThrough({ objectMode: true })

@@ -603,10 +721,9 @@ Storage.prototype.search = function(startkey, options) {
license : data.versions[latest].license,
time : { modified: item.time ? new Date(item.time).toISOString() : undefined },
versions : {},
})
}

=======
cb()
>>>>>>> Migrate storages to classes
})
} else {
cb()
@@ -618,9 +735,13 @@ Storage.prototype.search = function(startkey, options) {
})

return stream
}
}

Storage.prototype._normalize_package = function(pkg) {
/**
*
* @param {*} pkg
*/
_normalize_package(pkg) {
;['versions', 'dist-tags', '_distfiles', '_attachments', '_uplinks'].forEach(function(key) {
if (!Utils.is_object(pkg[key])) pkg[key] = {}
})
@@ -629,9 +750,15 @@ Storage.prototype._normalize_package = function(pkg) {
}
// normalize dist-tags
Utils.normalize_dist_tags(pkg)
}
}

Storage.prototype._write_package = function(name, json, callback) {
/**
*
* @param {*} name
* @param {*} json
* @param {*} callback
*/
_write_package(name, json, callback) {

// calculate revision a la couchdb
if (typeof(json._rev) !== 'string') {
@@ -643,11 +770,17 @@ Storage.prototype._write_package = function(name, json, callback) {
var storage = this.storage(name)
if (!storage) return callback()
storage.write_json(info_file, json, callback)
}
}

Storage.prototype.storage = function(pkg) {
var path = this.config.get_package_spec(pkg).storage
if (path == null) path = this.config.storage
/**
*
* @param {*} pkg
*/
storage(pkg) {
let path = this.config.get_package_spec(pkg).storage
if (path == null) {
path = this.config.storage
}
if (path == null || path === false) {
this.logger.debug( { name: pkg }
, 'this package has no storage defined: @{name}' )
@@ -659,6 +792,7 @@ Storage.prototype.storage = function(pkg) {
pkg
)
)
}
}

var Path_Wrapper = (function() {
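
The update_package docblock in the local-storage.js hunks above spells out a lock / read / modify / write-tmp / rename sequence. The diff only shows the method header, so the sketch below is not verdaccio's implementation; it is a self-contained approximation of those six steps using plain `fs` calls, with the lock modelled as an exclusive ".lock" file (all helper names here are assumptions):

  const fs = require('fs');
  const path = require('path');

  // Approximation of steps 1-6: lock, read, let the caller mutate the JSON,
  // write a temp file, then rename it over package.json so readers never see
  // a half-written file.
  function updatePackageSketch(dir, updateFn, callback) {
    const file = path.join(dir, 'package.json');
    const lock = file + '.lock';
    fs.open(lock, 'wx', (err, fd) => {                  // 1. lock package.json for writing
      if (err) return callback(err);                    // somebody else holds the lock
      const done = (err) => fs.close(fd, () => fs.unlink(lock, () => callback(err)));
      fs.readFile(file, 'utf8', (err, raw) => {         // 2. read package.json
        if (err) return done(err);
        let json;
        try { json = JSON.parse(raw); } catch (e) { return done(e); }
        updateFn(json, (err) => {                       // 3. updateFn(pkg, cb), wait for cb
          if (err) return done(err);
          fs.writeFile(file + '.tmp', JSON.stringify(json, null, '\t'), (err) => {  // 4. write package.json.tmp
            if (err) return done(err);
            fs.rename(file + '.tmp', file, done);       // 5. move tmp over package.json, 6. callback(err?)
          });
        });
      });
    });
  }
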
344 lib/storage.js
@@ -1,47 +1,50 @@
"use strict";

var assert = require('assert')
var async = require('async')
var Error = require('http-errors')
var Stream = require('stream')
var Local = require('./local-storage')
var Logger = require('./logger')
var MyStreams = require('./streams')
var Proxy = require('./up-storage')
var Utils = require('./utils')

module.exports = Storage
const assert = require('assert')
const async = require('async')
const Error = require('http-errors')
const Stream = require('stream')
const Local = require('./local-storage')
const Logger = require('./logger')
const MyStreams = require('./streams')
const Proxy = require('./up-storage')
const Utils = require('./utils')

//
// Implements Storage interface
// (same for storage.js, local-storage.js, up-storage.js)
//
function Storage(config) {
var self = Object.create(Storage.prototype)
self.config = config
class Storage {

/**
*
* @param {*} config
*/
constructor(config) {
this.config = config
// we support a number of uplinks, but only one local storage
// Proxy and Local classes should have similar API interfaces
self.uplinks = {}
for (var p in config.uplinks) {
self.uplinks[p] = Proxy(config.uplinks[p], config)
self.uplinks[p].upname = p
this.uplinks = {}
for (let p in config.uplinks) {
// instance for each up-link definition
this.uplinks[p] = new Proxy(config.uplinks[p], config)
this.uplinks[p].upname = p
}
// an instance for local storage
this.local = new Local(config)
this.logger = Logger.logger.child();
}
self.local = new Local(config)
self.logger = Logger.logger.child()

return self
}

//
// Add a {name} package to a system
//
// Function checks if package with the same name is available from uplinks.
// If it isn't, we create package locally
//
// Used storages: local (write) && uplinks
//
Storage.prototype.add_package = function(name, metadata, callback) {
/**
* Add a {name} package to a system
Function checks if package with the same name is available from uplinks.
If it isn't, we create package locally
Used storages: local (write) && uplinks
* @param {*} name
* @param {*} metadata
* @param {*} callback
*/
add_package(name, metadata, callback) {
var self = this

// NOTE:
@@ -97,93 +100,103 @@ Storage.prototype.add_package = function(name, metadata, callback) {
function publish_package(cb) {
self.local.add_package(name, metadata, callback)
}
}
}

//
// Add a new version of package {name} to a system
//
// Used storages: local (write)
//
Storage.prototype.add_version = function(name, version, metadata, tag, callback) {
/**
* Add a new version of package {name} to a system
Used storages: local (write)
* @param {*} name
* @param {*} version
* @param {*} metadata
* @param {*} tag
* @param {*} callback
*/
add_version(name, version, metadata, tag, callback) {
return this.local.add_version(name, version, metadata, tag, callback)
}
}

//
// Tags a package version with a provided tag
//
// Used storages: local (write)
//
Storage.prototype.merge_tags = function(name, tag_hash, callback) {
/**
* Tags a package version with a provided tag
Used storages: local (write)
* @param {*} name
* @param {*} tag_hash
* @param {*} callback
*/
merge_tags(name, tag_hash, callback) {
return this.local.merge_tags(name, tag_hash, callback)
}
}

//
// Tags a package version with a provided tag
//
// Used storages: local (write)
//
Storage.prototype.replace_tags = function(name, tag_hash, callback) {
/**
* Tags a package version with a provided tag
Used storages: local (write)
* @param {*} name
* @param {*} tag_hash
* @param {*} callback
*/
replace_tags(name, tag_hash, callback) {
return this.local.replace_tags(name, tag_hash, callback)
}
}

//
// Change an existing package (i.e. unpublish one version)
//
// Function changes a package info from local storage and all uplinks with
// write access.
//
// Used storages: local (write)
//
Storage.prototype.change_package = function(name, metadata, revision, callback) {
/**
* Change an existing package (i.e. unpublish one version)
Function changes a package info from local storage and all uplinks with write access./
Used storages: local (write)
* @param {*} name
* @param {*} metadata
* @param {*} revision
* @param {*} callback
*/
change_package(name, metadata, revision, callback) {
return this.local.change_package(name, metadata, revision, callback)
}
}

//
// Remove a package from a system
//
// Function removes a package from local storage
//
// Used storages: local (write)
//
Storage.prototype.remove_package = function(name, callback) {
/**
* Remove a package from a system
Function removes a package from local storage
Used storages: local (write)
* @param {*} name
* @param {*} callback
*/
remove_package(name, callback) {
return this.local.remove_package(name, callback)
}
}

//
// Remove a tarball from a system
//
// Function removes a tarball from local storage.
// Tarball in question should not be linked to in any existing
// versions, i.e. package version should be unpublished first.
//
// Used storages: local (write)
//
Storage.prototype.remove_tarball = function(name, filename, revision, callback) {
/**
Remove a tarball from a system
Function removes a tarball from local storage.
Tarball in question should not be linked to in any existing
versions, i.e. package version should be unpublished first.
Used storages: local (write)
* @param {*} name
* @param {*} filename
* @param {*} revision
* @param {*} callback
*/
remove_tarball(name, filename, revision, callback) {
return this.local.remove_tarball(name, filename, revision, callback)
}
}

//
// Upload a tarball for {name} package
//
// Function is syncronous and returns a WritableStream
//
// Used storages: local (write)
//
Storage.prototype.add_tarball = function(name, filename) {
/**
* Upload a tarball for {name} package
Function is syncronous and returns a WritableStream
Used storages: local (write)
* @param {*} name
* @param {*} filename
*/
add_tarball(name, filename) {
return this.local.add_tarball(name, filename)
}
}

//
// Get a tarball from a storage for {name} package
//
// Function is syncronous and returns a ReadableStream
//
// Function tries to read tarball locally, if it fails then it reads package
// information in order to figure out where we can get this tarball from
//
// Used storages: local || uplink (just one)
//
Storage.prototype.get_tarball = function(name, filename) {
/**
Get a tarball from a storage for {name} package
Function is syncronous and returns a ReadableStream
Function tries to read tarball locally, if it fails then it reads package
information in order to figure out where we can get this tarball from
Used storages: local || uplink (just one)
* @param {*} name
* @param {*} filename
*/
get_tarball(name, filename) {
var stream = MyStreams.ReadTarballStream()
stream.abort = function() {}

@@ -280,31 +293,32 @@ Storage.prototype.get_tarball = function(name, filename) {
on_open()
})
}
}
}

//
// Retrieve a package metadata for {name} package
//
// Function invokes local.get_package and uplink.get_package for every
// uplink with proxy_access rights against {name} and combines results
// into one json object
//
// Used storages: local && uplink (proxy_access)
//
Storage.prototype.get_package = function(name, options, callback) {
if (typeof(options) === 'function') callback = options, options = {}
/**
Retrieve a package metadata for {name} package
Function invokes local.get_package and uplink.get_package for every
uplink with proxy_access rights against {name} and combines results
into one json object
Used storages: local && uplink (proxy_access)
* @param {*} name
* @param {*} options
* @param {*} callback
*/
get_package(name, options, callback) {
if (typeof(options) === 'function') {
callback = options, options = {};
}

var self = this

self.local.get_package(name, options, function(err, data) {
this.local.get_package(name, options, (err, data) => {
if (err && (!err.status || err.status >= 500)) {
// report internal errors right away
return callback(err)
}

self._sync_package_with_uplinks(name, data, options, function(err, result, uplink_errors) {
this._sync_package_with_uplinks(name, data, options, function(err, result, uplink_errors) {
if (err) return callback(err)
var whitelist = [ '_rev', 'name', 'versions', 'dist-tags', 'readme' ]
const whitelist = [ '_rev', 'name', 'versions', 'dist-tags', 'readme' ]
for (var i in result) {
if (whitelist.indexOf(i) === -1) delete result[i]
}
@@ -317,26 +331,25 @@ Storage.prototype.get_package = function(name, options, callback) {
callback(null, result, uplink_errors)
})
})
}
}

//
// Retrieve remote and local packages more recent than {startkey}
//
// Function streams all packages from all uplinks first, and then
// local packages.
//
// Note that local packages could override registry ones just because
// they appear in JSON last. That's a trade-off we make to avoid
// memory issues.
//
// Used storages: local && uplink (proxy_access)
//
Storage.prototype.search = function(startkey, options) {
/**
Retrieve remote and local packages more recent than {startkey}
Function streams all packages from all uplinks first, and then
local packages.
Note that local packages could override registry ones just because
they appear in JSON last. That's a trade-off we make to avoid
memory issues.
Used storages: local && uplink (proxy_access)
* @param {*} startkey
* @param {*} options
*/
search(startkey, options) {
var self = this

var stream = new Stream.PassThrough({ objectMode: true })

async.eachSeries(Object.keys(self.uplinks), function(up_name, cb) {
async.eachSeries(Object.keys(this.uplinks), function(up_name, cb) {
// shortcut: if `local=1` is supplied, don't call uplinks
if (options.req.query.local !== undefined) return cb()

@@ -364,10 +377,14 @@ Storage.prototype.search = function(startkey, options) {
})
})

return stream
}
return stream;
}

Storage.prototype.get_local = function(callback) {
/**
*
* @param {*} callback
*/
get_local(callback) {
var self = this
var locals = this.config.localList.get()
var packages = []
@@ -397,16 +414,22 @@ Storage.prototype.get_local = function(callback) {
} else {
callback(null, [])
}
}
}

// function fetches package information from uplinks and synchronizes it with local data
// if package is available locally, it MUST be provided in pkginfo
// returns callback(err, result, uplink_errors)
Storage.prototype._sync_package_with_uplinks = function(name, pkginfo, options, callback) {
/**
* Function fetches package information from uplinks and synchronizes it with local data
if package is available locally, it MUST be provided in pkginfo
returns callback(err, result, uplink_errors)
* @param {*} name
* @param {*} pkginfo
* @param {*} options
* @param {*} callback
*/
_sync_package_with_uplinks(name, pkginfo, options, callback) {
var self = this

let exists = false;
if (!pkginfo) {
var exists = false
exists = false

pkginfo = {
name : name,
@@ -415,11 +438,11 @@ Storage.prototype._sync_package_with_uplinks = function(name, pkginfo, options,
_uplinks : {},
}
} else {
var exists = true
exists = true
}

var uplinks = []
for (var i in self.uplinks) {
for (let i in self.uplinks) {
if (self.config.can_proxy_to(name, i)) {
uplinks.push(self.uplinks[i])
}
@@ -456,20 +479,20 @@ Storage.prototype._sync_package_with_uplinks = function(name, pkginfo, options,
etag: etag,
fetched: Date.now()
}

for (var i in up_res.versions) {
for (let i in up_res.versions) {
// this won't be serialized to json,
// kinda like an ES6 Symbol
//FIXME: perhaps Symbol('_verdaccio_uplink') here?
Object.defineProperty(up_res.versions[i], '_verdaccio_uplink', {
value : up.upname,
enumerable : false,
configurable : false,
writable : true,
})
});
}

try {
Storage._merge_versions(pkginfo, up_res, self.config)
Storage._merge_versions(pkginfo, up_res, self.config);
} catch(err) {
self.logger.error({
sub: 'out',
@@ -497,11 +520,16 @@ Storage.prototype._sync_package_with_uplinks = function(name, pkginfo, options,
return callback(null, pkginfo, uplink_errors)
})
})
}
}

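The defineProperty loop in the hunk above tags every version fetched from an uplink with a non-enumerable `_verdaccio_uplink` property, which is why the comment compares it to an ES6 Symbol: the marker is visible to code but never serialized back into package.json. A small standalone illustration with made-up values:

  const version = { version: '1.0.0' };

  Object.defineProperty(version, '_verdaccio_uplink', {
    value        : 'npmjs',      // hypothetical uplink name
    enumerable   : false,        // skipped by JSON.stringify and for...in
    configurable : false,
    writable     : true,
  });

  console.log(version._verdaccio_uplink);   // 'npmjs'
  console.log(JSON.stringify(version));     // {"version":"1.0.0"}
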
// function gets a local info and an info from uplinks and tries to merge it
// exported for unit tests only
Storage._merge_versions = function(local, up, config) {
/**
* Function gets a local info and an info from uplinks and tries to merge it
exported for unit tests only.
* @param {*} local
* @param {*} up
* @param {*} config
*/
static _merge_versions(local, up, config) {
// copy new versions to a cache
// NOTE: if a certain version was updated, we can't refresh it reliably
for (var i in up.versions) {
@@ -520,4 +548,8 @@ Storage._merge_versions = function(local, up, config) {
}
}
}
}

}

module.exports = Storage;
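
For context on the class above: lib/storage.js remains a thin facade that writes only to the local store and merges reads from the local store and every readable uplink. A hypothetical usage sketch (the `config` object must be a parsed verdaccio config; its construction is not part of this diff):

  const Storage = require('./lib/storage');

  function listVersions(config, name, done) {
    const storage = new Storage(config);   // before this commit: Storage(config)

    // get_package consults the local copy first, then every uplink with
    // proxy_access, and returns the merged metadata plus per-uplink errors.
    storage.get_package(name, (err, metadata, uplink_errors) => {
      if (err) return done(err);
      done(null, Object.keys(metadata.versions), uplink_errors);
    });
  }
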
@@ -1,59 +1,19 @@
"use strict";

var JSONStream = require('JSONStream')
var Error = require('http-errors')
var request = require('request')
var Stream = require('readable-stream')
var URL = require('url')
var parse_interval = require('./config').parse_interval
var Logger = require('./logger')
var MyStreams = require('./streams')
var Utils = require('./utils')
var encode = function(thing) {
const JSONStream = require('JSONStream')
const Error = require('http-errors')
const request = require('request')
const Stream = require('readable-stream')
const URL = require('url')
const parse_interval = require('./config').parse_interval
const Logger = require('./logger')
const MyStreams = require('./streams')
const Utils = require('./utils')
const encode = function(thing) {
return encodeURIComponent(thing).replace(/^%40/, '@');
};

module.exports = Storage

//
// Implements Storage interface
// (same for storage.js, local-storage.js, up-storage.js)
//
function Storage(config, mainconfig) {
var self = Object.create(Storage.prototype)
self.config = config
self.failed_requests = 0
self.userAgent = mainconfig.user_agent
self.ca = config.ca
self.logger = Logger.logger.child({sub: 'out'})
self.server_id = mainconfig.server_id

self.url = URL.parse(self.config.url)

_setupProxy.call(self, self.url.hostname, config, mainconfig, self.url.protocol === 'https:')

self.config.url = self.config.url.replace(/\/$/, '')
if (Number(self.config.timeout) >= 1000) {
self.logger.warn([ 'Too big timeout value: ' + self.config.timeout,
'We changed time format to nginx-like one',
'(see http://wiki.nginx.org/ConfigNotation)',
'so please update your config accordingly' ].join('\n'))
}

// a bunch of different configurable timers
self.maxage = parse_interval(config_get('maxage' , '2m' ))
self.timeout = parse_interval(config_get('timeout' , '30s'))
self.max_fails = Number(config_get('max_fails' , 2 ))
self.fail_timeout = parse_interval(config_get('fail_timeout', '5m' ))
return self

// just a helper (`config[key] || default` doesn't work because of zeroes)
function config_get(key, def) {
return config[key] != null ? config[key] : def
}
}

function _setupProxy(hostname, config, mainconfig, isHTTPS) {
const _setupProxy = function(hostname, config, mainconfig, isHTTPS) {
var no_proxy
var proxy_key = isHTTPS ? 'https_proxy' : 'http_proxy'

@@ -100,6 +60,52 @@ function _setupProxy(hostname, config, mainconfig, isHTTPS) {
}
}

//
// Implements Storage interface
// (same for storage.js, local-storage.js, up-storage.js)
//
class Storage {

/**
*
* @param {*} config
* @param {*} mainconfig
*/
constructor(config, mainconfig) {
this.config = config
this.failed_requests = 0
this.userAgent = mainconfig.user_agent
this.ca = config.ca
this.logger = Logger.logger.child({sub: 'out'})
this.server_id = mainconfig.server_id

this.url = URL.parse(this.config.url)

_setupProxy.call(this, this.url.hostname, config, mainconfig, this.url.protocol === 'https:')

this.config.url = this.config.url.replace(/\/$/, '')
if (Number(this.config.timeout) >= 1000) {
this.logger.warn([ 'Too big timeout value: ' + this.config.timeout,
'We changed time format to nginx-like one',
'(see http://wiki.nginx.org/ConfigNotation)',
'so please update your config accordingly' ].join('\n'))
}

// a bunch of different configurable timers
this.maxage = parse_interval(config_get('maxage' , '2m' ))
this.timeout = parse_interval(config_get('timeout' , '30s'))
this.max_fails = Number(config_get('max_fails' , 2 ))
this.fail_timeout = parse_interval(config_get('fail_timeout', '5m' ))
return this

// just a helper (`config[key] || default` doesn't work because of zeroes)
function config_get(key, def) {
return config[key] != null ? config[key] : def
}
}
}


Storage.prototype.request = function(options, cb) {
if (!this.status_check()) {
var req = new Stream.Readable()
@@ -399,3 +405,6 @@ Storage.prototype._add_proxy_headers = function(req, headers) {

headers['Via'] += '1.1 ' + this.server_id + ' (Verdaccio)'
}


module.exports = Storage;
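
The `config_get` helper kept inside the constructor above exists because a plain `config[key] || def` fallback would discard legitimate falsy settings. A short illustration with made-up values:

  const config = { max_fails: 0 };                                // 0 is a deliberate setting

  const wrong = config.max_fails || 2;                            // 2 -- the configured 0 is lost
  const right = config.max_fails != null ? config.max_fails : 2;  // 0 -- what config_get returns

  console.log(wrong, right);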