Merge pull request #663 from verdaccio/refactor-storage

Refactor storage
This commit is contained in:
Juan Picado @jotadeveloper 2018-04-24 20:14:46 +02:00 committed by GitHub
commit db440f0aff
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
20 changed files with 365 additions and 295 deletions

View File

@ -16,7 +16,7 @@
},
"dependencies": {
"@verdaccio/file-locking": "0.0.5",
"@verdaccio/local-storage": "1.0.1",
"@verdaccio/local-storage": "1.0.2",
"@verdaccio/streams": "1.0.0",
"JSONStream": "1.3.2",
"async": "2.6.0",
@ -51,7 +51,7 @@
"@commitlint/cli": "6.1.3",
"@commitlint/config-conventional": "6.1.3",
"@commitlint/travis-cli": "6.1.3",
"@verdaccio/types": "2.1.0",
"@verdaccio/types": "2.1.1",
"babel-cli": "6.26.0",
"babel-core": "6.26.0",
"babel-eslint": "8.2.2",
@ -126,7 +126,7 @@
"supertest": "3.0.0",
"url-loader": "0.6.2",
"verdaccio-auth-memory": "0.0.4",
"verdaccio-memory": "1.0.0",
"verdaccio-memory": "1.0.1",
"webpack": "3.10.0",
"webpack-dev-server": "2.11.1",
"webpack-merge": "4.1.2",

View File

@ -20,11 +20,7 @@ const Config = require('../lib/config');
const Middleware = require('./middleware');
const Cats = require('../lib/status-cats');
export default function(configHash: any) {
// Config
LoggerApp.setup(configHash.logs);
const config: IConfig = new Config(configHash);
const storage: IStorageHandler = new Storage(config);
const defineAPI = function(config: Config, storage: IStorageHandler) {
const auth: IAuth = new Auth(config);
const app: $Application = express();
// run in production mode by default, just in case
@ -102,4 +98,13 @@ export default function(configHash: any) {
app.use(Middleware.final);
return app;
};
/**
 * Build the verdaccio API (express application).
 * Sets up logging, creates the configuration and the storage handler, and
 * waits for storage initialization before wiring up the endpoints.
 * @param {Object} configHash raw configuration object (parsed config file)
 * @return {Promise<any>} resolves to the configured express application
 */
export default async function(configHash: any) {
LoggerApp.setup(configHash.logs);
const config: IConfig = new Config(configHash);
const storage: IStorageHandler = new Storage(config);
// wait until async init calls (secret loading via storage.init) have finished
await storage.init(config);
return defineAPI(config, storage);
}

38
src/lib/bootstrap.js vendored
View File

@ -8,7 +8,7 @@ import http from'http';
import https from 'https';
// $FlowFixMe
import constants from 'constants';
import server from '../api/index';
import endPointAPI from '../api/index';
import {parse_address} from './utils';
import type {Callback} from '@verdaccio/types';
@ -63,31 +63,35 @@ export function getListListenAddresses(argListen: string, configListen: mixed) {
* @param {String} pkgVersion
* @param {String} pkgName
*/
function startVerdaccio(config: any, cliListen: string, configPath: string, pkgVersion: string, pkgName: string, callback: Callback) {
function startVerdaccio(config: any, cliListen: string,
configPath: string, pkgVersion: string,
pkgName: string, callback: Callback) {
if (isObject(config) === false) {
throw new Error('config file must be an object');
}
const app = server(config);
const addresses = getListListenAddresses(cliListen, config.listen);
endPointAPI(config).then((app)=> {
const addresses = getListListenAddresses(cliListen, config.listen);
addresses.forEach(function(addr) {
let webServer;
if (addr.proto === 'https') {
// https must either have key cert and ca or a pfx and (optionally) a passphrase
if (!config.https || !config.https.key || !config.https.cert || !config.https.ca) {
displayHTTPSWarning(configPath);
addresses.forEach(function(addr) {
let webServer;
if (addr.proto === 'https') {
// https must either have key cert and ca or a pfx and (optionally) a passphrase
if (!config.https || !config.https.key || !config.https.cert || !config.https.ca) {
displayHTTPSWarning(configPath);
}
webServer = handleHTTPS(app, configPath, config);
} else { // http
webServer = http.createServer(app);
}
webServer = handleHTTPS(app, configPath, config);
} else { // http
webServer = http.createServer(app);
}
unlinkAddressPath(addr);
unlinkAddressPath(addr);
callback(webServer, addr, pkgName, pkgVersion);
callback(webServer, addr, pkgName, pkgVersion);
});
});
}
function unlinkAddressPath(addr) {

View File

@ -59,13 +59,13 @@ try {
}
logger.logger.warn({file: configPathLocation}, 'config file - @{file}');
startVerdaccio(verdaccioConfiguration, cliListner, configPathLocation, pkgVersion, pkgName, listenDefaultCallback);
} catch (err) {
logger.logger.fatal({file: configPathLocation, err: err}, 'cannot open config file @{file}: @{!err.message}');
process.exit(1);
}
startVerdaccio(verdaccioConfiguration, cliListner, configPathLocation, pkgVersion, pkgName, listenDefaultCallback);
process.on('uncaughtException', function(err) {
logger.logger.fatal( {
err: err,

View File

@ -208,7 +208,7 @@ class Config {
* @return {String}
*/
checkSecretKey(secret) {
if (_.isNil(secret) === false && secret !== '') {
if (_.isString(secret) && secret !== '') {
this.secret = secret;
return secret;
}

View File

@ -50,7 +50,6 @@ class LocalStorage implements IStorage {
this.logger = logger.child({sub: 'fs'});
this.config = config;
this.localData = this._loadStorage(config, logger);
this._setSecret(config);
}
addPackage(name: string, pkg: Package, callback: Callback) {
@ -839,8 +838,10 @@ class LocalStorage implements IStorage {
}
}
_setSecret(config: Config) {
this.localData.setSecret(config.checkSecretKey(this.localData.getSecret()));
/**
 * Load the secret from the local database, run it through
 * config.checkSecretKey, and persist the resulting secret back.
 * @param {Config} config configuration instance used to validate the secret
 * @return {Promise<any>} result of storing the checked secret
 */
async getSecret(config: Config) {
const secretKey = await this.localData.getSecret();
return this.localData.setSecret(config.checkSecretKey(secretKey));
}
_loadStorage(config: Config, logger: Logger) {

37
src/lib/metadata-utils.js Normal file
View File

@ -0,0 +1,37 @@
// @flow
import semver from 'semver';
import _ from 'lodash';
import {DIST_TAGS} from './utils';
import type {Package} from '@verdaccio/types';
/**
* Function gets a local info and an info from uplinks and tries to merge it
exported for unit tests only.
* @param {*} local
* @param {*} up
* @param {*} config
* @static
*/
export function mergeVersions(local: Package, up: Package) {
// copy new versions to a cache
// NOTE: if a certain version was updated, we can't refresh it reliably
for (let i in up.versions) {
if (_.isNil(local.versions[i])) {
local.versions[i] = up.versions[i];
}
}
for (let i in up[DIST_TAGS]) {
if (local[DIST_TAGS][i] !== up[DIST_TAGS][i]) {
if (!local[DIST_TAGS][i] || semver.lte(local[DIST_TAGS][i], up[DIST_TAGS][i])) {
local[DIST_TAGS][i] = up[DIST_TAGS][i];
}
if (i === 'latest' && local[DIST_TAGS][i] === up[DIST_TAGS][i]) {
// if remote has more fresh package, we should borrow its readme
local.readme = up.readme;
}
}
}
}

View File

@ -2,11 +2,11 @@
import _ from 'lodash';
import crypto from 'crypto';
import * as Utils from './utils';
import {ErrorCode, isObject, normalizeDistTags, DIST_TAGS} from './utils';
import Search from './search';
import type {
Package, Version,
} from '@verdaccio/types';
import type {Package, Version} from '@verdaccio/types';
import type {IStorage} from '../../types';
const pkgFileName = 'package.json';
const fileExist: string = 'EEXISTS';
@ -42,7 +42,7 @@ function normalizePackage(pkg: Package) {
'time'];
pkgProperties.forEach((key) => {
if (_.isNil(Utils.isObject(pkg[key]))) {
if (_.isNil(isObject(pkg[key]))) {
pkg[key] = {};
}
});
@ -52,7 +52,7 @@ function normalizePackage(pkg: Package) {
}
// normalize dist-tags
Utils.normalize_dist_tags(pkg);
normalizeDistTags(pkg);
return pkg;
}
@ -71,6 +71,90 @@ function cleanUpReadme(version: Version): Version {
return version;
}
export const WHITELIST = ['_rev', 'name', 'versions', DIST_TAGS, 'readme', 'time'];
export function cleanUpLinksRef(keepUpLinkData: boolean, result: Package): Package {
const propertyToKeep = [...WHITELIST];
if (keepUpLinkData === true) {
propertyToKeep.push('_uplinks');
}
for (let i in result) {
if (propertyToKeep.indexOf(i) === -1) { // Remove sections like '_uplinks' from response
delete result[i];
}
}
return result;
}
/**
* Check whether a package it is already a local package
* @param {*} name
* @param {*} localStorage
*/
export function checkPackageLocal(name: string, localStorage: IStorage): Promise<any> {
return new Promise((resolve, reject) => {
localStorage.getPackageMetadata(name, (err, results) => {
if (!_.isNil(err) && err.status !== 404) {
return reject(err);
}
if (results) {
return reject(ErrorCode.get409('this package is already present'));
}
return resolve();
});
});
}
export function publishPackage(name: string, metadata: any, localStorage: IStorage): Promise<any> {
return new Promise((resolve, reject) => {
localStorage.addPackage(name, metadata, (err, latest) => {
if (!_.isNull(err)) {
return reject(err);
} else if (!_.isUndefined(latest)) {
Search.add(latest);
}
return resolve();
});
});
}
export function checkPackageRemote(name: string, isAllowPublishOffline: boolean, syncMetadata: Function): Promise<any> {
return new Promise((resolve, reject) => {
// $FlowFixMe
syncMetadata(name, null, {}, (err, packageJsonLocal, upLinksErrors) => {
// something weird
if (err && err.status !== 404) {
return reject(err);
}
// checking package exist already
if (_.isNil(packageJsonLocal) === false) {
return reject(ErrorCode.get409('this package is already present'));
}
for (let errorItem = 0; errorItem < upLinksErrors.length; errorItem++) {
// checking error
// if uplink fails with a status other than 404, we report failure
if (_.isNil(upLinksErrors[errorItem][0]) === false) {
if (upLinksErrors[errorItem][0].status !== 404) {
if (isAllowPublishOffline) {
return resolve();
}
return reject(ErrorCode.get503('one of the uplinks is down, refuse to publish'));
}
}
}
return resolve();
});
});
}
export {
generatePackageTemplate,
normalizePackage,

View File

@ -3,15 +3,15 @@
import _ from 'lodash';
import assert from 'assert';
import async from 'async';
import createError from 'http-errors';
import semver from 'semver';
import Stream from 'stream';
import ProxyStorage from './up-storage';
import Search from './search';
import LocalStorage from './local-storage';
import {ReadTarball} from '@verdaccio/streams';
import ProxyStorage from './up-storage';
import {ErrorCode, normalize_dist_tags, validate_metadata, isObject, DIST_TAGS} from './utils';
import {checkPackageLocal, publishPackage, checkPackageRemote, cleanUpLinksRef} from './storage-utils';
import {setupUpLinks, updateVersionsHiddenUpLink} from './uplink-util';
import {mergeVersions} from './metadata-utils';
import {ErrorCode, normalizeDistTags, validate_metadata, isObject, DIST_TAGS} from './utils';
import type {IStorage, IProxy, IStorageHandler, ProxyList, StringValue} from '../../types';
import type {
Versions,
@ -26,7 +26,6 @@ import type {
import type {IReadTarball, IUploadTarball} from '@verdaccio/streams';
const LoggerApi = require('../lib/logger');
const WHITELIST = ['_rev', 'name', 'versions', DIST_TAGS, 'readme', 'time'];
const getDefaultMetadata = function(name): Package {
const pkgMetadata: Package = {
name,
@ -41,25 +40,22 @@ const getDefaultMetadata = function(name): Package {
return pkgMetadata;
};
/**
* Implements Storage interface
* (same for storage.js, local-storage.js, up-storage.js).
*/
class Storage implements IStorageHandler {
localStorage: IStorage;
config: Config;
logger: Logger;
uplinks: ProxyList;
/**
* @param {*} config
*/
constructor(config: Config) {
this.config = config;
this.uplinks = {};
this._setupUpLinks(this.config);
this.uplinks = setupUpLinks(config);
this.logger = LoggerApi.logger.child();
}
init(config: Config) {
this.localStorage = new LocalStorage(this.config, LoggerApi.logger);
return this.localStorage.getSecret(config);
}
/**
@ -67,106 +63,27 @@ class Storage implements IStorageHandler {
Function checks if package with the same name is available from uplinks.
If it isn't, we create package locally
Used storages: local (write) && uplinks
* @param {*} name
* @param {*} metadata
* @param {*} callback
*/
addPackage(name: string, metadata: any, callback: Function) {
const self = this;
async addPackage(name: string, metadata: any, callback: Function) {
try {
await checkPackageLocal(name, this.localStorage);
await checkPackageRemote(name, this._isAllowPublishOffline(), this._syncUplinksMetadata.bind(this));
await publishPackage(name, metadata, this.localStorage);
callback();
} catch (err) {
callback(err);
}
}
/**
* Check whether a package it is already a local package
* @return {Promise}
*/
const checkPackageLocal = () => {
return new Promise((resolve, reject) => {
this.localStorage.getPackageMetadata(name, (err, results) => {
if (!_.isNil(err) && err.status !== 404) {
return reject(err);
}
if (results) {
return reject(ErrorCode.get409('this package is already present'));
}
return resolve();
});
});
};
/**
* Check whether a package exist in any of the uplinks.
* @return {Promise}
*/
const checkPackageRemote = () => {
return new Promise((resolve, reject) => {
// $FlowFixMe
self._syncUplinksMetadata(name, null, {}, (err, results, err_results) => {
// something weird
if (err && err.status !== 404) {
return reject(err);
}
// checking package
if (results) {
return reject(ErrorCode.get409('this package is already present'));
}
for (let i = 0; i < err_results.length; i++) {
// checking error
// if uplink fails with a status other than 404, we report failure
if (_.isNil(err_results[i][0]) === false) {
if (err_results[i][0].status !== 404) {
if (this.config.publish &&
_.isBoolean(this.config.publish.allow_offline) &&
this.config.publish.allow_offline) {
return resolve();
}
return reject(createError(503, 'one of the uplinks is down, refuse to publish'));
}
}
}
return resolve();
});
});
};
/**
* Add a package to the local database
* @return {Promise}
*/
const publishPackage = () => {
return new Promise((resolve, reject) => {
self.localStorage.addPackage(name, metadata, (err, latest) => {
if (!_.isNull(err)) {
return reject(err);
} else if (!_.isUndefined(latest)) {
Search.add(latest);
}
return resolve();
});
});
};
// NOTE:
// - when checking whether a package exists, we ask ALL uplinks
// - when publishing a package, we only publish it to some of them
// so all requests are necessary
checkPackageLocal()
.then(() => {
return checkPackageRemote().then(() => {
return publishPackage().then(() => {
callback();
}, (err) => callback(err));
}, (err) => callback(err));
}, (err) => callback(err));
_isAllowPublishOffline(): boolean {
return typeof this.config.publish !== 'undefined'
&& _.isBoolean(this.config.publish.allow_offline)
&& this.config.publish.allow_offline;
}
/**
* Add a new version of package {name} to a system
Used storages: local (write)
* @param {*} name
* @param {*} version
* @param {*} metadata
* @param {*} tag
* @param {*} callback
*/
addVersion(name: string, version: string, metadata: Version, tag: StringValue, callback: Callback) {
this.localStorage.addVersion(name, version, metadata, tag, callback);
@ -175,9 +92,6 @@ class Storage implements IStorageHandler {
/**
* Tags a package version with a provided tag
Used storages: local (write)
* @param {*} name
* @param {*} tag_hash
* @param {*} callback
*/
mergeTags(name: string, tagHash: MergeTags, callback: Callback) {
this.localStorage.mergeTags(name, tagHash, callback);
@ -186,9 +100,6 @@ class Storage implements IStorageHandler {
/**
* Tags a package version with a provided tag
Used storages: local (write)
* @param {*} name
* @param {*} tag_hash
* @param {*} callback
*/
replaceTags(name: string, tagHash: MergeTags, callback: Callback) {
this.logger.warn('method deprecated');
@ -199,10 +110,6 @@ class Storage implements IStorageHandler {
* Change an existing package (i.e. unpublish one version)
Function changes a package info from local storage and all uplinks with write access./
Used storages: local (write)
* @param {*} name
* @param {*} metadata
* @param {*} revision
* @param {*} callback
*/
changePackage(name: string, metadata: Package, revision: string, callback: Callback) {
this.localStorage.changePackage(name, metadata, revision, callback);
@ -212,8 +119,6 @@ class Storage implements IStorageHandler {
* Remove a package from a system
Function removes a package from local storage
Used storages: local (write)
* @param {*} name
* @param {*} callback
*/
removePackage(name: string, callback: Callback) {
this.localStorage.removePackage(name, callback);
@ -227,10 +132,6 @@ class Storage implements IStorageHandler {
Tarball in question should not be linked to in any existing
versions, i.e. package version should be unpublished first.
Used storage: local (write)
* @param {*} name
* @param {*} filename
* @param {*} revision
* @param {*} callback
*/
removeTarball(name: string, filename: string, revision: string, callback: Callback) {
this.localStorage.removeTarball(name, filename, revision, callback);
@ -240,9 +141,6 @@ class Storage implements IStorageHandler {
* Upload a tarball for {name} package
Function is syncronous and returns a WritableStream
Used storages: local (write)
* @param {*} name
* @param {*} filename
* @return {Stream}
*/
addTarball(name: string, filename: string): IUploadTarball {
return this.localStorage.addTarball(name, filename);
@ -254,9 +152,6 @@ class Storage implements IStorageHandler {
Function tries to read tarball locally, if it fails then it reads package
information in order to figure out where we can get this tarball from
Used storages: local || uplink (just one)
* @param {*} name
* @param {*} filename
* @return {Stream}
*/
getTarball(name: string, filename: string) {
let readStream = new ReadTarball();
@ -270,9 +165,9 @@ class Storage implements IStorageHandler {
// trying local first
// flow: should be IReadTarball
let localStream: any = self.localStorage.getTarball(name, filename);
let is_open = false;
let isOpen = false;
localStream.on('error', (err) => {
if (is_open || err.status !== 404) {
if (isOpen || err.status !== 404) {
return readStream.emit('error', err);
}
@ -303,7 +198,7 @@ class Storage implements IStorageHandler {
readStream.emit('content-length', v);
});
localStream.on('open', function() {
is_open = true;
isOpen = true;
localStream.pipe(readStream);
});
return readStream;
@ -315,10 +210,10 @@ class Storage implements IStorageHandler {
function serveFile(file: DistFile) {
let uplink: any = null;
for (let p in self.uplinks) {
for (let uplinkId in self.uplinks) {
// $FlowFixMe
if (self.uplinks[p].isUplinkValid(file.url)) {
uplink = self.uplinks[p];
if (self.uplinks[uplinkId].isUplinkValid(file.url)) {
uplink = self.uplinks[uplinkId];
}
}
@ -405,28 +300,17 @@ class Storage implements IStorageHandler {
}
this._syncUplinksMetadata(options.name, data, {req: options.req},
function getPackageSynUpLinksCallback(err, result: Package, uplink_errors) {
function getPackageSynUpLinksCallback(err, result: Package, uplinkErrors) {
if (err) {
return options.callback(err);
}
const propertyToKeep = [...WHITELIST];
if (options.keepUpLinkData === true) {
propertyToKeep.push('_uplinks');
}
for (let i in result) {
if (propertyToKeep.indexOf(i) === -1) { // Remove sections like '_uplinks' from response
delete result[i];
}
}
normalize_dist_tags(result);
normalizeDistTags(cleanUpLinksRef(options.keepUpLinkData, result));
// npm can throw if this field doesn't exist
result._attachments = {};
options.callback(null, result, uplink_errors);
options.callback(null, result, uplinkErrors);
});
});
}
@ -572,7 +456,7 @@ class Storage implements IStorageHandler {
if (err || !upLinkResponse) {
// $FlowFixMe
return cb(null, [err || createError(500, 'no data')]);
return cb(null, [err || ErrorCode.get500('no data')]);
}
try {
@ -595,10 +479,10 @@ class Storage implements IStorageHandler {
packageInfo.time = upLinkResponse.time;
}
this._updateVersionsHiddenUpLink(upLinkResponse.versions, upLink);
updateVersionsHiddenUpLink(upLinkResponse.versions, upLink);
try {
Storage._mergeVersions(packageInfo, upLinkResponse, self.config);
mergeVersions(packageInfo, upLinkResponse);
} catch(err) {
self.logger.error({
@ -647,54 +531,6 @@ class Storage implements IStorageHandler {
}
}
}
/**
* Set up the Up Storage for each link.
* @param {Object} config
* @private
*/
_setupUpLinks(config: Config) {
for (let uplinkName in config.uplinks) {
if (Object.prototype.hasOwnProperty.call(config.uplinks, uplinkName)) {
// instance for each up-link definition
const proxy: IProxy = new ProxyStorage(config.uplinks[uplinkName], config);
proxy.upname = uplinkName;
this.uplinks[uplinkName] = proxy;
}
}
}
/**
* Function gets a local info and an info from uplinks and tries to merge it
exported for unit tests only.
* @param {*} local
* @param {*} up
* @param {*} config
* @static
*/
static _mergeVersions(local: Package, up: Package, config: Config) {
// copy new versions to a cache
// NOTE: if a certain version was updated, we can't refresh it reliably
for (let i in up.versions) {
if (_.isNil(local.versions[i])) {
local.versions[i] = up.versions[i];
}
}
for (let i in up[DIST_TAGS]) {
if (local[DIST_TAGS][i] !== up[DIST_TAGS][i]) {
if (!local[DIST_TAGS][i] || semver.lte(local[DIST_TAGS][i], up[DIST_TAGS][i])) {
local[DIST_TAGS][i] = up[DIST_TAGS][i];
}
if (i === 'latest' && local[DIST_TAGS][i] === up[DIST_TAGS][i]) {
// if remote has more fresh package, we should borrow its readme
local.readme = up.readme;
}
}
}
}
}
export default Storage;

107
src/lib/uplink-util.js Normal file
View File

@ -0,0 +1,107 @@
// @flow
import {ErrorCode, isObject, validate_metadata} from './utils';
import ProxyStorage from './up-storage';
import {mergeVersions} from './metadata-utils';
import type {Package, Versions, Config, Logger} from '@verdaccio/types';
import type {IProxy, ProxyList} from '../../types';
/**
* Set up the Up Storage for each link.
*/
export function setupUpLinks(config: Config): ProxyList {
const uplinks: ProxyList = {};
for (let uplinkName in config.uplinks) {
if (Object.prototype.hasOwnProperty.call(config.uplinks, uplinkName)) {
// instance for each up-link definition
const proxy: IProxy = new ProxyStorage(config.uplinks[uplinkName], config);
proxy.upname = uplinkName;
uplinks[uplinkName] = proxy;
}
}
return uplinks;
}
export function updateVersionsHiddenUpLink(versions: Versions, upLink: IProxy) {
for (let i in versions) {
if (Object.prototype.hasOwnProperty.call(versions, i)) {
const version = versions[i];
// holds a "hidden" value to be used by the package storage.
// $FlowFixMe
version[Symbol.for('__verdaccio_uplink')] = upLink.upname;
}
}
}
/**
 * Fetch package metadata for `name` from a single uplink and merge it into
 * `packageInfo` (mutated in place).
 * Resolves with `false` when the cached uplink data is still fresh (maxage
 * not elapsed) and no request is made; resolves with `true` after a
 * successful fetch, validation and merge; rejects on transport, validation
 * or merge errors.
 * @param {string} name package name
 * @param {Package} packageInfo locally known metadata; `_uplinks`, `time`,
 * versions and dist-tags are updated in place
 * @param {any} options request options; an `etag` is added when one is cached
 * @param {any} upLink uplink proxy storage
 * @param {Logger} logger logger used to report validation/merge failures
 * @return {Promise<boolean>}
 */
export function fetchUplinkMetadata(name: string, packageInfo: Package,
options: any, upLink: any, logger: Logger): Promise<any> {
return new Promise(function(resolve, reject) {
const _options = Object.assign({}, options);
const upLinkMeta = packageInfo._uplinks[upLink.upname];
if (isObject(upLinkMeta)) {
const fetched = upLinkMeta.fetched;
// check whether it is too soon to ask for metadata again (maxage window)
if (fetched && (Date.now() - fetched) < upLink.maxage) {
return resolve(false);
}
_options.etag = upLinkMeta.etag;
}
upLink.getRemoteMetadata(name, _options, function handleUplinkMetadataResponse(err, upLinkResponse, eTag) {
// 304: remote unchanged — only refresh the fetched timestamp
// NOTE(review): assumes a 304 can only occur when an etag was sent,
// i.e. upLinkMeta is an object here — verify against getRemoteMetadata
if (err && err.remoteStatus === 304) {
upLinkMeta.fetched = Date.now();
}
if (err || !upLinkResponse) {
// $FlowFixMe
return reject(err || ErrorCode.get500('no data'));
}
try {
validate_metadata(upLinkResponse, name);
} catch(err) {
logger.error({
sub: 'out',
err: err,
}, 'package.json validating error @{!err.message}\n@{err.stack}');
return reject(err);
}
// record the etag/timestamp so the next call can short-circuit via maxage
packageInfo._uplinks[upLink.upname] = {
etag: eTag,
fetched: Date.now(),
};
// added to fix verdaccio#73
if ('time' in upLinkResponse) {
packageInfo.time = upLinkResponse.time;
}
// remember which uplink each remote version came from
updateVersionsHiddenUpLink(upLinkResponse.versions, upLink);
try {
mergeVersions(packageInfo, upLinkResponse);
} catch(err) {
logger.error({
sub: 'out',
err: err,
}, 'package.json parsing error @{!err.message}\n@{err.stack}');
return reject(err);
}
// if we got to this point, assume that the correct package exists
// on the uplink
resolve(true);
});
});
}

View File

@ -247,7 +247,7 @@ function semverSort(listVersions: Array<string>) {
* Flatten arrays of tags.
* @param {*} data
*/
function normalize_dist_tags(pkg: Package) {
export function normalizeDistTags(pkg: Package) {
let sorted;
if (!pkg[DIST_TAGS].latest) {
// overwrite latest with highest known version based on semver sort
@ -345,8 +345,8 @@ const ErrorCode = {
get403: (message: string = 'can\'t use this filename') => {
return createError(403, message);
},
get503: () => {
return createError(500, 'resource temporarily unavailable');
get503: (message: string = 'resource temporarily unavailable') => {
return createError(503, message);
},
get404: (customMessage?: string) => {
return createError(404, customMessage || 'no such package available');
@ -443,7 +443,6 @@ export {
semverSort,
parse_address,
get_version,
normalize_dist_tags,
tagVersion,
combineBaseUrl,
filter_tarball_urls,

View File

@ -5,21 +5,17 @@ import rimraf from 'rimraf';
import configDefault from './partials/config/access';
import Config from '../../src/lib/config';
import Storage from '../../src/lib/storage';
import Auth from '../../src/lib/auth';
import indexAPI from '../../src/api/index';
import endPointAPI from '../../src/api/index';
require('../../src/lib/logger').setup([]);
describe('api with no limited access configuration', () => {
let config;
let storage;
let auth;
let app;
beforeAll(function(done) {
const store = path.join(__dirname, './partials/store/access-storage');
rimraf(store, () => {
rimraf(store, async () => {
const configForTest = _.clone(configDefault);
configForTest.auth = {
htpasswd: {
@ -28,9 +24,7 @@ describe('api with no limited access configuration', () => {
};
configForTest.self_path = store;
config = new Config(configForTest);
storage = new Storage(config);
auth = new Auth(config);
app = indexAPI(config, auth, storage);
app = await endPointAPI(config);
done();
});
});

View File

@ -7,23 +7,19 @@ import configDefault from './partials/config';
import publishMetadata from './partials/publish-api';
import forbiddenPlace from './partials/forbidden-place';
import Config from '../../src/lib/config';
import Storage from '../../src/lib/storage';
import Auth from '../../src/lib/auth';
import indexAPI from '../../src/api/index';
import endPointAPI from '../../src/api/index';
require('../../src/lib/logger').setup([]);
const credentials = { name: 'Jota', password: 'secretPass' };
describe('endpoint unit test', () => {
let config;
let storage;
let auth;
let app;
jest.setTimeout(10000);
beforeAll(function(done) {
const store = path.join(__dirname, './partials/store/test-storage');
rimraf(store, () => {
rimraf(store, async () => {
const configForTest = _.clone(configDefault);
configForTest.auth = {
htpasswd: {
@ -32,9 +28,7 @@ describe('endpoint unit test', () => {
};
configForTest.self_path = store;
config = new Config(configForTest);
storage = new Storage(config);
auth = new Auth(config);
app = indexAPI(config, auth, storage);
app = await endPointAPI(config);
done();
});
});

View File

@ -1,4 +1,4 @@
import verdaccio from '../../src/api/index';
import endPointAPI from '../../src/api/index';
const assert = require('assert');
const express = require('express');
@ -17,9 +17,9 @@ describe('basic system test', () => {
rimraf(__dirname + '/store/test-storage', done);
});
beforeAll(function(done) {
beforeAll(async function(done) {
app.use(verdaccio(config));
app.use(await endPointAPI(config));
server.listen(0, function() {
port = server.address().port;

View File

@ -9,10 +9,10 @@ require('../../src/lib/logger').setup([]);
describe('startServer via API', () => {
describe('startServer launcher', () => {
test('should provide all server data', (done) => {
test('should provide all server data await/async', async (done) => {
const store = path.join(__dirname, 'partials/store');
startServer(config, 6000, store, '1.0.0', 'verdaccio-test',
await startServer(config, 6000, store, '1.0.0', 'verdaccio-test',
(webServer, addrs, pkgName, pkgVersion) => {
expect(webServer).toBeDefined();
expect(addrs).toBeDefined();
@ -27,8 +27,12 @@ describe('startServer via API', () => {
});
});
test('should fails if config is missing', () => {
expect(() => { return startServer() }).toThrow('config file must be an object');
test('should fails if config is missing', async () => {
try {
await startServer();
} catch (e) {
expect(e.message).toEqual('config file must be an object');
}
});
});

View File

@ -33,9 +33,10 @@ let packages = [
];
describe('search', () => {
beforeAll(function() {
beforeAll(async function() {
let config = new Config(config_hash);
this.storage = new Storage(config);
await this.storage.init(config);
Search.configureStorage(this.storage);
packages.map(function(item) {
Search.add(item);

View File

@ -1,6 +1,6 @@
let assert = require('assert');
let semverSort = require('../../src/lib/utils').semverSort;
import Storage from '../../src/lib/storage';
import {mergeVersions} from '../../src/lib/metadata-utils';
require('../../src/lib/logger').setup([]);
@ -12,7 +12,7 @@ describe('Storage._merge_versions versions', () => {
'dist-tags': {},
};
Storage._mergeVersions(pkg, {versions: {a: 2, q: 2}});
mergeVersions(pkg, {versions: {a: 2, q: 2}});
assert.deepEqual(pkg, {
'versions': {a: 1, b: 1, c: 1, q: 2},
@ -26,7 +26,7 @@ describe('Storage._merge_versions versions', () => {
'dist-tags': {q: '1.1.1', w: '2.2.2'},
};
Storage._mergeVersions(pkg, {'dist-tags': {q: '2.2.2', w: '3.3.3', t: '4.4.4'}});
mergeVersions(pkg, {'dist-tags': {q: '2.2.2', w: '3.3.3', t: '4.4.4'}});
assert.deepEqual(pkg, {
'versions': {},
@ -46,7 +46,7 @@ describe('Storage._merge_versions versions', () => {
// against our local 1.1.10, which may end up published as 1.1.3 in the
// future
Storage._mergeVersions(pkg, {'dist-tags':{q:'1.1.2',w:'3.3.3',t:'4.4.4'}})
mergeVersions(pkg, {'dist-tags':{q:'1.1.2',w:'3.3.3',t:'4.4.4'}})
assert.deepEqual(pkg, {
versions: {},

View File

@ -13,22 +13,24 @@ import type {IStorageHandler} from '../../types';
setup(configExample.logs);
const generateStorage = function(): IStorageHandler {
const generateStorage = async function() {
const storageConfig = _.clone(configExample);
const storage = `./unit/partials/store/test-storage-store.spec`;
storageConfig.self_path = __dirname;
storageConfig.storage = storage;
const config: Config = new AppConfig(storageConfig);
const store: IStorageHandler = new Storage(config);
await store.init(config);
return new Storage(config);
return store;
}
describe('StorageTest', () => {
jest.setTimeout(10000);
beforeAll((done)=> {
const storage: IStorageHandler = generateStorage();
beforeAll(async (done)=> {
const storage: IStorageHandler = await generateStorage();
var request = httpMocks.createRequest({
method: 'GET',
url: '/react',
@ -50,14 +52,14 @@ describe('StorageTest', () => {
});
});
test('should be defined', () => {
const storage: IStorageHandler = generateStorage();
test('should be defined', async () => {
const storage: IStorageHandler = await generateStorage();
expect(storage).toBeDefined();
});
test('should fetch from uplink react metadata from nmpjs', (done) => {
const storage: IStorageHandler = generateStorage();
test('should fetch from uplink react metadata from nmpjs', async (done) => {
const storage: IStorageHandler = await generateStorage();
// $FlowFixMe
storage._syncUplinksMetadata('react', null, {}, (err, metadata, errors) => {
@ -66,8 +68,8 @@ describe('StorageTest', () => {
});
});
test('should fails on fetch from uplink metadata from nmpjs', (done) => {
const storage: IStorageHandler = generateStorage();
test('should fails on fetch from uplink metadata from nmpjs', async (done) => {
const storage: IStorageHandler = await generateStorage();
// $FlowFixMe
storage._syncUplinksMetadata('@verdaccio/404', null, {}, (err, metadata, errors) => {

View File

@ -57,10 +57,11 @@ export interface IProxy {
upname: string;
fetchTarball(url: string): IReadTarball;
isUplinkValid(url: string): boolean;
getRemoteMetadata(name: string, options: any, callback: Callback): void;
}
export type ProxyList = {
[key: string]: IProxy | null;
[key: string]: IProxy;
}
export type Utils = {
@ -69,7 +70,7 @@ export type Utils = {
isObject: (value: any) => boolean;
validate_name: (value: any) => boolean;
tag_version: (value: any, version: string, tag: string) => void;
normalize_dist_tags: (pkg: Package) => void;
normalizeDistTags: (pkg: Package) => void;
semverSort: (keys: Array<string>) => Array<string>;
}
@ -78,7 +79,8 @@ export interface IStorageHandler {
localStorage: IStorage;
logger: Logger;
uplinks: ProxyList;
addPackage(name: string, metadata: any, callback: Function): void;
addPackage(name: string, metadata: any, callback: Function): Promise<any>;
init(config: Config): Promise<any>;
addVersion(name: string, version: string, metadata: Version, tag: StringValue, callback: Callback): void;
mergeTags(name: string, tagHash: MergeTags, callback: Callback): void;
replaceTags(name: string, tagHash: MergeTags, callback: Callback): void;
@ -92,7 +94,6 @@ export interface IStorageHandler {
getLocalDatabase(callback: Callback): void;
_syncUplinksMetadata(name: string, packageInfo: Package, options: any, callback: Callback): void;
_updateVersionsHiddenUpLink(versions: Versions, upLink: IProxy): void;
_setupUpLinks(config: Config): void;
}
export interface IStorage {
@ -110,6 +111,7 @@ export interface IStorage {
getTarball(name: string, filename: string): IReadTarball;
getPackageMetadata(name: string, callback: Callback): void;
search(startKey: string, options: any): IUploadTarball;
getSecret(config: Config): Promise<any>;
}
export type $RequestExtend = $Request & {remote_user?: any}

View File

@ -227,9 +227,9 @@
lockfile "1.0.3"
lodash "4.17.4"
"@verdaccio/local-storage@1.0.1":
version "1.0.1"
resolved "https://registry.npmjs.org/@verdaccio/local-storage/-/local-storage-1.0.1.tgz#eb5d3a5e035d302ecf756b68eee7f3c48f32ca88"
"@verdaccio/local-storage@1.0.2":
version "1.0.2"
resolved "https://registry.npmjs.org/@verdaccio/local-storage/-/local-storage-1.0.2.tgz#3cdc2b27ce0496787f0ca36560b8a34b4959164e"
dependencies:
"@verdaccio/file-locking" "0.0.5"
"@verdaccio/streams" "1.0.0"
@ -242,9 +242,9 @@
version "1.0.0"
resolved "https://registry.npmjs.org/@verdaccio/streams/-/streams-1.0.0.tgz#d5d24c6747208728b9fd16b908e3932c3fb1f864"
"@verdaccio/types@2.1.0":
version "2.1.0"
resolved "https://registry.npmjs.org/@verdaccio/types/-/types-2.1.0.tgz#1a0b330f96bc63fbc87391c2b5c625fd3be5da84"
"@verdaccio/types@2.1.1":
version "2.1.1"
resolved "https://registry.npmjs.org/@verdaccio/types/-/types-2.1.1.tgz#d0ff73154a9e389b828df3d5046e26fbb67f3233"
JSONStream@1.3.2, JSONStream@^1.0.4:
version "1.3.2"
@ -9048,9 +9048,9 @@ verdaccio-htpasswd@0.2.0:
bcryptjs "2.4.3"
unix-crypt-td-js "^1.0.0"
verdaccio-memory@1.0.0:
version "1.0.0"
resolved "https://registry.npmjs.org/verdaccio-memory/-/verdaccio-memory-1.0.0.tgz#33f12dd27bfc850602f2245799291ed5ec75414f"
verdaccio-memory@1.0.1:
version "1.0.1"
resolved "https://registry.npmjs.org/verdaccio-memory/-/verdaccio-memory-1.0.1.tgz#1bd49c997145a0c2d8d73836e2714e79d1463a12"
dependencies:
"@verdaccio/streams" "^1.0.0"
http-errors "1.6.3"