Mirror of https://github.com/verdaccio/verdaccio.git (synced 2024-11-08 23:25:51 +01:00)
refactor: storage utilities

relocate methods, no logic change

commit de61e469ed (parent a7aa77ac3a)
src/lib/metadata-utils.js (new file, +37)
@@ -0,0 +1,37 @@
// @flow

import semver from 'semver';
import _ from 'lodash';
import {DIST_TAGS} from './utils';

import type {Package} from '@verdaccio/types';

/**
 * Function gets a local info and an info from uplinks and tries to merge it
 exported for unit tests only.
 * @param {*} local
 * @param {*} up
 * @param {*} config
 * @static
 */
export function mergeVersions(local: Package, up: Package) {
  // copy new versions to a cache
  // NOTE: if a certain version was updated, we can't refresh it reliably
  for (let i in up.versions) {
    if (_.isNil(local.versions[i])) {
      local.versions[i] = up.versions[i];
    }
  }

  for (let i in up[DIST_TAGS]) {
    if (local[DIST_TAGS][i] !== up[DIST_TAGS][i]) {
      if (!local[DIST_TAGS][i] || semver.lte(local[DIST_TAGS][i], up[DIST_TAGS][i])) {
        local[DIST_TAGS][i] = up[DIST_TAGS][i];
      }
      if (i === 'latest' && local[DIST_TAGS][i] === up[DIST_TAGS][i]) {
        // if remote has more fresh package, we should borrow its readme
        local.readme = up.readme;
      }
    }
  }
}
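For orientation, a minimal sketch of what the relocated mergeVersions helper does. The two objects below are hand-written stand-ins for the Flow Package type, not data taken from the repository:

// hypothetical local copy and uplink response; values are made up
import {mergeVersions} from './metadata-utils';

const local = {
  versions: {'1.0.0': {}},
  'dist-tags': {latest: '1.0.0'},
  readme: 'local readme',
};
const up = {
  versions: {'1.0.0': {}, '1.1.0': {}},
  'dist-tags': {latest: '1.1.0'},
  readme: 'uplink readme',
};

mergeVersions(local, up);
// local.versions now also contains '1.1.0' (unknown versions are copied over),
// local['dist-tags'].latest becomes '1.1.0' (the uplink tag wins when it is semver >= ours),
// and local.readme is replaced because the uplink owns the newer 'latest'.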
src/lib/storage-utils.js
@@ -2,11 +2,11 @@

import _ from 'lodash';
import crypto from 'crypto';
-import * as Utils from './utils';
+import {ErrorCode, isObject, normalize_dist_tags} from './utils';
import Search from './search';

-import type {
-  Package, Version,
-} from '@verdaccio/types';
+import type {Package, Version} from '@verdaccio/types';
+import type {IStorage} from '../../types';

const pkgFileName = 'package.json';
const fileExist: string = 'EEXISTS';
@@ -42,7 +42,7 @@ function normalizePackage(pkg: Package) {
    'time'];

  pkgProperties.forEach((key) => {
-    if (_.isNil(Utils.isObject(pkg[key]))) {
+    if (_.isNil(isObject(pkg[key]))) {
      pkg[key] = {};
    }
  });
@@ -52,7 +52,7 @@ function normalizePackage(pkg: Package) {
  }

  // normalize dist-tags
-  Utils.normalize_dist_tags(pkg);
+  normalize_dist_tags(pkg);

  return pkg;
}
@@ -71,6 +71,73 @@ function cleanUpReadme(version: Version): Version {
  return version;
}

+/**
+ * Check whether a package it is already a local package
+ * @param {*} name
+ * @param {*} localStorage
+ */
+export function checkPackageLocal(name: string, localStorage: IStorage): Promise<any> {
+  return new Promise((resolve, reject) => {
+    localStorage.getPackageMetadata(name, (err, results) => {
+      if (!_.isNil(err) && err.status !== 404) {
+        return reject(err);
+      }
+      if (results) {
+        return reject(ErrorCode.get409('this package is already present'));
+      }
+      return resolve();
+    });
+  });
+}
+
+export function publishPackage(name: string, metadata: any, localStorage: IStorage): Promise<any> {
+  return new Promise((resolve, reject) => {
+    localStorage.addPackage(name, metadata, (err, latest) => {
+      if (!_.isNull(err)) {
+        return reject(err);
+      } else if (!_.isUndefined(latest)) {
+        Search.add(latest);
+      }
+      return resolve();
+    });
+  });
+}
+
+export function checkPackageRemote(name: string, isAllowPublishOffline: boolean, syncMetadata: Function): Promise<any> {
+  return new Promise((resolve, reject) => {
+    // $FlowFixMe
+    syncMetadata(name, null, {}, (err, packageJsonLocal, upLinksErrors) => {
+
+      // something weird
+      if (err && err.status !== 404) {
+        return reject(err);
+      }
+
+      // checking package exist already
+      if (_.isNil(packageJsonLocal) === false) {
+        return reject(ErrorCode.get409('this package is already present'));
+      }
+
+      for (let errorItem = 0; errorItem < upLinksErrors.length; errorItem++) {
+        // checking error
+        // if uplink fails with a status other than 404, we report failure
+        if (_.isNil(upLinksErrors[errorItem][0]) === false) {
+          if (upLinksErrors[errorItem][0].status !== 404) {
+
+            if (isAllowPublishOffline) {
+              return resolve();
+            }
+
+            return reject(ErrorCode.get503('one of the uplinks is down, refuse to publish'));
+          }
+        }
+      }
+
+      return resolve();
+    });
+  });
+}
+
export {
  generatePackageTemplate,
  normalizePackage,
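The three extracted helpers are designed to be awaited in sequence. A hedged sketch of the intended flow, mirroring the new async addPackage in storage.js below; localStorage, syncMetadata and allowOffline are hypothetical stand-ins:

async function addPackageSketch(name, metadata, localStorage, syncMetadata, allowOffline) {
  await checkPackageLocal(name, localStorage);                 // rejects with 409 if the package already exists locally
  await checkPackageRemote(name, allowOffline, syncMetadata);  // rejects with 409/503 depending on what the uplinks report
  await publishPackage(name, metadata, localStorage);          // writes the package and feeds it to the Search index
}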
src/lib/storage.js
@@ -3,14 +3,14 @@
import _ from 'lodash';
import assert from 'assert';
import async from 'async';
import createError from 'http-errors';
import semver from 'semver';
import Stream from 'stream';

import ProxyStorage from './up-storage';
import Search from './search';
import LocalStorage from './local-storage';
import {ReadTarball} from '@verdaccio/streams';
import ProxyStorage from './up-storage';
import {checkPackageLocal, publishPackage, checkPackageRemote} from './storage-utils';
import {setupUpLinks, updateVersionsHiddenUpLink} from './uplink-util';
import {mergeVersions} from './metadata-utils';
import {ErrorCode, normalize_dist_tags, validate_metadata, isObject, DIST_TAGS} from './utils';
import type {IStorage, IProxy, IStorageHandler, ProxyList, StringValue} from '../../types';
import type {
@@ -52,8 +52,7 @@ class Storage implements IStorageHandler {
   */
  constructor(config: Config) {
    this.config = config;
-    this.uplinks = {};
-    this._setupUpLinks(this.config);
+    this.uplinks = setupUpLinks(config);
    this.logger = LoggerApi.logger.child();
  }

@@ -68,106 +67,27 @@ class Storage implements IStorageHandler {
   Function checks if package with the same name is available from uplinks.
   If it isn't, we create package locally
   Used storages: local (write) && uplinks
   * @param {*} name
   * @param {*} metadata
   * @param {*} callback
   */
-  addPackage(name: string, metadata: any, callback: Function) {
-    const self = this;
+  async addPackage(name: string, metadata: any, callback: Function) {
+    try {
+      await checkPackageLocal(name, this.localStorage);
+      await checkPackageRemote(name, this._isAllowPublishOffline(), this._syncUplinksMetadata.bind(this));
+      await publishPackage(name, metadata, this.localStorage);
+      callback();
+    } catch (err) {
+      callback(err);
+    }
  }

-    /**
-     * Check whether a package it is already a local package
-     * @return {Promise}
-     */
-    const checkPackageLocal = () => {
-      return new Promise((resolve, reject) => {
-        this.localStorage.getPackageMetadata(name, (err, results) => {
-          if (!_.isNil(err) && err.status !== 404) {
-            return reject(err);
-          }
-          if (results) {
-            return reject(ErrorCode.get409('this package is already present'));
-          }
-          return resolve();
-        });
-      });
-    };
-
-    /**
-     * Check whether a package exist in any of the uplinks.
-     * @return {Promise}
-     */
-    const checkPackageRemote = () => {
-      return new Promise((resolve, reject) => {
-        // $FlowFixMe
-        self._syncUplinksMetadata(name, null, {}, (err, results, err_results) => {
-          // something weird
-          if (err && err.status !== 404) {
-            return reject(err);
-          }
-          // checking package
-          if (results) {
-            return reject(ErrorCode.get409('this package is already present'));
-          }
-          for (let i = 0; i < err_results.length; i++) {
-            // checking error
-            // if uplink fails with a status other than 404, we report failure
-            if (_.isNil(err_results[i][0]) === false) {
-              if (err_results[i][0].status !== 404) {
-                if (this.config.publish &&
-                  _.isBoolean(this.config.publish.allow_offline) &&
-                  this.config.publish.allow_offline) {
-                  return resolve();
-                }
-                return reject(createError(503, 'one of the uplinks is down, refuse to publish'));
-              }
-            }
-          }
-
-          return resolve();
-        });
-      });
-    };
-
-    /**
-     * Add a package to the local database
-     * @return {Promise}
-     */
-    const publishPackage = () => {
-      return new Promise((resolve, reject) => {
-        self.localStorage.addPackage(name, metadata, (err, latest) => {
-          if (!_.isNull(err)) {
-            return reject(err);
-          } else if (!_.isUndefined(latest)) {
-            Search.add(latest);
-          }
-          return resolve();
-        });
-      });
-    };
-
-    // NOTE:
-    // - when we checking package for existance, we ask ALL uplinks
-    // - when we publishing package, we only publish it to some of them
-    // so all requests are necessary
-    checkPackageLocal()
-      .then(() => {
-        return checkPackageRemote().then(() => {
-          return publishPackage().then(() => {
-            callback();
-          }, (err) => callback(err));
-        }, (err) => callback(err));
-      }, (err) => callback(err));
+  _isAllowPublishOffline(): boolean {
+    return typeof this.config.publish !== 'undefined'
+      && _.isBoolean(this.config.publish.allow_offline)
+      && this.config.publish.allow_offline;
+  }

  /**
   * Add a new version of package {name} to a system
   Used storages: local (write)
   * @param {*} name
   * @param {*} version
   * @param {*} metadata
   * @param {*} tag
   * @param {*} callback
   */
  addVersion(name: string, version: string, metadata: Version, tag: StringValue, callback: Callback) {
    this.localStorage.addVersion(name, version, metadata, tag, callback);
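A hedged usage sketch of the refactored method: addPackage keeps its callback contract, but since it is now declared async it also returns a Promise (matching the IStorageHandler change at the end of this diff). Here storage, metadata and next are illustrative stand-ins:

storage.addPackage('some-package', metadata, (err) => {
  if (err) {
    // 409 when the package already exists locally or on an uplink,
    // 503 when an uplink is down and publish.allow_offline is not enabled
    return next(err);
  }
  // published
});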
@@ -176,9 +96,6 @@
  /**
   * Tags a package version with a provided tag
   Used storages: local (write)
   * @param {*} name
   * @param {*} tag_hash
   * @param {*} callback
   */
  mergeTags(name: string, tagHash: MergeTags, callback: Callback) {
    this.localStorage.mergeTags(name, tagHash, callback);
@@ -187,9 +104,6 @@
  /**
   * Tags a package version with a provided tag
   Used storages: local (write)
   * @param {*} name
   * @param {*} tag_hash
   * @param {*} callback
   */
  replaceTags(name: string, tagHash: MergeTags, callback: Callback) {
    this.logger.warn('method deprecated');
@@ -200,10 +114,6 @@
   * Change an existing package (i.e. unpublish one version)
   Function changes a package info from local storage and all uplinks with write access./
   Used storages: local (write)
   * @param {*} name
   * @param {*} metadata
   * @param {*} revision
   * @param {*} callback
   */
  changePackage(name: string, metadata: Package, revision: string, callback: Callback) {
    this.localStorage.changePackage(name, metadata, revision, callback);
@@ -213,8 +123,6 @@
   * Remove a package from a system
   Function removes a package from local storage
   Used storages: local (write)
   * @param {*} name
   * @param {*} callback
   */
  removePackage(name: string, callback: Callback) {
    this.localStorage.removePackage(name, callback);
@@ -228,10 +136,6 @@
   Tarball in question should not be linked to in any existing
   versions, i.e. package version should be unpublished first.
   Used storage: local (write)
   * @param {*} name
   * @param {*} filename
   * @param {*} revision
   * @param {*} callback
   */
  removeTarball(name: string, filename: string, revision: string, callback: Callback) {
    this.localStorage.removeTarball(name, filename, revision, callback);
@@ -241,9 +145,6 @@
   * Upload a tarball for {name} package
   Function is syncronous and returns a WritableStream
   Used storages: local (write)
   * @param {*} name
   * @param {*} filename
   * @return {Stream}
   */
  addTarball(name: string, filename: string): IUploadTarball {
    return this.localStorage.addTarball(name, filename);
@@ -255,9 +156,6 @@
   Function tries to read tarball locally, if it fails then it reads package
   information in order to figure out where we can get this tarball from
   Used storages: local || uplink (just one)
   * @param {*} name
   * @param {*} filename
   * @return {Stream}
   */
  getTarball(name: string, filename: string) {
    let readStream = new ReadTarball();
@@ -573,7 +471,7 @@

        if (err || !upLinkResponse) {
          // $FlowFixMe
-          return cb(null, [err || createError(500, 'no data')]);
+          return cb(null, [err || ErrorCode.get500('no data')]);
        }

        try {
@@ -596,10 +494,10 @@
          packageInfo.time = upLinkResponse.time;
        }

-        this._updateVersionsHiddenUpLink(upLinkResponse.versions, upLink);
+        updateVersionsHiddenUpLink(upLinkResponse.versions, upLink);

        try {
-          Storage._mergeVersions(packageInfo, upLinkResponse, self.config);
+          mergeVersions(packageInfo, upLinkResponse);

        } catch(err) {
          self.logger.error({
@@ -648,54 +546,6 @@
        }
      }
    }

-  /**
-   * Set up the Up Storage for each link.
-   * @param {Object} config
-   * @private
-   */
-  _setupUpLinks(config: Config) {
-    for (let uplinkName in config.uplinks) {
-      if (Object.prototype.hasOwnProperty.call(config.uplinks, uplinkName)) {
-        // instance for each up-link definition
-        const proxy: IProxy = new ProxyStorage(config.uplinks[uplinkName], config);
-        proxy.upname = uplinkName;
-
-        this.uplinks[uplinkName] = proxy;
-      }
-    }
-  }
-
-  /**
-   * Function gets a local info and an info from uplinks and tries to merge it
-   exported for unit tests only.
-   * @param {*} local
-   * @param {*} up
-   * @param {*} config
-   * @static
-   */
-  static _mergeVersions(local: Package, up: Package, config: Config) {
-    // copy new versions to a cache
-    // NOTE: if a certain version was updated, we can't refresh it reliably
-    for (let i in up.versions) {
-      if (_.isNil(local.versions[i])) {
-        local.versions[i] = up.versions[i];
-      }
-    }
-
-    for (let i in up[DIST_TAGS]) {
-      if (local[DIST_TAGS][i] !== up[DIST_TAGS][i]) {
-        if (!local[DIST_TAGS][i] || semver.lte(local[DIST_TAGS][i], up[DIST_TAGS][i])) {
-          local[DIST_TAGS][i] = up[DIST_TAGS][i];
-        }
-        if (i === 'latest' && local[DIST_TAGS][i] === up[DIST_TAGS][i]) {
-          // if remote has more fresh package, we should borrow its readme
-          local.readme = up.readme;
-        }
-      }
-    }
-  }

}

export default Storage;
src/lib/uplink-util.js (new file, +107)
@@ -0,0 +1,107 @@
// @flow

import {ErrorCode, isObject, validate_metadata} from './utils';
import ProxyStorage from './up-storage';
import {mergeVersions} from './metadata-utils';

import type {Package, Versions, Config, Logger} from '@verdaccio/types';
import type {IProxy, ProxyList} from '../../types';

/**
 * Set up the Up Storage for each link.
 */
export function setupUpLinks(config: Config): ProxyList {
  const uplinks: ProxyList = {};

  for (let uplinkName in config.uplinks) {
    if (Object.prototype.hasOwnProperty.call(config.uplinks, uplinkName)) {
      // instance for each up-link definition
      const proxy: IProxy = new ProxyStorage(config.uplinks[uplinkName], config);
      proxy.upname = uplinkName;

      uplinks[uplinkName] = proxy;
    }
  }

  return uplinks;
}

export function updateVersionsHiddenUpLink(versions: Versions, upLink: IProxy) {
  for (let i in versions) {
    if (Object.prototype.hasOwnProperty.call(versions, i)) {
      const version = versions[i];

      // holds a "hidden" value to be used by the package storage.
      // $FlowFixMe
      version[Symbol.for('__verdaccio_uplink')] = upLink.upname;
    }
  }
}

export function fetchUplinkMetadata(name: string, packageInfo: Package,
  options: any, upLink: any, logger: Logger): Promise<any> {

  return new Promise(function(resolve, reject) {
    const _options = Object.assign({}, options);
    const upLinkMeta = packageInfo._uplinks[upLink.upname];

    if (isObject(upLinkMeta)) {

      const fetched = upLinkMeta.fetched;

      // check whether is too soon to ask for metadata
      if (fetched && (Date.now() - fetched) < upLink.maxage) {
        return resolve(false);
      }

      _options.etag = upLinkMeta.etag;
    }

    upLink.getRemoteMetadata(name, _options, function handleUplinkMetadataResponse(err, upLinkResponse, eTag) {
      if (err && err.remoteStatus === 304) {
        upLinkMeta.fetched = Date.now();
      }

      if (err || !upLinkResponse) {
        // $FlowFixMe
        return reject(err || ErrorCode.get500('no data'));
      }

      try {
        validate_metadata(upLinkResponse, name);
      } catch(err) {
        logger.error({
          sub: 'out',
          err: err,
        }, 'package.json validating error @{!err.message}\n@{err.stack}');
        return reject(err);
      }

      packageInfo._uplinks[upLink.upname] = {
        etag: eTag,
        fetched: Date.now(),
      };

      // added to fix verdaccio#73
      if ('time' in upLinkResponse) {
        packageInfo.time = upLinkResponse.time;
      }

      updateVersionsHiddenUpLink(upLinkResponse.versions, upLink);

      try {
        mergeVersions(packageInfo, upLinkResponse);
      } catch(err) {
        logger.error({
          sub: 'out',
          err: err,
        }, 'package.json parsing error @{!err.message}\n@{err.stack}');
        return reject(err);
      }

      // if we got to this point, assume that the correct package exists
      // on the uplink
      resolve(true);
    });
  });
}
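A hedged sketch of how the new uplink helpers are meant to be consumed; config, packageInfo and logger are stand-ins, and 'npmjs' is just an example uplink key:

const uplinks = setupUpLinks(config);   // one ProxyStorage instance per entry in config.uplinks

fetchUplinkMetadata('some-package', packageInfo, {}, uplinks['npmjs'], logger)
  .then((fetched) => {
    // resolves false when the cached copy is still fresh (within upLink.maxage),
    // true after remote metadata has been validated and merged into packageInfo
  })
  .catch((err) => {
    // network failure, validation error, or merge error
  });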
src/lib/utils.js
@@ -345,8 +345,8 @@ const ErrorCode = {
  get403: (message: string = 'can\'t use this filename') => {
    return createError(403, message);
  },
-  get503: () => {
-    return createError(500, 'resource temporarily unavailable');
+  get503: (message: string = 'resource temporarily unavailable') => {
+    return createError(503, message);
  },
  get404: (customMessage?: string) => {
    return createError(404, customMessage || 'no such package available');
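The get503 change also fixes a bug: the old helper returned a 500 with a fixed message, while the new one builds a real 503 and accepts a custom message, which is what checkPackageRemote in storage-utils.js relies on. A quick illustration:

const err = ErrorCode.get503('one of the uplinks is down, refuse to publish');
// err.status === 503
// err.message === 'one of the uplinks is down, refuse to publish'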
@@ -1,6 +1,6 @@
let assert = require('assert');
let semverSort = require('../../src/lib/utils').semverSort;
-import Storage from '../../src/lib/storage';
+import {mergeVersions} from '../../src/lib/metadata-utils';

require('../../src/lib/logger').setup([]);

@@ -12,7 +12,7 @@ describe('Storage._merge_versions versions', () => {
      'dist-tags': {},
    };

-    Storage._mergeVersions(pkg, {versions: {a: 2, q: 2}});
+    mergeVersions(pkg, {versions: {a: 2, q: 2}});

    assert.deepEqual(pkg, {
      'versions': {a: 1, b: 1, c: 1, q: 2},
@@ -26,7 +26,7 @@ describe('Storage._merge_versions versions', () => {
      'dist-tags': {q: '1.1.1', w: '2.2.2'},
    };

-    Storage._mergeVersions(pkg, {'dist-tags': {q: '2.2.2', w: '3.3.3', t: '4.4.4'}});
+    mergeVersions(pkg, {'dist-tags': {q: '2.2.2', w: '3.3.3', t: '4.4.4'}});

    assert.deepEqual(pkg, {
      'versions': {},
@@ -46,7 +46,7 @@ describe('Storage._merge_versions versions', () => {
    // against our local 1.1.10, which may end up published as 1.1.3 in the
    // future

-    Storage._mergeVersions(pkg, {'dist-tags':{q:'1.1.2',w:'3.3.3',t:'4.4.4'}})
+    mergeVersions(pkg, {'dist-tags':{q:'1.1.2',w:'3.3.3',t:'4.4.4'}})

    assert.deepEqual(pkg, {
      versions: {},
@@ -57,10 +57,11 @@ export interface IProxy {
  upname: string;
  fetchTarball(url: string): IReadTarball;
  isUplinkValid(url: string): boolean;
+  getRemoteMetadata(name: string, options: any, callback: Callback): void;
}

export type ProxyList = {
-  [key: string]: IProxy | null;
+  [key: string]: IProxy;
}

export type Utils = {
@@ -78,7 +79,7 @@ export interface IStorageHandler {
  localStorage: IStorage;
  logger: Logger;
  uplinks: ProxyList;
-  addPackage(name: string, metadata: any, callback: Function): void;
+  addPackage(name: string, metadata: any, callback: Function): Promise<any>;
  init(config: Config): Promise<any>;
  addVersion(name: string, version: string, metadata: Version, tag: StringValue, callback: Callback): void;
  mergeTags(name: string, tagHash: MergeTags, callback: Callback): void;
@@ -93,7 +94,6 @@ export interface IStorageHandler {
  getLocalDatabase(callback: Callback): void;
  _syncUplinksMetadata(name: string, packageInfo: Package, options: any, callback: Callback): void;
  _updateVersionsHiddenUpLink(versions: Versions, upLink: IProxy): void;
-  _setupUpLinks(config: Config): void;
}

export interface IStorage {