* Additional helpers to MenuModule
* Gzip signature (.gz)
* Switch to sha-256 vs sha1 for internal file hashes
* Nearly complete callback / scan update support for scanFile()
* Fix data input issue after performing upload
* Support 'sz' recv (uploads)
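For context, a minimal sketch of how a caller might drive the new scanFile() iterator callback described by this commit. The require() path, area/storage tags, and file path below are assumptions for illustration only (they are not part of this commit), and it is assumed the module exports scanFile():

const fileArea = require('./core/file_area.js'); // hypothetical module path

fileArea.scanFile(
    '/home/bbs/file_base/uploads/EXAMPLE.ZIP',          // hypothetical path
    { areaTag : 'uploads', storageTag : 'uploads' },     // hypothetical tags
    (stepInfo, next) => {
        //  steps seen in this commit include 'start', 'hash_update', 'hash_finish',
        //  'archive_list_start', 'desc_files_start', 'desc_files_finish', 'read_error'
        if('hash_update' === stepInfo.step) {
            console.log(`${stepInfo.bytesProcessed} / ${stepInfo.byteSize} bytes hashed`);
        }
        return next(null); // pass an Error instead to abort the scan
    },
    (err, fileEntry, dupeEntries) => {
        if(err) {
            return console.error(err.message);
        }
        console.log(`sha256=${fileEntry.fileSha256}; ${dupeEntries.length} dupe(s) already in the file base`);
    }
);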
@@ -278,6 +278,13 @@ function getDefaultConfig() {
exts : [ 'rar' ],
handler : '7Zip',
desc : 'RAR Archive',
},
gzip : {
sig : '1f8b',
offset : 0,
exts : [ 'gz' ],
handler : '7Zip',
desc : 'Gzip Archive',
}
}
},
@@ -294,7 +301,7 @@ function getDefaultConfig() {
],
recvCmd : 'rz', // Avail on Debian/Ubuntu based systems as the package "lrzsz"
recvArgs : [
'--zmodem', '--binary', '--restricted', // dumps to CWD which is set to {uploadDir}
'--zmodem', '--binary', '--restricted', '--keep-uppercase', // dumps to CWD which is set to {uploadDir}
],
// :TODO: can we not just use --escape ?
escapeTelnet : true, // set to true to escape Telnet codes such as IAC

@@ -260,7 +260,7 @@ const DB_INIT_TABLE = {
`CREATE TABLE IF NOT EXISTS file (
file_id INTEGER PRIMARY KEY,
area_tag VARCHAR NOT NULL,
file_sha1 VARCHAR NOT NULL,
file_sha256 VARCHAR NOT NULL,
file_name, /* FTS @ file_fts */
storage_tag VARCHAR NOT NULL,
desc, /* FTS @ file_fts */

@@ -160,14 +160,14 @@ function getFileEntryPath(fileEntry) {
}
}

function getExistingFileEntriesBySha1(sha1, cb) {
function getExistingFileEntriesBySha256(sha256, cb) {
const entries = [];

FileDb.each(
`SELECT file_id, area_tag
FROM file
WHERE file_sha1=?;`,
[ sha1 ],
WHERE file_sha256=?;`,
[ sha256 ],
(err, fileRow) => {
if(fileRow) {
entries.push({
@@ -237,14 +237,38 @@ function attemptSetEstimatedReleaseDate(fileEntry) {
}
}

function populateFileEntryWithArchive(fileEntry, filePath, archiveType, cb) {
const archiveUtil = ArchiveUtil.getInstance();
function populateFileEntryWithArchive(fileEntry, filePath, stepInfo, iterator, cb) {
const archiveUtil = ArchiveUtil.getInstance();
const archiveType = fileEntry.meta.archive_type; // we set this previous to populateFileEntryWithArchive()

async.waterfall(
[
function getArchiveFileList(callback) {
archiveUtil.listEntries(filePath, archiveType, (err, entries) => {
return callback(null, entries || []); // ignore any errors here
function getArchiveFileList(callback) {
stepInfo.step = 'archive_list_start';

iterator(err => {
if(err) {
return callback(err);
}

archiveUtil.listEntries(filePath, archiveType, (err, entries) => {
if(err) {
stepInfo.step = 'archive_list_failed';
} else {
stepInfo.step = 'archive_list_finish';
stepInfo.archiveEntries = entries || [];
}

iterator(iterErr => {
return callback( iterErr, entries || [] ); // ignore original |err| here
});
});
});
},
function processDescFilesStart(entries, callback) {
stepInfo.step = 'desc_files_start';
iterator(err => {
return callback(err, entries);
});
},
function extractDescFiles(entries, callback) {
@@ -320,7 +344,11 @@ function populateFileEntryWithArchive(fileEntry, filePath, archiveType, cb) {
function attemptReleaseYearEstimation(callback) {
attemptSetEstimatedReleaseDate(fileEntry);
return callback(null);
}
},
function processDescFilesFinish(callback) {
stepInfo.step = 'desc_files_finish';
return iterator(callback);
},
],
err => {
return cb(err);
@@ -328,7 +356,7 @@ function populateFileEntryWithArchive(fileEntry, filePath, archiveType, cb) {
);
}

function populateFileEntryNonArchive(fileEntry, filePath, archiveType, cb) {
function populateFileEntryNonArchive(fileEntry, filePath, stepInfo, iterator, cb) {
// :TODO: implement me!
return cb(null);
}
@@ -352,11 +380,17 @@ function updateFileEntry(fileEntry, filePath, cb) {

}

function scanFile(filePath, options, cb) {

if(_.isFunction(options) && !cb) {
cb = options;
options = {};
const HASH_NAMES = [ 'sha1', 'sha256', 'md5', 'crc32' ];

function scanFile(filePath, options, iterator, cb) {

if(3 === arguments.length && _.isFunction(iterator)) {
cb = iterator;
iterator = null;
} else if(2 === arguments.length && _.isFunction(options)) {
cb = options;
iterator = null;
options = {};
}

const fileEntry = new FileEntry({
@@ -367,42 +401,96 @@ function scanFile(filePath, options, cb) {
storageTag : options.storageTag,
});

const stepInfo = {
filePath : filePath,
fileName : paths.basename(filePath),
};

function callIter(next) {
if(iterator) {
return iterator(stepInfo, next);
} else {
return next(null);
}
}

function readErrorCallIter(origError, next) {
stepInfo.step = 'read_error';
stepInfo.error = origError.message;

callIter( () => {
return next(origError);
});
}

async.waterfall(
[
function startScan(callback) {
fs.stat(filePath, (err, stats) => {
if(err) {
return readErrorCallIter(err, callback);
}

stepInfo.step = 'start';
stepInfo.byteSize = fileEntry.meta.byte_size = stats.size;

return callIter(callback);
});
},
function processPhysicalFileGeneric(callback) {
let byteSize = 0;
const sha1 = crypto.createHash('sha1');
const sha256 = crypto.createHash('sha256');
const md5 = crypto.createHash('md5');
const crc32 = new CRC32();

stepInfo.bytesProcessed = 0;

const hashes = {
sha1 : crypto.createHash('sha1'),
sha256 : crypto.createHash('sha256'),
md5 : crypto.createHash('md5'),
crc32 : new CRC32(),
};

const stream = fs.createReadStream(filePath);

stream.on('data', data => {
byteSize += data.length;
stream.pause(); // until iterator completes

sha1.update(data);
sha256.update(data);
md5.update(data);
crc32.update(data);
stepInfo.bytesProcessed += data.length;
stepInfo.step = 'hash_update';

callIter(err => {
if(err) {
stream.destroy(); // cancel read
return callback(err);
}

async.each( HASH_NAMES, (hashName, nextHash) => {
hashes[hashName].update(data);
return nextHash(null);
}, () => {
return stream.resume();
});
});
});

stream.on('end', () => {
fileEntry.meta.byte_size = byteSize;
fileEntry.meta.byte_size = stepInfo.bytesProcessed;

// sha-1 is in basic file entry
fileEntry.fileSha1 = sha1.digest('hex');
async.each(HASH_NAMES, (hashName, nextHash) => {
if('sha256' === hashName) {
stepInfo.sha256 = fileEntry.fileSha256 = hashes.sha256.digest('hex');
} else if('sha1' === hashName || 'md5' === hashName) {
stepInfo[hashName] = fileEntry.meta[`file_${hashName}`] = hashes[hashName].digest('hex');
} else if('crc32' === hashName) {
stepInfo.crc32 = fileEntry.meta.crc32 = hashes.crc32.finalize().toString(16);
}

// others are meta
fileEntry.meta.file_sha256 = sha256.digest('hex');
fileEntry.meta.file_md5 = md5.digest('hex');
fileEntry.meta.file_crc32 = crc32.finalize().toString(16);

return callback(null);
return nextHash(null);
}, () => {
stepInfo.step = 'hash_finish';
return callIter(callback);
});
});

stream.on('error', err => {
return callback(err);
return readErrorCallIter(err, callback);
});
},
function processPhysicalFileByType(callback) {
@@ -413,9 +501,9 @@ function scanFile(filePath, options, cb) {
// save this off
fileEntry.meta.archive_type = archiveType;

populateFileEntryWithArchive(fileEntry, filePath, archiveType, err => {
populateFileEntryWithArchive(fileEntry, filePath, stepInfo, callIter, err => {
if(err) {
populateFileEntryNonArchive(fileEntry, filePath, err => {
populateFileEntryNonArchive(fileEntry, filePath, stepInfo, callIter, err => {
// :TODO: log err
return callback(null); // ignore err
});
@@ -424,7 +512,7 @@ function scanFile(filePath, options, cb) {
}
});
} else {
populateFileEntryNonArchive(fileEntry, filePath, err => {
populateFileEntryNonArchive(fileEntry, filePath, stepInfo, callIter, err => {
// :TODO: log err
return callback(null); // ignore err
});
@@ -432,92 +520,21 @@ function scanFile(filePath, options, cb) {
});
},
function fetchExistingEntry(callback) {
getExistingFileEntriesBySha1(fileEntry.fileSha1, (err, existingEntries) => {
return callback(err, existingEntries);
getExistingFileEntriesBySha256(fileEntry.fileSha256, (err, dupeEntries) => {
return callback(err, dupeEntries);
});
}
],
(err, existingEntries) => {
(err, dupeEntries) => {
if(err) {
return cb(err);
}

return cb(null, fileEntry, existingEntries);
return cb(null, fileEntry, dupeEntries);
}
);
}

/*
function addOrUpdateFileEntry(areaInfo, storageLocation, fileName, options, cb) {

const fileEntry = new FileEntry({
areaTag : areaInfo.areaTag,
meta : options.meta,
hashTags : options.hashTags, // Set() or Array
fileName : fileName,
storageTag : storageLocation.storageTag,
});

const filePath = paths.join(storageLocation.dir, fileName);

async.waterfall(
[
function processPhysicalFile(callback) {
let byteSize = 0;
const sha1 = crypto.createHash('sha1');
const sha256 = crypto.createHash('sha256');
const md5 = crypto.createHash('md5');
const crc32 = new CRC32();

const stream = fs.createReadStream(filePath);

stream.on('data', data => {
byteSize += data.length;

sha1.update(data);
sha256.update(data);
md5.update(data);
crc32.update(data);
});

stream.on('end', () => {
fileEntry.meta.byte_size = byteSize;

// sha-1 is in basic file entry
fileEntry.fileSha1 = sha1.digest('hex');

// others are meta
fileEntry.meta.file_sha256 = sha256.digest('hex');
fileEntry.meta.file_md5 = md5.digest('hex');
fileEntry.meta.file_crc32 = crc32.finalize().toString(16);

return callback(null);
});

stream.on('error', err => {
return callback(err);
});
},
function fetchExistingEntry(callback) {
getExistingFileEntriesBySha1(fileEntry.fileSha1, (err, existingEntries) => {
return callback(err, existingEntries);
});
},
function addOrUpdate(existingEntries, callback) {
if(existingEntries.length > 0) {

} else {
return addNewFileEntry(fileEntry, filePath, callback);
}
},
],
err => {
return cb(err);
}
);
}
*/

function scanFileAreaForChanges(areaInfo, cb) {
const storageLocations = getAreaStorageLocations(areaInfo);

@@ -551,13 +568,13 @@ function scanFileAreaForChanges(areaInfo, cb) {
areaTag : areaInfo.areaTag,
storageTag : storageLoc.storageTag
},
(err, fileEntry, existingEntries) => {
(err, fileEntry, dupeEntries) => {
if(err) {
// :TODO: Log me!!!
return nextFile(null); // try next anyway
}

if(existingEntries.length > 0) {
if(dupeEntries.length > 0) {
// :TODO: Handle duplicates -- what to do here???
} else {
addNewFileEntry(fileEntry, fullPath, err => {

@@ -12,7 +12,7 @@ const _ = require('lodash');
const paths = require('path');

const FILE_TABLE_MEMBERS = [
'file_id', 'area_tag', 'file_sha1', 'file_name', 'storage_tag',
'file_id', 'area_tag', 'file_sha256', 'file_name', 'storage_tag',
'desc', 'desc_long', 'upload_timestamp'
];

@@ -21,7 +21,7 @@ const FILE_WELL_KNOWN_META = {
upload_by_username : null,
upload_by_user_id : null,
file_md5 : null,
file_sha256 : null,
file_sha1 : null,
file_crc32 : null,
est_release_year : (y) => parseInt(y) || new Date().getFullYear(),
dl_count : (d) => parseInt(d) || 0,
@@ -100,9 +100,9 @@ module.exports = class FileEntry {
},
function storeEntry(callback) {
fileDb.run(
`REPLACE INTO file (area_tag, file_sha1, file_name, storage_tag, desc, desc_long, upload_timestamp)
`REPLACE INTO file (area_tag, file_sha256, file_name, storage_tag, desc, desc_long, upload_timestamp)
VALUES(?, ?, ?, ?, ?, ?, ?);`,
[ self.areaTag, self.fileSha1, self.fileName, self.storageTag, self.desc, self.descLong, getISOTimestampString() ],
[ self.areaTag, self.fileSha256, self.fileName, self.storageTag, self.desc, self.descLong, getISOTimestampString() ],
function inserted(err) { // use non-arrow func for 'this' scope / lastID
if(!err) {
self.fileId = this.lastID;

@@ -1,17 +1,18 @@
/* jslint node: true */
'use strict';

var PluginModule = require('./plugin_module.js').PluginModule;
var theme = require('./theme.js');
var ansi = require('./ansi_term.js');
var ViewController = require('./view_controller.js').ViewController;
var menuUtil = require('./menu_util.js');
var Config = require('./config.js').config;
const PluginModule = require('./plugin_module.js').PluginModule;
const theme = require('./theme.js');
const ansi = require('./ansi_term.js');
const ViewController = require('./view_controller.js').ViewController;
const menuUtil = require('./menu_util.js');
const Config = require('./config.js').config;
const stringFormat = require('../core/string_format.js');

// deps
var async = require('async');
var assert = require('assert');
var _ = require('lodash');
const async = require('async');
const assert = require('assert');
const _ = require('lodash');

exports.MenuModule = MenuModule;

@@ -386,4 +387,28 @@ MenuModule.prototype.prepViewControllerWithArt = function(name, formId, options,
return this.prepViewController(name, formId, artData, cb);
}
);
};

MenuModule.prototype.setViewText = function(formName, mciId, text) {
const view = this.viewControllers[formName].getView(mciId);
if(view) {
view.setText(text);
}
};

MenuModule.prototype.updateCustomViewTextsWithFilter = function(formName, startId, fmtObj, filter) {
let textView;
let customMciId = startId;
const config = this.menuConfig.config;

while( (textView = this.viewControllers[formName].getView(customMciId)) ) {
const key = `${formName}InfoFormat${customMciId}`;
const format = config[key];

if(format && (!filter || filter.find(f => format.indexOf(f) > - 1))) {
textView.setText(stringFormat(format, fmtObj));
}

++customMciId;
}
};
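A brief usage sketch for the new updateCustomViewTextsWithFilter() helper, not part of this commit: the form name, starting MCI id, and config format keys below are assumptions for illustration. The helper walks custom MCI views upward from startId and re-formats each one whose `${formName}InfoFormat${mciId}` entry in the menu config passes the optional filter:

// hypothetical menu config entries read by the helper:
//   browseInfoFormat10 : '{fileName} ({byteSize} bytes)',
//   browseInfoFormat11 : '{bytesProcessed} bytes hashed',
//
// from inside a MenuModule descendant, e.g. while handling scanFile() step updates:
this.updateCustomViewTextsWithFilter(
    'browse',                  // formName
    10,                        // first custom MCI id to walk
    stepInfo,                  // object handed to stringFormat()
    [ '{bytesProcessed}' ]     // only update views whose format contains this token
);
// omit the filter argument to update every view that has a matching format entry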
@@ -195,6 +195,17 @@ function getPredefinedMCIValue(client, code) {
//
// :TODO: System stat log for total ul/dl, total ul/dl bytes

// :TODO: PT - Messages posted *today* (Obv/2)
// :TODO: NT - New users today (Obv/2)
// :TODO: CT - Calls *today* (Obv/2)
// :TODO: TF - Total files on the system (Obv/2)
// :TODO: FT - Files uploaded/added *today* (Obv/2)
// :TODO: DD - Files downloaded *today* (iNiQUiTY)
// :TODO: TP - total message/posts on the system (Obv/2)
// :TODO: LC - name of last caller to system (Obv/2)
// :TODO: TZ - Average *system* post/call ratio (iNiQUiTY)

//
// Special handling for XY
//

@@ -439,12 +439,12 @@ function TelnetClient(input, output) {
};

this.setTemporaryDataHandler = function(handler) {
this.input.removeAllListeners();
this.input.removeAllListeners('data');
this.input.on('data', handler);
};

this.restoreDataHandler = function() {
this.input.removeAllListeners();
this.input.removeAllListeners('data');
this.input.on('data', this.dataHandler);
};
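The change from removeAllListeners() to removeAllListeners('data') above appears to be the "fix data input issue after performing upload" noted in the commit message: the un-scoped call also stripped every other listener registered on the input stream, while the scoped call only swaps out the 'data' handler. A small standalone illustration using Node's EventEmitter (illustration only, not code from this commit):

const EventEmitter = require('events').EventEmitter;

const input = new EventEmitter();
input.on('data', () => console.log('data handler'));
input.on('error', () => console.log('error handler'));

// input.removeAllListeners();      // would silently drop the 'error' handler too
input.removeAllListeners('data');   // clears only the handler being swapped out
input.on('data', chunk => { /* temporary handler, e.g. data piped to sz/rz */ });

console.log(input.listenerCount('error')); // 1 -- unrelated listeners survive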
@@ -351,7 +351,13 @@ exports.getModule = class TransferFileModule extends MenuModule {
});

this.client.setTemporaryDataHandler(data => {
externalProc.write(data);
// needed for things like sz/rz
if(external.escapeTelnet) {
const tmp = data.toString('binary').replace(/\xff{2}/g, '\xff'); // de-escape
externalProc.write(new Buffer(tmp, 'binary'));
} else {
externalProc.write(data);
}
});

//this.client.term.output.pipe(externalProc);
@@ -359,7 +365,7 @@ exports.getModule = class TransferFileModule extends MenuModule {
externalProc.on('data', data => {
// needed for things like sz/rz
if(external.escapeTelnet) {
const tmp = data.toString('binary').replace(/\xff/g, '\xff\xff');
const tmp = data.toString('binary').replace(/\xff/g, '\xff\xff'); // escape
this.client.term.rawWrite(new Buffer(tmp, 'binary'));
} else {
this.client.term.rawWrite(data);
@@ -484,7 +490,6 @@ exports.getModule = class TransferFileModule extends MenuModule {
StatLog.incrementSystemStat('ul_total_count', uploadCount);
StatLog.incrementSystemStat('ul_total_bytes', uploadBytes);

return cb(null);
});
}
@@ -556,12 +561,16 @@ exports.getModule = class TransferFileModule extends MenuModule {
self.client.log.warn( { error : err.message }, 'File transfer error');
}

return self.prevMenu();
/*

// Wait for a key press - attempt to avoid issues with some terminals after xfer
// :TODO: display ANSI if it exists else prompt -- look @ Obv/2 for filename
self.client.term.pipeWrite('|00|07\nTransfer(s) complete. Press a key\n');
self.client.waitForKeyPress( () => {
return self.prevMenu();
});
*/
}
);
}

@@ -1,10 +1,7 @@
/* jslint node: true */
'use strict';

let uuid = require('node-uuid');
let assert = require('assert');
let _ = require('lodash');
let createHash = require('crypto').createHash;
const createHash = require('crypto').createHash;

exports.createNamedUUID = createNamedUUID;

@@ -13,9 +10,9 @@ function createNamedUUID(namespaceUuid, key) {
// v5 UUID generation code based on the work here:
// https://github.com/download13/uuidv5/blob/master/uuid.js
//
if(!Buffer.isBuffer(namespaceUuid)) {
namespaceUuid = new Buffer(namespaceUuid);
}
if(!Buffer.isBuffer(namespaceUuid)) {
namespaceUuid = new Buffer(namespaceUuid);
}

if(!Buffer.isBuffer(key)) {
key = new Buffer(key);