| Column | Type |
|---|---|
| repo | string (5–67 chars) |
| path | string (4–116 chars) |
| func_name | string (0–58 chars) |
| original_string | string (52–373k chars) |
| language | string (1 class) |
| code | string (52–373k chars) |
| code_tokens | list |
| docstring | string (4–11.8k chars) |
| docstring_tokens | list |
| sha | string (40 chars) |
| url | string (86–226 chars) |
| partition | string (1 class) |
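Each record that follows lists these fields in this order, with field values separated by lines containing only `|`. As a minimal sketch of how such a dump could be consumed (assuming an export to JSON Lines with one record per line, and a file name chosen here for illustration — neither is specified in this dump), the records can be read and summarized like this:

```javascript
// Sketch only: file name and JSONL export format are assumptions.
const fs = require('fs');
const readline = require('readline');

async function summarize(file) {
  const rl = readline.createInterface({
    input: fs.createReadStream(file),
    crlfDelay: Infinity
  });
  for await (const line of rl) {
    if (!line.trim()) continue;
    const row = JSON.parse(line);
    // One line per record: repo, function name, first line of the docstring.
    console.log(`${row.repo} :: ${row.func_name} -> ${row.docstring.split('\n')[0]}`);
  }
}

summarize('code_docstring_pairs.jsonl').catch(console.error);
```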
rossmartin/cordova-uglify
|
after_prepare/uglify.js
|
run
|
function run() {
platforms.forEach(function(platform) {
var wwwPath;
switch (platform) {
case 'android':
wwwPath = path.join(platformPath, platform, 'assets', 'www');
if (!fs.existsSync(wwwPath)) {
wwwPath = path.join(platformPath, platform, 'app', 'src', 'main', 'assets', 'www');
}
break;
case 'ios':
case 'browser':
case 'wp8':
case 'windows':
wwwPath = path.join(platformPath, platform, 'www');
break;
default:
console.log('this hook only supports android, ios, wp8, windows, and browser currently');
return;
}
processFolders(wwwPath);
});
}
|
javascript
|
function run() {
platforms.forEach(function(platform) {
var wwwPath;
switch (platform) {
case 'android':
wwwPath = path.join(platformPath, platform, 'assets', 'www');
if (!fs.existsSync(wwwPath)) {
wwwPath = path.join(platformPath, platform, 'app', 'src', 'main', 'assets', 'www');
}
break;
case 'ios':
case 'browser':
case 'wp8':
case 'windows':
wwwPath = path.join(platformPath, platform, 'www');
break;
default:
console.log('this hook only supports android, ios, wp8, windows, and browser currently');
return;
}
processFolders(wwwPath);
});
}
|
[
"function",
"run",
"(",
")",
"{",
"platforms",
".",
"forEach",
"(",
"function",
"(",
"platform",
")",
"{",
"var",
"wwwPath",
";",
"switch",
"(",
"platform",
")",
"{",
"case",
"'android'",
":",
"wwwPath",
"=",
"path",
".",
"join",
"(",
"platformPath",
",",
"platform",
",",
"'assets'",
",",
"'www'",
")",
";",
"if",
"(",
"!",
"fs",
".",
"existsSync",
"(",
"wwwPath",
")",
")",
"{",
"wwwPath",
"=",
"path",
".",
"join",
"(",
"platformPath",
",",
"platform",
",",
"'app'",
",",
"'src'",
",",
"'main'",
",",
"'assets'",
",",
"'www'",
")",
";",
"}",
"break",
";",
"case",
"'ios'",
":",
"case",
"'browser'",
":",
"case",
"'wp8'",
":",
"case",
"'windows'",
":",
"wwwPath",
"=",
"path",
".",
"join",
"(",
"platformPath",
",",
"platform",
",",
"'www'",
")",
";",
"break",
";",
"default",
":",
"console",
".",
"log",
"(",
"'this hook only supports android, ios, wp8, windows, and browser currently'",
")",
";",
"return",
";",
"}",
"processFolders",
"(",
"wwwPath",
")",
";",
"}",
")",
";",
"}"
] |
Run compression for all specified platforms.
@return {undefined}
|
[
"Run",
"compression",
"for",
"all",
"specified",
"platforms",
"."
] |
b9beccc8956d3215e6bd104c4d57d1f5a894bf68
|
https://github.com/rossmartin/cordova-uglify/blob/b9beccc8956d3215e6bd104c4d57d1f5a894bf68/after_prepare/uglify.js#L40-L66
|
train
|
rossmartin/cordova-uglify
|
after_prepare/uglify.js
|
processFolders
|
function processFolders(wwwPath) {
foldersToProcess.forEach(function(folder) {
processFiles(path.join(wwwPath, folder));
});
}
|
javascript
|
function processFolders(wwwPath) {
foldersToProcess.forEach(function(folder) {
processFiles(path.join(wwwPath, folder));
});
}
|
[
"function",
"processFolders",
"(",
"wwwPath",
")",
"{",
"foldersToProcess",
".",
"forEach",
"(",
"function",
"(",
"folder",
")",
"{",
"processFiles",
"(",
"path",
".",
"join",
"(",
"wwwPath",
",",
"folder",
")",
")",
";",
"}",
")",
";",
"}"
] |
Processes defined folders.
@param {string} wwwPath - Path to www directory
@return {undefined}
|
[
"Processes",
"defined",
"folders",
"."
] |
b9beccc8956d3215e6bd104c4d57d1f5a894bf68
|
https://github.com/rossmartin/cordova-uglify/blob/b9beccc8956d3215e6bd104c4d57d1f5a894bf68/after_prepare/uglify.js#L73-L77
|
train
|
rossmartin/cordova-uglify
|
after_prepare/uglify.js
|
compress
|
function compress(file) {
var ext = path.extname(file),
res,
source,
result;
switch (ext) {
case '.js':
console.log('uglifying js file ' + file);
res = ngAnnotate(String(fs.readFileSync(file, 'utf8')), {
add: true
});
result = UglifyJS.minify(res.src, hookConfig.uglifyJsOptions);
fs.writeFileSync(file, result.code, 'utf8'); // overwrite the original unminified file
break;
case '.css':
console.log('minifying css file ' + file);
source = fs.readFileSync(file, 'utf8');
result = cssMinifier.minify(source);
fs.writeFileSync(file, result.styles, 'utf8'); // overwrite the original unminified file
break;
default:
console.log('encountered a ' + ext + ' file, not compressing it');
break;
}
}
|
javascript
|
function compress(file) {
var ext = path.extname(file),
res,
source,
result;
switch (ext) {
case '.js':
console.log('uglifying js file ' + file);
res = ngAnnotate(String(fs.readFileSync(file, 'utf8')), {
add: true
});
result = UglifyJS.minify(res.src, hookConfig.uglifyJsOptions);
fs.writeFileSync(file, result.code, 'utf8'); // overwrite the original unminified file
break;
case '.css':
console.log('minifying css file ' + file);
source = fs.readFileSync(file, 'utf8');
result = cssMinifier.minify(source);
fs.writeFileSync(file, result.styles, 'utf8'); // overwrite the original unminified file
break;
default:
console.log('encountered a ' + ext + ' file, not compressing it');
break;
}
}
|
[
"function",
"compress",
"(",
"file",
")",
"{",
"var",
"ext",
"=",
"path",
".",
"extname",
"(",
"file",
")",
",",
"res",
",",
"source",
",",
"result",
";",
"switch",
"(",
"ext",
")",
"{",
"case",
"'.js'",
":",
"console",
".",
"log",
"(",
"'uglifying js file '",
"+",
"file",
")",
";",
"res",
"=",
"ngAnnotate",
"(",
"String",
"(",
"fs",
".",
"readFileSync",
"(",
"file",
",",
"'utf8'",
")",
")",
",",
"{",
"add",
":",
"true",
"}",
")",
";",
"result",
"=",
"UglifyJS",
".",
"minify",
"(",
"res",
".",
"src",
",",
"hookConfig",
".",
"uglifyJsOptions",
")",
";",
"fs",
".",
"writeFileSync",
"(",
"file",
",",
"result",
".",
"code",
",",
"'utf8'",
")",
";",
"break",
";",
"case",
"'.css'",
":",
"console",
".",
"log",
"(",
"'minifying css file '",
"+",
"file",
")",
";",
"source",
"=",
"fs",
".",
"readFileSync",
"(",
"file",
",",
"'utf8'",
")",
";",
"result",
"=",
"cssMinifier",
".",
"minify",
"(",
"source",
")",
";",
"fs",
".",
"writeFileSync",
"(",
"file",
",",
"result",
".",
"styles",
",",
"'utf8'",
")",
";",
"break",
";",
"default",
":",
"console",
".",
"log",
"(",
"'encountered a '",
"+",
"ext",
"+",
"' file, not compressing it'",
")",
";",
"break",
";",
"}",
"}"
] |
Compresses file.
@param {string} file - File path
@return {undefined}
|
[
"Compresses",
"file",
"."
] |
b9beccc8956d3215e6bd104c4d57d1f5a894bf68
|
https://github.com/rossmartin/cordova-uglify/blob/b9beccc8956d3215e6bd104c4d57d1f5a894bf68/after_prepare/uglify.js#L117-L146
|
train
|
mozilla/makedrive
|
lib/fs-utils.js
|
hasAttr
|
function hasAttr(fs, path, attr, callback) {
fs.getxattr(path, attr, function(err, attrVal) {
// File doesn't exist locally at all
if(err && err.code === 'ENOENT') {
return callback(null, false);
}
// Deal with unexpected error
if(err && err.code !== 'ENOATTR') {
return callback(err);
}
callback(null, !!attrVal);
});
}
|
javascript
|
function hasAttr(fs, path, attr, callback) {
fs.getxattr(path, attr, function(err, attrVal) {
// File doesn't exist locally at all
if(err && err.code === 'ENOENT') {
return callback(null, false);
}
// Deal with unexpected error
if(err && err.code !== 'ENOATTR') {
return callback(err);
}
callback(null, !!attrVal);
});
}
|
[
"function",
"hasAttr",
"(",
"fs",
",",
"path",
",",
"attr",
",",
"callback",
")",
"{",
"fs",
".",
"getxattr",
"(",
"path",
",",
"attr",
",",
"function",
"(",
"err",
",",
"attrVal",
")",
"{",
"if",
"(",
"err",
"&&",
"err",
".",
"code",
"===",
"'ENOENT'",
")",
"{",
"return",
"callback",
"(",
"null",
",",
"false",
")",
";",
"}",
"if",
"(",
"err",
"&&",
"err",
".",
"code",
"!==",
"'ENOATTR'",
")",
"{",
"return",
"callback",
"(",
"err",
")",
";",
"}",
"callback",
"(",
"null",
",",
"!",
"!",
"attrVal",
")",
";",
"}",
")",
";",
"}"
] |
See if a given path a) exists, and b) whether it is marked with an xattr.
|
[
"See",
"if",
"a",
"given",
"path",
"a",
")",
"exists",
"and",
"whether",
"it",
"is",
"marked",
"with",
"an",
"xattr",
"."
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/fs-utils.js#L7-L21
|
train
|
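The part worth noting in hasAttr above is the error handling: ENOENT (path missing) maps to a clean `false`, while ENOATTR (attribute missing) is not treated as an error. A minimal, self-contained usage sketch — the export name, the attribute string, and the stubbed fs are assumptions; MakeDrive itself calls these helpers against a Filer filesystem:

```javascript
// Hypothetical: assumes lib/fs-utils.js exports hasAttr (exports are not shown in this dump).
var fsUtils = require('./lib/fs-utils');

// Tiny in-memory stand-in for a filesystem that supports extended attributes.
var xattrs = { '/notes.txt': { 'makedrive-unsynced': Date.now() } }; // attribute name is made up
var fakeFs = {
  getxattr: function(path, attr, callback) {
    if (!(path in xattrs)) {
      return callback({ code: 'ENOENT' });  // path does not exist
    }
    if (!(attr in xattrs[path])) {
      return callback({ code: 'ENOATTR' }); // path exists, attribute does not
    }
    callback(null, xattrs[path][attr]);
  }
};

fsUtils.hasAttr(fakeFs, '/notes.txt', 'makedrive-unsynced', function(err, marked) {
  console.log(err, marked); // null true  -> attribute present
});
fsUtils.hasAttr(fakeFs, '/missing.txt', 'makedrive-unsynced', function(err, marked) {
  console.log(err, marked); // null false -> ENOENT is reported as "not marked", not as an error
});
```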
mozilla/makedrive
|
lib/fs-utils.js
|
removeAttr
|
function removeAttr(fs, pathOrFd, attr, isFd, callback) {
var removeFn = 'fremovexattr';
if(isFd !== true) {
callback = isFd;
removeFn = 'removexattr';
}
fs[removeFn](pathOrFd, attr, function(err) {
if(err && err.code !== 'ENOATTR') {
return callback(err);
}
callback();
});
}
|
javascript
|
function removeAttr(fs, pathOrFd, attr, isFd, callback) {
var removeFn = 'fremovexattr';
if(isFd !== true) {
callback = isFd;
removeFn = 'removexattr';
}
fs[removeFn](pathOrFd, attr, function(err) {
if(err && err.code !== 'ENOATTR') {
return callback(err);
}
callback();
});
}
|
[
"function",
"removeAttr",
"(",
"fs",
",",
"pathOrFd",
",",
"attr",
",",
"isFd",
",",
"callback",
")",
"{",
"var",
"removeFn",
"=",
"'fremovexattr'",
";",
"if",
"(",
"isFd",
"!==",
"true",
")",
"{",
"callback",
"=",
"isFd",
";",
"removeFn",
"=",
"'removexattr'",
";",
"}",
"fs",
"[",
"removeFn",
"]",
"(",
"pathOrFd",
",",
"attr",
",",
"function",
"(",
"err",
")",
"{",
"if",
"(",
"err",
"&&",
"err",
".",
"code",
"!==",
"'ENOATTR'",
")",
"{",
"return",
"callback",
"(",
"err",
")",
";",
"}",
"callback",
"(",
")",
";",
"}",
")",
";",
"}"
] |
Remove the metadata from a path or file descriptor
|
[
"Remove",
"the",
"metadata",
"from",
"a",
"path",
"or",
"file",
"descriptor"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/fs-utils.js#L24-L39
|
train
|
mozilla/makedrive
|
lib/fs-utils.js
|
getAttr
|
function getAttr(fs, pathOrFd, attr, isFd, callback) {
var getFn = 'fgetxattr';
if(isFd !== true) {
callback = isFd;
getFn = 'getxattr';
}
fs[getFn](pathOrFd, attr, function(err, value) {
if(err && err.code !== 'ENOATTR') {
return callback(err);
}
callback(null, value);
});
}
|
javascript
|
function getAttr(fs, pathOrFd, attr, isFd, callback) {
var getFn = 'fgetxattr';
if(isFd !== true) {
callback = isFd;
getFn = 'getxattr';
}
fs[getFn](pathOrFd, attr, function(err, value) {
if(err && err.code !== 'ENOATTR') {
return callback(err);
}
callback(null, value);
});
}
|
[
"function",
"getAttr",
"(",
"fs",
",",
"pathOrFd",
",",
"attr",
",",
"isFd",
",",
"callback",
")",
"{",
"var",
"getFn",
"=",
"'fgetxattr'",
";",
"if",
"(",
"isFd",
"!==",
"true",
")",
"{",
"callback",
"=",
"isFd",
";",
"getFn",
"=",
"'getxattr'",
";",
"}",
"fs",
"[",
"getFn",
"]",
"(",
"pathOrFd",
",",
"attr",
",",
"function",
"(",
"err",
",",
"value",
")",
"{",
"if",
"(",
"err",
"&&",
"err",
".",
"code",
"!==",
"'ENOATTR'",
")",
"{",
"return",
"callback",
"(",
"err",
")",
";",
"}",
"callback",
"(",
"null",
",",
"value",
")",
";",
"}",
")",
";",
"}"
] |
Get the metadata for a path or file descriptor
|
[
"Get",
"the",
"metadata",
"for",
"a",
"path",
"or",
"file",
"descriptor"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/fs-utils.js#L42-L57
|
train
|
mozilla/makedrive
|
lib/fs-utils.js
|
forceCopy
|
function forceCopy(fs, oldPath, newPath, callback) {
fs.unlink(newPath, function(err) {
if(err && err.code !== 'ENOENT') {
return callback(err);
}
fs.readFile(oldPath, function(err, buf) {
if(err) {
return callback(err);
}
fs.writeFile(newPath, buf, callback);
});
});
}
|
javascript
|
function forceCopy(fs, oldPath, newPath, callback) {
fs.unlink(newPath, function(err) {
if(err && err.code !== 'ENOENT') {
return callback(err);
}
fs.readFile(oldPath, function(err, buf) {
if(err) {
return callback(err);
}
fs.writeFile(newPath, buf, callback);
});
});
}
|
[
"function",
"forceCopy",
"(",
"fs",
",",
"oldPath",
",",
"newPath",
",",
"callback",
")",
"{",
"fs",
".",
"unlink",
"(",
"newPath",
",",
"function",
"(",
"err",
")",
"{",
"if",
"(",
"err",
"&&",
"err",
".",
"code",
"!==",
"'ENOENT'",
")",
"{",
"return",
"callback",
"(",
"err",
")",
";",
"}",
"fs",
".",
"readFile",
"(",
"oldPath",
",",
"function",
"(",
"err",
",",
"buf",
")",
"{",
"if",
"(",
"err",
")",
"{",
"return",
"callback",
"(",
"err",
")",
";",
"}",
"fs",
".",
"writeFile",
"(",
"newPath",
",",
"buf",
",",
"callback",
")",
";",
"}",
")",
";",
"}",
")",
";",
"}"
] |
copy oldPath to newPath, deleting newPath if it exists
|
[
"copy",
"oldPath",
"to",
"newPath",
"deleting",
"newPath",
"if",
"it",
"exists"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/fs-utils.js#L60-L74
|
train
|
mozilla/makedrive
|
lib/fs-utils.js
|
isPathUnsynced
|
function isPathUnsynced(fs, path, callback) {
hasAttr(fs, path, constants.attributes.unsynced, callback);
}
|
javascript
|
function isPathUnsynced(fs, path, callback) {
hasAttr(fs, path, constants.attributes.unsynced, callback);
}
|
[
"function",
"isPathUnsynced",
"(",
"fs",
",",
"path",
",",
"callback",
")",
"{",
"hasAttr",
"(",
"fs",
",",
"path",
",",
"constants",
".",
"attributes",
".",
"unsynced",
",",
"callback",
")",
";",
"}"
] |
See if a given path a) exists, and b) whether it is marked unsynced.
|
[
"See",
"if",
"a",
"given",
"path",
"a",
")",
"exists",
"and",
"whether",
"it",
"is",
"marked",
"unsynced",
"."
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/fs-utils.js#L77-L79
|
train
|
mozilla/makedrive
|
lib/fs-utils.js
|
removeUnsynced
|
function removeUnsynced(fs, path, callback) {
removeAttr(fs, path, constants.attributes.unsynced, callback);
}
|
javascript
|
function removeUnsynced(fs, path, callback) {
removeAttr(fs, path, constants.attributes.unsynced, callback);
}
|
[
"function",
"removeUnsynced",
"(",
"fs",
",",
"path",
",",
"callback",
")",
"{",
"removeAttr",
"(",
"fs",
",",
"path",
",",
"constants",
".",
"attributes",
".",
"unsynced",
",",
"callback",
")",
";",
"}"
] |
Remove the unsynced metadata from a path
|
[
"Remove",
"the",
"unsynced",
"metadata",
"from",
"a",
"path"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/fs-utils.js#L82-L84
|
train
|
mozilla/makedrive
|
lib/fs-utils.js
|
setUnsynced
|
function setUnsynced(fs, path, callback) {
fs.setxattr(path, constants.attributes.unsynced, Date.now(), callback);
}
|
javascript
|
function setUnsynced(fs, path, callback) {
fs.setxattr(path, constants.attributes.unsynced, Date.now(), callback);
}
|
[
"function",
"setUnsynced",
"(",
"fs",
",",
"path",
",",
"callback",
")",
"{",
"fs",
".",
"setxattr",
"(",
"path",
",",
"constants",
".",
"attributes",
".",
"unsynced",
",",
"Date",
".",
"now",
"(",
")",
",",
"callback",
")",
";",
"}"
] |
Set the unsynced metadata for a path
|
[
"Set",
"the",
"unsynced",
"metadata",
"for",
"a",
"path"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/fs-utils.js#L90-L92
|
train
|
mozilla/makedrive
|
lib/fs-utils.js
|
getUnsynced
|
function getUnsynced(fs, path, callback) {
getAttr(fs, path, constants.attributes.unsynced, callback);
}
|
javascript
|
function getUnsynced(fs, path, callback) {
getAttr(fs, path, constants.attributes.unsynced, callback);
}
|
[
"function",
"getUnsynced",
"(",
"fs",
",",
"path",
",",
"callback",
")",
"{",
"getAttr",
"(",
"fs",
",",
"path",
",",
"constants",
".",
"attributes",
".",
"unsynced",
",",
"callback",
")",
";",
"}"
] |
Get the unsynced metadata for a path
|
[
"Get",
"the",
"unsynced",
"metadata",
"for",
"a",
"path"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/fs-utils.js#L98-L100
|
train
|
mozilla/makedrive
|
lib/fs-utils.js
|
removeChecksum
|
function removeChecksum(fs, path, callback) {
removeAttr(fs, path, constants.attributes.checksum, callback);
}
|
javascript
|
function removeChecksum(fs, path, callback) {
removeAttr(fs, path, constants.attributes.checksum, callback);
}
|
[
"function",
"removeChecksum",
"(",
"fs",
",",
"path",
",",
"callback",
")",
"{",
"removeAttr",
"(",
"fs",
",",
"path",
",",
"constants",
".",
"attributes",
".",
"checksum",
",",
"callback",
")",
";",
"}"
] |
Remove the Checksum metadata from a path
|
[
"Remove",
"the",
"Checksum",
"metadata",
"from",
"a",
"path"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/fs-utils.js#L106-L108
|
train
|
mozilla/makedrive
|
lib/fs-utils.js
|
setChecksum
|
function setChecksum(fs, path, checksum, callback) {
fs.setxattr(path, constants.attributes.checksum, checksum, callback);
}
|
javascript
|
function setChecksum(fs, path, checksum, callback) {
fs.setxattr(path, constants.attributes.checksum, checksum, callback);
}
|
[
"function",
"setChecksum",
"(",
"fs",
",",
"path",
",",
"checksum",
",",
"callback",
")",
"{",
"fs",
".",
"setxattr",
"(",
"path",
",",
"constants",
".",
"attributes",
".",
"checksum",
",",
"checksum",
",",
"callback",
")",
";",
"}"
] |
Set the Checksum metadata for a path
|
[
"Set",
"the",
"Checksum",
"metadata",
"for",
"a",
"path"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/fs-utils.js#L114-L116
|
train
|
mozilla/makedrive
|
lib/fs-utils.js
|
getChecksum
|
function getChecksum(fs, path, callback) {
getAttr(fs, path, constants.attributes.checksum, callback);
}
|
javascript
|
function getChecksum(fs, path, callback) {
getAttr(fs, path, constants.attributes.checksum, callback);
}
|
[
"function",
"getChecksum",
"(",
"fs",
",",
"path",
",",
"callback",
")",
"{",
"getAttr",
"(",
"fs",
",",
"path",
",",
"constants",
".",
"attributes",
".",
"checksum",
",",
"callback",
")",
";",
"}"
] |
Get the Checksum metadata for a path
|
[
"Get",
"the",
"Checksum",
"metadata",
"for",
"a",
"path"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/fs-utils.js#L122-L124
|
train
|
mozilla/makedrive
|
lib/fs-utils.js
|
isPathPartial
|
function isPathPartial(fs, path, callback) {
hasAttr(fs, path, constants.attributes.partial, callback);
}
|
javascript
|
function isPathPartial(fs, path, callback) {
hasAttr(fs, path, constants.attributes.partial, callback);
}
|
[
"function",
"isPathPartial",
"(",
"fs",
",",
"path",
",",
"callback",
")",
"{",
"hasAttr",
"(",
"fs",
",",
"path",
",",
"constants",
".",
"attributes",
".",
"partial",
",",
"callback",
")",
";",
"}"
] |
See if a given path a) exists, and b) whether it is marked partial.
|
[
"See",
"if",
"a",
"given",
"path",
"a",
")",
"exists",
"and",
"whether",
"it",
"is",
"marked",
"partial",
"."
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/fs-utils.js#L130-L132
|
train
|
mozilla/makedrive
|
lib/fs-utils.js
|
removePartial
|
function removePartial(fs, path, callback) {
removeAttr(fs, path, constants.attributes.partial, callback);
}
|
javascript
|
function removePartial(fs, path, callback) {
removeAttr(fs, path, constants.attributes.partial, callback);
}
|
[
"function",
"removePartial",
"(",
"fs",
",",
"path",
",",
"callback",
")",
"{",
"removeAttr",
"(",
"fs",
",",
"path",
",",
"constants",
".",
"attributes",
".",
"partial",
",",
"callback",
")",
";",
"}"
] |
Remove the partial metadata from a path
|
[
"Remove",
"the",
"partial",
"metadata",
"from",
"a",
"path"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/fs-utils.js#L135-L137
|
train
|
mozilla/makedrive
|
lib/fs-utils.js
|
setPartial
|
function setPartial(fs, path, nodeCount, callback) {
fs.setxattr(path, constants.attributes.partial, nodeCount, callback);
}
|
javascript
|
function setPartial(fs, path, nodeCount, callback) {
fs.setxattr(path, constants.attributes.partial, nodeCount, callback);
}
|
[
"function",
"setPartial",
"(",
"fs",
",",
"path",
",",
"nodeCount",
",",
"callback",
")",
"{",
"fs",
".",
"setxattr",
"(",
"path",
",",
"constants",
".",
"attributes",
".",
"partial",
",",
"nodeCount",
",",
"callback",
")",
";",
"}"
] |
Set the partial metadata for a path
|
[
"Set",
"the",
"partial",
"metadata",
"for",
"a",
"path"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/fs-utils.js#L143-L145
|
train
|
mozilla/makedrive
|
lib/fs-utils.js
|
getPartial
|
function getPartial(fs, path, callback) {
getAttr(fs, path, constants.attributes.partial, callback);
}
|
javascript
|
function getPartial(fs, path, callback) {
getAttr(fs, path, constants.attributes.partial, callback);
}
|
[
"function",
"getPartial",
"(",
"fs",
",",
"path",
",",
"callback",
")",
"{",
"getAttr",
"(",
"fs",
",",
"path",
",",
"constants",
".",
"attributes",
".",
"partial",
",",
"callback",
")",
";",
"}"
] |
Get the partial metadata for a path
|
[
"Get",
"the",
"partial",
"metadata",
"for",
"a",
"path"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/fs-utils.js#L151-L153
|
train
|
mozilla/makedrive
|
lib/fs-utils.js
|
setPathsToSync
|
function setPathsToSync(fs, path, pathsToSync, callback) {
fs.setxattr(path, constants.attributes.pathsToSync, pathsToSync, callback);
}
|
javascript
|
function setPathsToSync(fs, path, pathsToSync, callback) {
fs.setxattr(path, constants.attributes.pathsToSync, pathsToSync, callback);
}
|
[
"function",
"setPathsToSync",
"(",
"fs",
",",
"path",
",",
"pathsToSync",
",",
"callback",
")",
"{",
"fs",
".",
"setxattr",
"(",
"path",
",",
"constants",
".",
"attributes",
".",
"pathsToSync",
",",
"pathsToSync",
",",
"callback",
")",
";",
"}"
] |
Set the pathsToSync metadata for a path
|
[
"Set",
"the",
"pathsToSync",
"metadata",
"for",
"a",
"path"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/fs-utils.js#L159-L161
|
train
|
mozilla/makedrive
|
lib/fs-utils.js
|
getPathsToSync
|
function getPathsToSync(fs, path, callback) {
getAttr(fs, path, constants.attributes.pathsToSync, callback);
}
|
javascript
|
function getPathsToSync(fs, path, callback) {
getAttr(fs, path, constants.attributes.pathsToSync, callback);
}
|
[
"function",
"getPathsToSync",
"(",
"fs",
",",
"path",
",",
"callback",
")",
"{",
"getAttr",
"(",
"fs",
",",
"path",
",",
"constants",
".",
"attributes",
".",
"pathsToSync",
",",
"callback",
")",
";",
"}"
] |
Get the pathsToSync metadata for a path
|
[
"Get",
"the",
"pathsToSync",
"metadata",
"for",
"a",
"path"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/fs-utils.js#L167-L169
|
train
|
mozilla/makedrive
|
server/lib/sync-lock.js
|
request
|
function request(client, path, callback) {
var key = SyncLock.generateKey(client.username);
var id = client.id;
// Try to set this key/value pair, but fail if the path for the key already exists.
redis.hsetnx(key, path, id, function(err, reply) {
if(err) {
log.error({err: err, client: client}, 'Error trying to set redis key with hsetnx');
return callback(err);
}
if(reply === 1) {
// Success, we have the lock (path for the key was set). Return a new SyncLock instance
var lock = new SyncLock(key, id, path);
log.debug({client: client, syncLock: lock}, 'Lock acquired.');
return callback(null, lock);
}
// Path for key was not set (held by another client). See if the lock owner would be
// willing to let us take it. We'll wait a bit for a reply, and if
// we don't get one, assume the client holding the lock, or its server,
// has crashed, and the lock is OK to take.
// Act if we don't hear back from the lock owner in a reasonable
// amount of time, and set the lock ourselves.
var waitTimer = setTimeout(function() {
redis.removeListener('lock-response', client._handleLockResponseFn);
client._handleLockResponseFn = null;
redis.hset(key, path, id, function(err) {
if(err) {
log.error({err: err, client: client}, 'Error setting redis lock key.');
return callback(err);
}
var lock = new SyncLock(key, id, path);
log.debug({client: client, syncLock: lock}, 'Lock request timeout, setting lock manually.');
callback(null, lock);
});
}, CLIENT_TIMEOUT_MS);
waitTimer.unref();
// Listen for a response from the client holding the lock
client._handleLockResponseFn = function(message) {
handleLockResponse(message, key, path, client, waitTimer, callback);
};
redis.on('lock-response', client._handleLockResponseFn);
// Ask the client holding the lock to give it to us
log.debug({client: client}, 'Requesting lock override for ' + path);
redis.publish(Constants.server.lockRequestChannel, JSON.stringify({key: key, id: id, path: path}));
});
}
|
javascript
|
function request(client, path, callback) {
var key = SyncLock.generateKey(client.username);
var id = client.id;
// Try to set this key/value pair, but fail if the path for the key already exists.
redis.hsetnx(key, path, id, function(err, reply) {
if(err) {
log.error({err: err, client: client}, 'Error trying to set redis key with hsetnx');
return callback(err);
}
if(reply === 1) {
// Success, we have the lock (path for the key was set). Return a new SyncLock instance
var lock = new SyncLock(key, id, path);
log.debug({client: client, syncLock: lock}, 'Lock acquired.');
return callback(null, lock);
}
// Path for key was not set (held by another client). See if the lock owner would be
// willing to let us take it. We'll wait a bit for a reply, and if
// we don't get one, assume the client holding the lock, or its server,
// has crashed, and the lock is OK to take.
// Act if we don't hear back from the lock owner in a reasonable
// amount of time, and set the lock ourselves.
var waitTimer = setTimeout(function() {
redis.removeListener('lock-response', client._handleLockResponseFn);
client._handleLockResponseFn = null;
redis.hset(key, path, id, function(err) {
if(err) {
log.error({err: err, client: client}, 'Error setting redis lock key.');
return callback(err);
}
var lock = new SyncLock(key, id, path);
log.debug({client: client, syncLock: lock}, 'Lock request timeout, setting lock manually.');
callback(null, lock);
});
}, CLIENT_TIMEOUT_MS);
waitTimer.unref();
// Listen for a response from the client holding the lock
client._handleLockResponseFn = function(message) {
handleLockResponse(message, key, path, client, waitTimer, callback);
};
redis.on('lock-response', client._handleLockResponseFn);
// Ask the client holding the lock to give it to us
log.debug({client: client}, 'Requesting lock override for ' + path);
redis.publish(Constants.server.lockRequestChannel, JSON.stringify({key: key, id: id, path: path}));
});
}
|
[
"function",
"request",
"(",
"client",
",",
"path",
",",
"callback",
")",
"{",
"var",
"key",
"=",
"SyncLock",
".",
"generateKey",
"(",
"client",
".",
"username",
")",
";",
"var",
"id",
"=",
"client",
".",
"id",
";",
"redis",
".",
"hsetnx",
"(",
"key",
",",
"path",
",",
"id",
",",
"function",
"(",
"err",
",",
"reply",
")",
"{",
"if",
"(",
"err",
")",
"{",
"log",
".",
"error",
"(",
"{",
"err",
":",
"err",
",",
"client",
":",
"client",
"}",
",",
"'Error trying to set redis key with hsetnx'",
")",
";",
"return",
"callback",
"(",
"err",
")",
";",
"}",
"if",
"(",
"reply",
"===",
"1",
")",
"{",
"var",
"lock",
"=",
"new",
"SyncLock",
"(",
"key",
",",
"id",
",",
"path",
")",
";",
"log",
".",
"debug",
"(",
"{",
"client",
":",
"client",
",",
"syncLock",
":",
"lock",
"}",
",",
"'Lock acquired.'",
")",
";",
"return",
"callback",
"(",
"null",
",",
"lock",
")",
";",
"}",
"var",
"waitTimer",
"=",
"setTimeout",
"(",
"function",
"(",
")",
"{",
"redis",
".",
"removeListener",
"(",
"'lock-response'",
",",
"client",
".",
"_handleLockResponseFn",
")",
";",
"client",
".",
"_handleLockResponseFn",
"=",
"null",
";",
"redis",
".",
"hset",
"(",
"key",
",",
"path",
",",
"id",
",",
"function",
"(",
"err",
")",
"{",
"if",
"(",
"err",
")",
"{",
"log",
".",
"error",
"(",
"{",
"err",
":",
"err",
",",
"client",
":",
"client",
"}",
",",
"'Error setting redis lock key.'",
")",
";",
"return",
"callback",
"(",
"err",
")",
";",
"}",
"var",
"lock",
"=",
"new",
"SyncLock",
"(",
"key",
",",
"id",
",",
"path",
")",
";",
"log",
".",
"debug",
"(",
"{",
"client",
":",
"client",
",",
"syncLock",
":",
"lock",
"}",
",",
"'Lock request timeout, setting lock manually.'",
")",
";",
"callback",
"(",
"null",
",",
"lock",
")",
";",
"}",
")",
";",
"}",
",",
"CLIENT_TIMEOUT_MS",
")",
";",
"waitTimer",
".",
"unref",
"(",
")",
";",
"client",
".",
"_handleLockResponseFn",
"=",
"function",
"(",
"message",
")",
"{",
"handleLockResponse",
"(",
"message",
",",
"key",
",",
"path",
",",
"client",
",",
"waitTimer",
",",
"callback",
")",
";",
"}",
";",
"redis",
".",
"on",
"(",
"'lock-response'",
",",
"client",
".",
"_handleLockResponseFn",
")",
";",
"log",
".",
"debug",
"(",
"{",
"client",
":",
"client",
"}",
",",
"'Requesting lock override for '",
"+",
"path",
")",
";",
"redis",
".",
"publish",
"(",
"Constants",
".",
"server",
".",
"lockRequestChannel",
",",
"JSON",
".",
"stringify",
"(",
"{",
"key",
":",
"key",
",",
"id",
":",
"id",
",",
"path",
":",
"path",
"}",
")",
")",
";",
"}",
")",
";",
"}"
] |
Request a lock for the current client.
|
[
"Request",
"a",
"lock",
"for",
"the",
"current",
"client",
"."
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/server/lib/sync-lock.js#L159-L211
|
train
|
mozilla/makedrive
|
server/lib/sync-lock.js
|
isUserLocked
|
function isUserLocked(username, path, callback) {
var key = SyncLock.generateKey(username);
redis.hget(key, path, function(err, value) {
if(err) {
log.error(err, 'Error getting redis lock key %s.', key);
return callback(err);
}
callback(null, !!value);
});
}
|
javascript
|
function isUserLocked(username, path, callback) {
var key = SyncLock.generateKey(username);
redis.hget(key, path, function(err, value) {
if(err) {
log.error(err, 'Error getting redis lock key %s.', key);
return callback(err);
}
callback(null, !!value);
});
}
|
[
"function",
"isUserLocked",
"(",
"username",
",",
"path",
",",
"callback",
")",
"{",
"var",
"key",
"=",
"SyncLock",
".",
"generateKey",
"(",
"username",
")",
";",
"redis",
".",
"hget",
"(",
"key",
",",
"path",
",",
"function",
"(",
"err",
",",
"value",
")",
"{",
"if",
"(",
"err",
")",
"{",
"log",
".",
"error",
"(",
"err",
",",
"'Error getting redis lock key %s.'",
",",
"key",
")",
";",
"return",
"callback",
"(",
"err",
")",
";",
"}",
"callback",
"(",
"null",
",",
"!",
"!",
"value",
")",
";",
"}",
")",
";",
"}"
] |
Check to see if a lock is held for the given username.
|
[
"Check",
"to",
"see",
"if",
"a",
"lock",
"is",
"held",
"for",
"the",
"given",
"username",
"."
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/server/lib/sync-lock.js#L216-L226
|
train
|
mozilla/makedrive
|
lib/rsync/rsync-utils.js
|
validateParams
|
function validateParams(fs, param2) {
var err;
if(!fs) {
err = new Errors.EINVAL('No filesystem provided');
} else if(!param2) {
err = new Errors.EINVAL('Second argument must be specified');
}
return err;
}
|
javascript
|
function validateParams(fs, param2) {
var err;
if(!fs) {
err = new Errors.EINVAL('No filesystem provided');
} else if(!param2) {
err = new Errors.EINVAL('Second argument must be specified');
}
return err;
}
|
[
"function",
"validateParams",
"(",
"fs",
",",
"param2",
")",
"{",
"var",
"err",
";",
"if",
"(",
"!",
"fs",
")",
"{",
"err",
"=",
"new",
"Errors",
".",
"EINVAL",
"(",
"'No filesystem provided'",
")",
";",
"}",
"else",
"if",
"(",
"!",
"param2",
")",
"{",
"err",
"=",
"new",
"Errors",
".",
"EINVAL",
"(",
"'Second argument must be specified'",
")",
";",
"}",
"return",
"err",
";",
"}"
] |
Validate the parameters sent to each rsync method
|
[
"Validate",
"the",
"parameters",
"sent",
"to",
"each",
"rsync",
"method"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/rsync/rsync-utils.js#L62-L72
|
train
|
mozilla/makedrive
|
lib/rsync/rsync-utils.js
|
calcWeak32
|
function calcWeak32(data, prev, start, end) {
var a = 0;
var b = 0;
var M = 1 << 16;
var N = 65521;
if (!prev) {
var len = (start >= 0 && end >= 0) ? (end - start + 1) : data.length;
var datai;
for (var i = 0; i < len; i++) {
datai = data[i];
a += datai;
b += ((len - i) * datai);
}
a %= N;
b %= N;
} else {
var k = start;
var l = end - 1;
var prev_k = k - 1;
var prev_l = l - 1;
var prev_first = data[prev_k];
var curr_last = data[l];
a = (prev.a - prev_first + curr_last) % N;
b = (prev.b - (prev_l - prev_k + 1) * prev_first + a) % N;
}
return { a: a, b: b, sum: a + b * M };
}
|
javascript
|
function calcWeak32(data, prev, start, end) {
var a = 0;
var b = 0;
var M = 1 << 16;
var N = 65521;
if (!prev) {
var len = (start >= 0 && end >= 0) ? (end - start + 1) : data.length;
var datai;
for (var i = 0; i < len; i++) {
datai = data[i];
a += datai;
b += ((len - i) * datai);
}
a %= N;
b %= N;
} else {
var k = start;
var l = end - 1;
var prev_k = k - 1;
var prev_l = l - 1;
var prev_first = data[prev_k];
var curr_last = data[l];
a = (prev.a - prev_first + curr_last) % N;
b = (prev.b - (prev_l - prev_k + 1) * prev_first + a) % N;
}
return { a: a, b: b, sum: a + b * M };
}
|
[
"function",
"calcWeak32",
"(",
"data",
",",
"prev",
",",
"start",
",",
"end",
")",
"{",
"var",
"a",
"=",
"0",
";",
"var",
"b",
"=",
"0",
";",
"var",
"M",
"=",
"1",
"<<",
"16",
";",
"var",
"N",
"=",
"65521",
";",
"if",
"(",
"!",
"prev",
")",
"{",
"var",
"len",
"=",
"(",
"start",
">=",
"0",
"&&",
"end",
">=",
"0",
")",
"?",
"(",
"end",
"-",
"start",
"+",
"1",
")",
":",
"data",
".",
"length",
";",
"var",
"datai",
";",
"for",
"(",
"var",
"i",
"=",
"0",
";",
"i",
"<",
"len",
";",
"i",
"++",
")",
"{",
"datai",
"=",
"data",
"[",
"i",
"]",
";",
"a",
"+=",
"datai",
";",
"b",
"+=",
"(",
"(",
"len",
"-",
"i",
")",
"*",
"datai",
")",
";",
"}",
"a",
"%=",
"N",
";",
"b",
"%=",
"N",
";",
"}",
"else",
"{",
"var",
"k",
"=",
"start",
";",
"var",
"l",
"=",
"end",
"-",
"1",
";",
"var",
"prev_k",
"=",
"k",
"-",
"1",
";",
"var",
"prev_l",
"=",
"l",
"-",
"1",
";",
"var",
"prev_first",
"=",
"data",
"[",
"prev_k",
"]",
";",
"var",
"curr_last",
"=",
"data",
"[",
"l",
"]",
";",
"a",
"=",
"(",
"prev",
".",
"a",
"-",
"prev_first",
"+",
"curr_last",
")",
"%",
"N",
";",
"b",
"=",
"(",
"prev",
".",
"b",
"-",
"(",
"prev_l",
"-",
"prev_k",
"+",
"1",
")",
"*",
"prev_first",
"+",
"a",
")",
"%",
"N",
";",
"}",
"return",
"{",
"a",
":",
"a",
",",
"b",
":",
"b",
",",
"sum",
":",
"a",
"+",
"b",
"*",
"M",
"}",
";",
"}"
] |
Weak32 hashing for RSync based on Mark Adler's 32bit checksum algorithm
|
[
"Weak32",
"hashing",
"for",
"RSync",
"based",
"on",
"Mark",
"Adler",
"s",
"32bit",
"checksum",
"algorithm"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/rsync/rsync-utils.js#L80-L109
|
train
|
mozilla/makedrive
|
lib/rsync/rsync-utils.js
|
createHashtable
|
function createHashtable(checksums) {
var hashtable = {};
var len = checksums.length;
var checksum;
var weak16;
for (var i = 0; i < len; i++) {
checksum = checksums[i];
weak16 = calcWeak16(checksum.weak);
if (hashtable[weak16]) {
hashtable[weak16].push(checksum);
} else {
hashtable[weak16] = [checksum];
}
}
return hashtable;
}
|
javascript
|
function createHashtable(checksums) {
var hashtable = {};
var len = checksums.length;
var checksum;
var weak16;
for (var i = 0; i < len; i++) {
checksum = checksums[i];
weak16 = calcWeak16(checksum.weak);
if (hashtable[weak16]) {
hashtable[weak16].push(checksum);
} else {
hashtable[weak16] = [checksum];
}
}
return hashtable;
}
|
[
"function",
"createHashtable",
"(",
"checksums",
")",
"{",
"var",
"hashtable",
"=",
"{",
"}",
";",
"var",
"len",
"=",
"checksums",
".",
"length",
";",
"var",
"checksum",
";",
"var",
"weak16",
";",
"for",
"(",
"var",
"i",
"=",
"0",
";",
"i",
"<",
"len",
";",
"i",
"++",
")",
"{",
"checksum",
"=",
"checksums",
"[",
"i",
"]",
";",
"weak16",
"=",
"calcWeak16",
"(",
"checksum",
".",
"weak",
")",
";",
"if",
"(",
"hashtable",
"[",
"weak16",
"]",
")",
"{",
"hashtable",
"[",
"weak16",
"]",
".",
"push",
"(",
"checksum",
")",
";",
"}",
"else",
"{",
"hashtable",
"[",
"weak16",
"]",
"=",
"[",
"checksum",
"]",
";",
"}",
"}",
"return",
"hashtable",
";",
"}"
] |
RSync algorithm to create a hashtable from checksums
|
[
"RSync",
"algorithm",
"to",
"create",
"a",
"hashtable",
"from",
"checksums"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/rsync/rsync-utils.js#L117-L133
|
train
|
mozilla/makedrive
|
lib/rsync/rsync-utils.js
|
roll
|
function roll(data, checksums, blockSize) {
var results = [];
var hashtable = createHashtable(checksums);
var length = data.length;
var start = 0;
var end = blockSize > length ? length : blockSize;
// Updated when a block matches
var lastMatchedEnd = 0;
// This gets updated every iteration with the previous weak 32bit hash
var prevRollingWeak = null;
var weak;
var weak16;
var match;
var d;
var len;
var mightMatch;
var chunk;
var strong;
var hashtable_weak16;
var hashtable_weak16i;
for (; end <= length; start++, end++) {
weak = calcWeak32(data, prevRollingWeak, start, end);
weak16 = calcWeak16(weak.sum);
match = false;
d = null;
prevRollingWeak = weak;
hashtable_weak16 = hashtable[weak16];
if (hashtable_weak16) {
len = hashtable_weak16.length;
for (var i = 0; i < len; i++) {
hashtable_weak16i = hashtable_weak16[i];
if (hashtable_weak16i.weak === weak.sum) {
mightMatch = hashtable_weak16i;
chunk = data.slice(start, end);
strong = md5sum(chunk);
if (mightMatch.strong === strong) {
match = mightMatch;
break;
}
}
}
}
if (match) {
if(start < lastMatchedEnd) {
d = data.slice(lastMatchedEnd - 1, end);
results.push({
data: d,
index: match.index
});
} else if (start - lastMatchedEnd > 0) {
d = data.slice(lastMatchedEnd, start);
results.push({
data: d,
index: match.index
});
} else {
results.push({
index: match.index
});
}
lastMatchedEnd = end;
} else if (end === length) {
// No match and last block
d = data.slice(lastMatchedEnd);
results.push({
data: d
});
}
}
return results;
}
|
javascript
|
function roll(data, checksums, blockSize) {
var results = [];
var hashtable = createHashtable(checksums);
var length = data.length;
var start = 0;
var end = blockSize > length ? length : blockSize;
// Updated when a block matches
var lastMatchedEnd = 0;
// This gets updated every iteration with the previous weak 32bit hash
var prevRollingWeak = null;
var weak;
var weak16;
var match;
var d;
var len;
var mightMatch;
var chunk;
var strong;
var hashtable_weak16;
var hashtable_weak16i;
for (; end <= length; start++, end++) {
weak = calcWeak32(data, prevRollingWeak, start, end);
weak16 = calcWeak16(weak.sum);
match = false;
d = null;
prevRollingWeak = weak;
hashtable_weak16 = hashtable[weak16];
if (hashtable_weak16) {
len = hashtable_weak16.length;
for (var i = 0; i < len; i++) {
hashtable_weak16i = hashtable_weak16[i];
if (hashtable_weak16i.weak === weak.sum) {
mightMatch = hashtable_weak16i;
chunk = data.slice(start, end);
strong = md5sum(chunk);
if (mightMatch.strong === strong) {
match = mightMatch;
break;
}
}
}
}
if (match) {
if(start < lastMatchedEnd) {
d = data.slice(lastMatchedEnd - 1, end);
results.push({
data: d,
index: match.index
});
} else if (start - lastMatchedEnd > 0) {
d = data.slice(lastMatchedEnd, start);
results.push({
data: d,
index: match.index
});
} else {
results.push({
index: match.index
});
}
lastMatchedEnd = end;
} else if (end === length) {
// No match and last block
d = data.slice(lastMatchedEnd);
results.push({
data: d
});
}
}
return results;
}
|
[
"function",
"roll",
"(",
"data",
",",
"checksums",
",",
"blockSize",
")",
"{",
"var",
"results",
"=",
"[",
"]",
";",
"var",
"hashtable",
"=",
"createHashtable",
"(",
"checksums",
")",
";",
"var",
"length",
"=",
"data",
".",
"length",
";",
"var",
"start",
"=",
"0",
";",
"var",
"end",
"=",
"blockSize",
">",
"length",
"?",
"length",
":",
"blockSize",
";",
"var",
"lastMatchedEnd",
"=",
"0",
";",
"var",
"prevRollingWeak",
"=",
"null",
";",
"var",
"weak",
";",
"var",
"weak16",
";",
"var",
"match",
";",
"var",
"d",
";",
"var",
"len",
";",
"var",
"mightMatch",
";",
"var",
"chunk",
";",
"var",
"strong",
";",
"var",
"hashtable_weak16",
";",
"var",
"hashtable_weak16i",
";",
"for",
"(",
";",
"end",
"<=",
"length",
";",
"start",
"++",
",",
"end",
"++",
")",
"{",
"weak",
"=",
"calcWeak32",
"(",
"data",
",",
"prevRollingWeak",
",",
"start",
",",
"end",
")",
";",
"weak16",
"=",
"calcWeak16",
"(",
"weak",
".",
"sum",
")",
";",
"match",
"=",
"false",
";",
"d",
"=",
"null",
";",
"prevRollingWeak",
"=",
"weak",
";",
"hashtable_weak16",
"=",
"hashtable",
"[",
"weak16",
"]",
";",
"if",
"(",
"hashtable_weak16",
")",
"{",
"len",
"=",
"hashtable_weak16",
".",
"length",
";",
"for",
"(",
"var",
"i",
"=",
"0",
";",
"i",
"<",
"len",
";",
"i",
"++",
")",
"{",
"hashtable_weak16i",
"=",
"hashtable_weak16",
"[",
"i",
"]",
";",
"if",
"(",
"hashtable_weak16i",
".",
"weak",
"===",
"weak",
".",
"sum",
")",
"{",
"mightMatch",
"=",
"hashtable_weak16i",
";",
"chunk",
"=",
"data",
".",
"slice",
"(",
"start",
",",
"end",
")",
";",
"strong",
"=",
"md5sum",
"(",
"chunk",
")",
";",
"if",
"(",
"mightMatch",
".",
"strong",
"===",
"strong",
")",
"{",
"match",
"=",
"mightMatch",
";",
"break",
";",
"}",
"}",
"}",
"}",
"if",
"(",
"match",
")",
"{",
"if",
"(",
"start",
"<",
"lastMatchedEnd",
")",
"{",
"d",
"=",
"data",
".",
"slice",
"(",
"lastMatchedEnd",
"-",
"1",
",",
"end",
")",
";",
"results",
".",
"push",
"(",
"{",
"data",
":",
"d",
",",
"index",
":",
"match",
".",
"index",
"}",
")",
";",
"}",
"else",
"if",
"(",
"start",
"-",
"lastMatchedEnd",
">",
"0",
")",
"{",
"d",
"=",
"data",
".",
"slice",
"(",
"lastMatchedEnd",
",",
"start",
")",
";",
"results",
".",
"push",
"(",
"{",
"data",
":",
"d",
",",
"index",
":",
"match",
".",
"index",
"}",
")",
";",
"}",
"else",
"{",
"results",
".",
"push",
"(",
"{",
"index",
":",
"match",
".",
"index",
"}",
")",
";",
"}",
"lastMatchedEnd",
"=",
"end",
";",
"}",
"else",
"if",
"(",
"end",
"===",
"length",
")",
"{",
"d",
"=",
"data",
".",
"slice",
"(",
"lastMatchedEnd",
")",
";",
"results",
".",
"push",
"(",
"{",
"data",
":",
"d",
"}",
")",
";",
"}",
"}",
"return",
"results",
";",
"}"
] |
RSync algorithm to perform data rolling
|
[
"RSync",
"algorithm",
"to",
"perform",
"data",
"rolling"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/rsync/rsync-utils.js#L136-L209
|
train
|
mozilla/makedrive
|
lib/rsync/rsync-utils.js
|
blockChecksums
|
function blockChecksums(fs, path, size, callback) {
var cache = {};
fs.readFile(path, function (err, data) {
if (!err) {
// cache file
cache[path] = data;
} else if (err && err.code === 'ENOENT') {
cache[path] = [];
} else {
return callback(err);
}
var length = cache[path].length;
var incr = size;
var start = 0;
var end = incr > length ? length : incr;
var blockIndex = 0;
var result = [];
var chunk;
var weak;
var strong;
while (start < length) {
chunk = cache[path].slice(start, end);
weak = calcWeak32(chunk).sum;
strong = md5sum(chunk);
result.push({
index: blockIndex,
weak: weak,
strong: strong
});
// update slice indices
start += incr;
end = (end + incr) > length ? length : end + incr;
// update block index
blockIndex++;
}
callback(null, result);
});
}
|
javascript
|
function blockChecksums(fs, path, size, callback) {
var cache = {};
fs.readFile(path, function (err, data) {
if (!err) {
// cache file
cache[path] = data;
} else if (err && err.code === 'ENOENT') {
cache[path] = [];
} else {
return callback(err);
}
var length = cache[path].length;
var incr = size;
var start = 0;
var end = incr > length ? length : incr;
var blockIndex = 0;
var result = [];
var chunk;
var weak;
var strong;
while (start < length) {
chunk = cache[path].slice(start, end);
weak = calcWeak32(chunk).sum;
strong = md5sum(chunk);
result.push({
index: blockIndex,
weak: weak,
strong: strong
});
// update slice indices
start += incr;
end = (end + incr) > length ? length : end + incr;
// update block index
blockIndex++;
}
callback(null, result);
});
}
|
[
"function",
"blockChecksums",
"(",
"fs",
",",
"path",
",",
"size",
",",
"callback",
")",
"{",
"var",
"cache",
"=",
"{",
"}",
";",
"fs",
".",
"readFile",
"(",
"path",
",",
"function",
"(",
"err",
",",
"data",
")",
"{",
"if",
"(",
"!",
"err",
")",
"{",
"cache",
"[",
"path",
"]",
"=",
"data",
";",
"}",
"else",
"if",
"(",
"err",
"&&",
"err",
".",
"code",
"===",
"'ENOENT'",
")",
"{",
"cache",
"[",
"path",
"]",
"=",
"[",
"]",
";",
"}",
"else",
"{",
"return",
"callback",
"(",
"err",
")",
";",
"}",
"var",
"length",
"=",
"cache",
"[",
"path",
"]",
".",
"length",
";",
"var",
"incr",
"=",
"size",
";",
"var",
"start",
"=",
"0",
";",
"var",
"end",
"=",
"incr",
">",
"length",
"?",
"length",
":",
"incr",
";",
"var",
"blockIndex",
"=",
"0",
";",
"var",
"result",
"=",
"[",
"]",
";",
"var",
"chunk",
";",
"var",
"weak",
";",
"var",
"strong",
";",
"while",
"(",
"start",
"<",
"length",
")",
"{",
"chunk",
"=",
"cache",
"[",
"path",
"]",
".",
"slice",
"(",
"start",
",",
"end",
")",
";",
"weak",
"=",
"calcWeak32",
"(",
"chunk",
")",
".",
"sum",
";",
"strong",
"=",
"md5sum",
"(",
"chunk",
")",
";",
"result",
".",
"push",
"(",
"{",
"index",
":",
"blockIndex",
",",
"weak",
":",
"weak",
",",
"strong",
":",
"strong",
"}",
")",
";",
"start",
"+=",
"incr",
";",
"end",
"=",
"(",
"end",
"+",
"incr",
")",
">",
"length",
"?",
"length",
":",
"end",
"+",
"incr",
";",
"blockIndex",
"++",
";",
"}",
"callback",
"(",
"null",
",",
"result",
")",
";",
"}",
")",
";",
"}"
] |
RSync function to calculate checksums for a file by dividing it into blocks of data whose size is passed in, and checksumming each block of data
|
[
"Rsync",
"function",
"to",
"calculate",
"checksums",
"for",
"a",
"file",
"by",
"dividing",
"it",
"into",
"blocks",
"of",
"data",
"whose",
"size",
"is",
"passed",
"in",
"and",
"checksuming",
"each",
"block",
"of",
"data"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/rsync/rsync-utils.js#L215-L257
|
train
|
mozilla/makedrive
|
lib/rsync/rsync-utils.js
|
getChecksum
|
function getChecksum(fs, path, callback) {
fs.readFile(path, function(err, data) {
if(!err) {
callback(null, md5sum(data));
} else if(err.code === 'ENOENT') {
// File does not exist so the checksum is an empty string
callback(null, "");
} else {
callback(err);
}
});
}
|
javascript
|
function getChecksum(fs, path, callback) {
fs.readFile(path, function(err, data) {
if(!err) {
callback(null, md5sum(data));
} else if(err.code === 'ENOENT') {
// File does not exist so the checksum is an empty string
callback(null, "");
} else {
callback(err);
}
});
}
|
[
"function",
"getChecksum",
"(",
"fs",
",",
"path",
",",
"callback",
")",
"{",
"fs",
".",
"readFile",
"(",
"path",
",",
"function",
"(",
"err",
",",
"data",
")",
"{",
"if",
"(",
"!",
"err",
")",
"{",
"callback",
"(",
"null",
",",
"md5sum",
"(",
"data",
")",
")",
";",
"}",
"else",
"if",
"(",
"err",
".",
"code",
"===",
"'ENOENT'",
")",
"{",
"callback",
"(",
"null",
",",
"\"\"",
")",
";",
"}",
"else",
"{",
"callback",
"(",
"err",
")",
";",
"}",
"}",
")",
";",
"}"
] |
Generate the MD5 hash for the data of a file in its entirety
|
[
"Generate",
"the",
"MD5",
"hash",
"for",
"the",
"data",
"of",
"a",
"file",
"in",
"its",
"entirety"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/rsync/rsync-utils.js#L261-L272
|
train
|
mozilla/makedrive
|
lib/rsync/rsync-utils.js
|
generateChecksums
|
function generateChecksums(fs, paths, stampNode, callback) {
// Maybe stampNode was not passed in
if(typeof callback !== 'function') {
callback = findCallback(callback, stampNode);
stampNode = false;
}
var paramError = validateParams(fs, paths);
if(paramError) {
return callback(paramError);
}
var checksumList = [];
function ChecksumNode(path, type, checksum) {
this.path = path;
this.type = type;
this.checksum = checksum;
}
function addChecksumNode(path, nodeType, checksum, callback) {
var checksumNode;
// If no checksum was passed in
if(typeof checksum === 'function') {
callback = checksum;
checksumNode = new ChecksumNode(path, nodeType);
} else {
checksumNode = new ChecksumNode(path, nodeType, checksum);
}
checksumList.push(checksumNode);
callback();
}
// Only calculate the checksums for synced paths
function maybeAddChecksumNode(path, nodeType, callback) {
fsUtils.isPathUnsynced(fs, path, function(err, unsynced) {
if(err) {
return callback(err);
}
if(unsynced) {
return callback();
}
getChecksum(fs, path, function(err, checksum) {
if(err) {
return callback(err);
}
// If we shouldn't add the checksum stamp or
// the node does not exist (cannot add a stamp)
// immediately add the checksum
if(!stampNode || checksum === "") {
return addChecksumNode(path, nodeType, checksum, callback);
}
// Stamp the node with the checksum
fsUtils.setChecksum(fs, path, checksum, function(err) {
if(err) {
return callback(err);
}
addChecksumNode(path, nodeType, checksum, callback);
});
});
});
}
function calcChecksum(path, callback) {
fs.lstat(path, function(err, stat) {
var nodeType = stat && stat.type;
if(err) {
if(err.code !== 'ENOENT') {
return callback(err);
}
// Checksums for non-existent files
maybeAddChecksumNode(path, nodeType, callback);
} else if(stat.isDirectory()) {
// Directory checksums are not calculated i.e. are undefined
addChecksumNode(path, nodeType, callback);
} else {
// Checksums for synced files/links
maybeAddChecksumNode(path, nodeType, callback);
}
});
}
async.eachSeries(paths, calcChecksum, function(err) {
if(err) {
return callback(err);
}
callback(null, checksumList);
});
}
|
javascript
|
function generateChecksums(fs, paths, stampNode, callback) {
// Maybe stampNode was not passed in
if(typeof callback !== 'function') {
callback = findCallback(callback, stampNode);
stampNode = false;
}
var paramError = validateParams(fs, paths);
if(paramError) {
return callback(paramError);
}
var checksumList = [];
function ChecksumNode(path, type, checksum) {
this.path = path;
this.type = type;
this.checksum = checksum;
}
function addChecksumNode(path, nodeType, checksum, callback) {
var checksumNode;
// If no checksum was passed in
if(typeof checksum === 'function') {
callback = checksum;
checksumNode = new ChecksumNode(path, nodeType);
} else {
checksumNode = new ChecksumNode(path, nodeType, checksum);
}
checksumList.push(checksumNode);
callback();
}
// Only calculate the checksums for synced paths
function maybeAddChecksumNode(path, nodeType, callback) {
fsUtils.isPathUnsynced(fs, path, function(err, unsynced) {
if(err) {
return callback(err);
}
if(unsynced) {
return callback();
}
getChecksum(fs, path, function(err, checksum) {
if(err) {
return callback(err);
}
// If we shouldn't add the checksum stamp or
// the node does not exist (cannot add a stamp)
// immediately add the checksum
if(!stampNode || checksum === "") {
return addChecksumNode(path, nodeType, checksum, callback);
}
// Stamp the node with the checksum
fsUtils.setChecksum(fs, path, checksum, function(err) {
if(err) {
return callback(err);
}
addChecksumNode(path, nodeType, checksum, callback);
});
});
});
}
function calcChecksum(path, callback) {
fs.lstat(path, function(err, stat) {
var nodeType = stat && stat.type;
if(err) {
if(err.code !== 'ENOENT') {
return callback(err);
}
// Checksums for non-existent files
maybeAddChecksumNode(path, nodeType, callback);
} else if(stat.isDirectory()) {
// Directory checksums are not calculated i.e. are undefined
addChecksumNode(path, nodeType, callback);
} else {
// Checksums for synced files/links
maybeAddChecksumNode(path, nodeType, callback);
}
});
}
async.eachSeries(paths, calcChecksum, function(err) {
if(err) {
return callback(err);
}
callback(null, checksumList);
});
}
|
[
"function",
"generateChecksums",
"(",
"fs",
",",
"paths",
",",
"stampNode",
",",
"callback",
")",
"{",
"if",
"(",
"typeof",
"callback",
"!==",
"'function'",
")",
"{",
"callback",
"=",
"findCallback",
"(",
"callback",
",",
"stampNode",
")",
";",
"stampNode",
"=",
"false",
";",
"}",
"var",
"paramError",
"=",
"validateParams",
"(",
"fs",
",",
"paths",
")",
";",
"if",
"(",
"paramError",
")",
"{",
"return",
"callback",
"(",
"paramError",
")",
";",
"}",
"var",
"checksumList",
"=",
"[",
"]",
";",
"function",
"ChecksumNode",
"(",
"path",
",",
"type",
",",
"checksum",
")",
"{",
"this",
".",
"path",
"=",
"path",
";",
"this",
".",
"type",
"=",
"type",
";",
"this",
".",
"checksum",
"=",
"checksum",
";",
"}",
"function",
"addChecksumNode",
"(",
"path",
",",
"nodeType",
",",
"checksum",
",",
"callback",
")",
"{",
"var",
"checksumNode",
";",
"if",
"(",
"typeof",
"checksum",
"===",
"'function'",
")",
"{",
"callback",
"=",
"checksum",
";",
"checksumNode",
"=",
"new",
"ChecksumNode",
"(",
"path",
",",
"nodeType",
")",
";",
"}",
"else",
"{",
"checksumNode",
"=",
"new",
"ChecksumNode",
"(",
"path",
",",
"nodeType",
",",
"checksum",
")",
";",
"}",
"checksumList",
".",
"push",
"(",
"checksumNode",
")",
";",
"callback",
"(",
")",
";",
"}",
"function",
"maybeAddChecksumNode",
"(",
"path",
",",
"nodeType",
",",
"callback",
")",
"{",
"fsUtils",
".",
"isPathUnsynced",
"(",
"fs",
",",
"path",
",",
"function",
"(",
"err",
",",
"unsynced",
")",
"{",
"if",
"(",
"err",
")",
"{",
"return",
"callback",
"(",
"err",
")",
";",
"}",
"if",
"(",
"unsynced",
")",
"{",
"return",
"callback",
"(",
")",
";",
"}",
"getChecksum",
"(",
"fs",
",",
"path",
",",
"function",
"(",
"err",
",",
"checksum",
")",
"{",
"if",
"(",
"err",
")",
"{",
"return",
"callback",
"(",
"err",
")",
";",
"}",
"if",
"(",
"!",
"stampNode",
"||",
"checksum",
"===",
"\"\"",
")",
"{",
"return",
"addChecksumNode",
"(",
"path",
",",
"nodeType",
",",
"checksum",
",",
"callback",
")",
";",
"}",
"fsUtils",
".",
"setChecksum",
"(",
"fs",
",",
"path",
",",
"checksum",
",",
"function",
"(",
"err",
")",
"{",
"if",
"(",
"err",
")",
"{",
"return",
"callback",
"(",
"err",
")",
";",
"}",
"addChecksumNode",
"(",
"path",
",",
"nodeType",
",",
"checksum",
",",
"callback",
")",
";",
"}",
")",
";",
"}",
")",
";",
"}",
")",
";",
"}",
"function",
"calcChecksum",
"(",
"path",
",",
"callback",
")",
"{",
"fs",
".",
"lstat",
"(",
"path",
",",
"function",
"(",
"err",
",",
"stat",
")",
"{",
"var",
"nodeType",
"=",
"stat",
"&&",
"stat",
".",
"type",
";",
"if",
"(",
"err",
")",
"{",
"if",
"(",
"err",
".",
"code",
"!==",
"'ENOENT'",
")",
"{",
"return",
"callback",
"(",
"err",
")",
";",
"}",
"maybeAddChecksumNode",
"(",
"path",
",",
"nodeType",
",",
"callback",
")",
";",
"}",
"else",
"if",
"(",
"stat",
".",
"isDirectory",
"(",
")",
")",
"{",
"addChecksumNode",
"(",
"path",
",",
"nodeType",
",",
"callback",
")",
";",
"}",
"else",
"{",
"maybeAddChecksumNode",
"(",
"path",
",",
"nodeType",
",",
"callback",
")",
";",
"}",
"}",
")",
";",
"}",
"async",
".",
"eachSeries",
"(",
"paths",
",",
"calcChecksum",
",",
"function",
"(",
"err",
")",
"{",
"if",
"(",
"err",
")",
"{",
"return",
"callback",
"(",
"err",
")",
";",
"}",
"callback",
"(",
"null",
",",
"checksumList",
")",
";",
"}",
")",
";",
"}"
] |
Generate checksums for an array of paths to be used for comparison. It also takes an optional parameter called stampNode, a boolean which indicates whether the checksum should be stamped as an xattr on the node.
|
[
"Generate",
"checksums",
"for",
"an",
"array",
"of",
"paths",
"to",
"be",
"used",
"for",
"comparison",
"It",
"also",
"takes",
"an",
"optional",
"parameter",
"called",
"stampNode",
"a",
"boolean",
"which",
"indicates",
"whether",
"the",
"checksum",
"should",
"be",
"stamped",
"as",
"an",
"xattr",
"on",
"the",
"node",
"."
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/rsync/rsync-utils.js#L277-L374
|
train
|
mozilla/makedrive
|
lib/rsync/rsync-utils.js
|
maybeAddChecksumNode
|
function maybeAddChecksumNode(path, nodeType, callback) {
fsUtils.isPathUnsynced(fs, path, function(err, unsynced) {
if(err) {
return callback(err);
}
if(unsynced) {
return callback();
}
getChecksum(fs, path, function(err, checksum) {
if(err) {
return callback(err);
}
// If we shouldn't add the checksum stamp or
// the node does not exist (cannot add a stamp)
// immediately add the checksum
if(!stampNode || checksum === "") {
return addChecksumNode(path, nodeType, checksum, callback);
}
// Stamp the node with the checksum
fsUtils.setChecksum(fs, path, checksum, function(err) {
if(err) {
return callback(err);
}
addChecksumNode(path, nodeType, checksum, callback);
});
});
});
}
|
javascript
|
function maybeAddChecksumNode(path, nodeType, callback) {
fsUtils.isPathUnsynced(fs, path, function(err, unsynced) {
if(err) {
return callback(err);
}
if(unsynced) {
return callback();
}
getChecksum(fs, path, function(err, checksum) {
if(err) {
return callback(err);
}
// If we shouldn't add the checksum stamp or
// the node does not exist (cannot add a stamp)
// immediately add the checksum
if(!stampNode || checksum === "") {
return addChecksumNode(path, nodeType, checksum, callback);
}
// Stamp the node with the checksum
fsUtils.setChecksum(fs, path, checksum, function(err) {
if(err) {
return callback(err);
}
addChecksumNode(path, nodeType, checksum, callback);
});
});
});
}
|
[
"function",
"maybeAddChecksumNode",
"(",
"path",
",",
"nodeType",
",",
"callback",
")",
"{",
"fsUtils",
".",
"isPathUnsynced",
"(",
"fs",
",",
"path",
",",
"function",
"(",
"err",
",",
"unsynced",
")",
"{",
"if",
"(",
"err",
")",
"{",
"return",
"callback",
"(",
"err",
")",
";",
"}",
"if",
"(",
"unsynced",
")",
"{",
"return",
"callback",
"(",
")",
";",
"}",
"getChecksum",
"(",
"fs",
",",
"path",
",",
"function",
"(",
"err",
",",
"checksum",
")",
"{",
"if",
"(",
"err",
")",
"{",
"return",
"callback",
"(",
"err",
")",
";",
"}",
"if",
"(",
"!",
"stampNode",
"||",
"checksum",
"===",
"\"\"",
")",
"{",
"return",
"addChecksumNode",
"(",
"path",
",",
"nodeType",
",",
"checksum",
",",
"callback",
")",
";",
"}",
"fsUtils",
".",
"setChecksum",
"(",
"fs",
",",
"path",
",",
"checksum",
",",
"function",
"(",
"err",
")",
"{",
"if",
"(",
"err",
")",
"{",
"return",
"callback",
"(",
"err",
")",
";",
"}",
"addChecksumNode",
"(",
"path",
",",
"nodeType",
",",
"checksum",
",",
"callback",
")",
";",
"}",
")",
";",
"}",
")",
";",
"}",
")",
";",
"}"
] |
Only calculate the checksums for synced paths
|
[
"Only",
"calculate",
"the",
"checksums",
"for",
"synced",
"paths"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/rsync/rsync-utils.js#L313-L344
|
train
|
mozilla/makedrive
|
lib/rsync/rsync-utils.js
|
compareContents
|
function compareContents(fs, checksumList, callback) {
var ECHKSUM = "Checksums do not match";
var paramError = validateParams(fs, checksumList);
if(paramError) {
return callback(paramError);
}
function compare(checksumNode, callback) {
var path = checksumNode.path;
fs.lstat(path, function(err, stat) {
if(err && err.code !== 'ENOENT') {
return callback(err);
}
// If the types of the nodes on each fs do not match
// i.e. /a is a file on fs1 and /a is a directory on fs2
if(!err && checksumNode.type !== stat.type) {
return callback(ECHKSUM);
}
// If the node type is a directory, checksum should not exist
if(!err && stat.isDirectory()) {
if(!checksumNode.checksum) {
return callback();
}
callback(ECHKSUM);
}
// Checksum comparison for a non-existent path or file/link
getChecksum(fs, path, function(err, checksum) {
if(err) {
return callback(err);
}
if(checksum !== checksumNode.checksum) {
return callback(ECHKSUM);
}
callback();
});
});
}
async.eachSeries(checksumList, compare, function(err) {
if(err && err !== ECHKSUM) {
return callback(err);
}
callback(null, err !== ECHKSUM);
});
}
|
javascript
|
function compareContents(fs, checksumList, callback) {
var ECHKSUM = "Checksums do not match";
var paramError = validateParams(fs, checksumList);
if(paramError) {
return callback(paramError);
}
function compare(checksumNode, callback) {
var path = checksumNode.path;
fs.lstat(path, function(err, stat) {
if(err && err.code !== 'ENOENT') {
return callback(err);
}
// If the types of the nodes on each fs do not match
// i.e. /a is a file on fs1 and /a is a directory on fs2
if(!err && checksumNode.type !== stat.type) {
return callback(ECHKSUM);
}
// If the node type is a directory, checksum should not exist
if(!err && stat.isDirectory()) {
if(!checksumNode.checksum) {
return callback();
}
callback(ECHKSUM);
}
// Checksum comparison for a non-existent path or file/link
getChecksum(fs, path, function(err, checksum) {
if(err) {
return callback(err);
}
if(checksum !== checksumNode.checksum) {
return callback(ECHKSUM);
}
callback();
});
});
}
async.eachSeries(checksumList, compare, function(err) {
if(err && err !== ECHKSUM) {
return callback(err);
}
callback(null, err !== ECHKSUM);
});
}
|
[
"function",
"compareContents",
"(",
"fs",
",",
"checksumList",
",",
"callback",
")",
"{",
"var",
"ECHKSUM",
"=",
"\"Checksums do not match\"",
";",
"var",
"paramError",
"=",
"validateParams",
"(",
"fs",
",",
"checksumList",
")",
";",
"if",
"(",
"paramError",
")",
"{",
"return",
"callback",
"(",
"paramError",
")",
";",
"}",
"function",
"compare",
"(",
"checksumNode",
",",
"callback",
")",
"{",
"var",
"path",
"=",
"checksumNode",
".",
"path",
";",
"fs",
".",
"lstat",
"(",
"path",
",",
"function",
"(",
"err",
",",
"stat",
")",
"{",
"if",
"(",
"err",
"&&",
"err",
".",
"code",
"!==",
"'ENOENT'",
")",
"{",
"return",
"callback",
"(",
"err",
")",
";",
"}",
"if",
"(",
"!",
"err",
"&&",
"checksumNode",
".",
"type",
"!==",
"stat",
".",
"type",
")",
"{",
"return",
"callback",
"(",
"ECHKSUM",
")",
";",
"}",
"if",
"(",
"!",
"err",
"&&",
"stat",
".",
"isDirectory",
"(",
")",
")",
"{",
"if",
"(",
"!",
"checksumNode",
".",
"checksum",
")",
"{",
"return",
"callback",
"(",
")",
";",
"}",
"callback",
"(",
"ECHKSUM",
")",
";",
"}",
"getChecksum",
"(",
"fs",
",",
"path",
",",
"function",
"(",
"err",
",",
"checksum",
")",
"{",
"if",
"(",
"err",
")",
"{",
"return",
"callback",
"(",
"err",
")",
";",
"}",
"if",
"(",
"checksum",
"!==",
"checksumNode",
".",
"checksum",
")",
"{",
"return",
"callback",
"(",
"ECHKSUM",
")",
";",
"}",
"callback",
"(",
")",
";",
"}",
")",
";",
"}",
")",
";",
"}",
"async",
".",
"eachSeries",
"(",
"checksumList",
",",
"compare",
",",
"function",
"(",
"err",
")",
"{",
"if",
"(",
"err",
"&&",
"err",
"!==",
"ECHKSUM",
")",
"{",
"return",
"callback",
"(",
"err",
")",
";",
"}",
"callback",
"(",
"null",
",",
"err",
"!==",
"ECHKSUM",
")",
";",
"}",
")",
";",
"}"
] |
Compare two file systems. This is done by comparing the checksums for a collection of paths in one file system against the checksums for the same paths in another file system
|
[
"Compare",
"two",
"file",
"systems",
".",
"This",
"is",
"done",
"by",
"comparing",
"the",
"checksums",
"for",
"a",
"collection",
"of",
"paths",
"in",
"one",
"file",
"system",
"against",
"the",
"checksums",
"for",
"the",
"same",
"those",
"paths",
"in",
"another",
"file",
"system"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/rsync/rsync-utils.js#L380-L433
|
train
|
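A hedged sketch of the comparison flow described above: build a checksum list from one filesystem, then verify a second filesystem against it. compareContents(fs, checksumList, callback) matches the record above; the generateChecksums name and the Filer setup are assumptions.

// Sketch only: generateChecksums and the Filer instances are assumed stand-ins.
var Filer = require('filer');
var rsyncUtils = require('../lib/rsync/rsync-utils');

var fs1 = new Filer.FileSystem({ name: 'fs1' });
var fs2 = new Filer.FileSystem({ name: 'fs2' });

rsyncUtils.generateChecksums(fs1, ['/shared/notes.txt'], false, function(err, checksumList) {
  if (err) { return console.error(err); }

  // The callback receives a boolean: true when every path in checksumList
  // matches the corresponding node in fs2, false otherwise.
  rsyncUtils.compareContents(fs2, checksumList, function(err, equal) {
    if (err) { return console.error(err); }
    console.log(equal ? 'Filesystems match' : 'Filesystems differ');
  });
});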
mozilla/makedrive
|
lib/rsync/patch.js
|
createParentDirectories
|
function createParentDirectories(path, callback) {
(new fs.Shell()).mkdirp(Path.dirname(path), function(err) {
if(err && err.code !== 'EEXIST') {
return callback(err);
}
callback();
});
}
|
javascript
|
function createParentDirectories(path, callback) {
(new fs.Shell()).mkdirp(Path.dirname(path), function(err) {
if(err && err.code !== 'EEXIST') {
return callback(err);
}
callback();
});
}
|
[
"function",
"createParentDirectories",
"(",
"path",
",",
"callback",
")",
"{",
"(",
"new",
"fs",
".",
"Shell",
"(",
")",
")",
".",
"mkdirp",
"(",
"Path",
".",
"dirname",
"(",
"path",
")",
",",
"function",
"(",
"err",
")",
"{",
"if",
"(",
"err",
"&&",
"err",
".",
"code",
"!==",
"'EEXIST'",
")",
"{",
"return",
"callback",
"(",
"err",
")",
";",
"}",
"callback",
"(",
")",
";",
"}",
")",
";",
"}"
] |
Create any parent directories that do not exist
|
[
"Create",
"any",
"parent",
"directories",
"that",
"do",
"not",
"exist"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/rsync/patch.js#L482-L490
|
train
|
mozilla/makedrive
|
lib/util.js
|
findPathIndexInArray
|
function findPathIndexInArray(array, path) {
for(var i = 0; i < array.length; i++) {
if(array[i].path === path) {
return i;
}
}
return -1;
}
|
javascript
|
function findPathIndexInArray(array, path) {
for(var i = 0; i < array.length; i++) {
if(array[i].path === path) {
return i;
}
}
return -1;
}
|
[
"function",
"findPathIndexInArray",
"(",
"array",
",",
"path",
")",
"{",
"for",
"(",
"var",
"i",
"=",
"0",
";",
"i",
"<",
"array",
".",
"length",
";",
"i",
"++",
")",
"{",
"if",
"(",
"array",
"[",
"i",
"]",
".",
"path",
"===",
"path",
")",
"{",
"return",
"i",
";",
"}",
"}",
"return",
"-",
"1",
";",
"}"
] |
General utility methods
|
[
"General",
"utility",
"methods"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/lib/util.js#L3-L11
|
train
|
mozilla/makedrive
|
server/lib/filer-www/json-handler.js
|
handle404
|
function handle404(url, res) {
var json = {
error: {
code: 404,
message: 'The requested URL ' + url + ' was not found on this server.'
}
};
write(json, res, 404);
}
|
javascript
|
function handle404(url, res) {
var json = {
error: {
code: 404,
message: 'The requested URL ' + url + ' was not found on this server.'
}
};
write(json, res, 404);
}
|
[
"function",
"handle404",
"(",
"url",
",",
"res",
")",
"{",
"var",
"json",
"=",
"{",
"error",
":",
"{",
"code",
":",
"404",
",",
"message",
":",
"'The requested URL '",
"+",
"url",
"+",
"' was not found on this server.'",
"}",
"}",
";",
"write",
"(",
"json",
",",
"res",
",",
"404",
")",
";",
"}"
] |
Send an Apache-style 404
|
[
"Send",
"an",
"Apache",
"-",
"style",
"404"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/server/lib/filer-www/json-handler.js#L15-L23
|
train
|
mozilla/makedrive
|
server/lib/filer-www/json-handler.js
|
handleDir
|
function handleDir(fs, path, res) {
var sh = new fs.Shell();
sh.ls(path, {recursive: true}, function(err, listing) {
if(err) {
log.error(err, 'Unable to get listing for path `%s`', path);
handle404(path, res);
return;
}
write(listing, res);
});
}
|
javascript
|
function handleDir(fs, path, res) {
var sh = new fs.Shell();
sh.ls(path, {recursive: true}, function(err, listing) {
if(err) {
log.error(err, 'Unable to get listing for path `%s`', path);
handle404(path, res);
return;
}
write(listing, res);
});
}
|
[
"function",
"handleDir",
"(",
"fs",
",",
"path",
",",
"res",
")",
"{",
"var",
"sh",
"=",
"new",
"fs",
".",
"Shell",
"(",
")",
";",
"sh",
".",
"ls",
"(",
"path",
",",
"{",
"recursive",
":",
"true",
"}",
",",
"function",
"(",
"err",
",",
"listing",
")",
"{",
"if",
"(",
"err",
")",
"{",
"log",
".",
"error",
"(",
"err",
",",
"'Unable to get listing for path `%s`'",
",",
"path",
")",
";",
"handle404",
"(",
"path",
",",
"res",
")",
";",
"return",
";",
"}",
"write",
"(",
"listing",
",",
"res",
")",
";",
"}",
")",
";",
"}"
] |
Send recursive dir listing
|
[
"Send",
"recursive",
"dir",
"listing"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/server/lib/filer-www/json-handler.js#L51-L62
|
train
|
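For context on the recursive listing above, a small sketch of the Filer Shell call it relies on; the filesystem setup here is an assumed stand-in for the server's per-user filesystem.

// Sketch only: the Filer instance is a stand-in; sh.ls() with {recursive: true}
// is the same call used by the handler above.
var Filer = require('filer');
var fs = new Filer.FileSystem({ name: 'demo' });
var sh = new fs.Shell();

sh.ls('/projects', { recursive: true }, function(err, entries) {
  if (err) { return console.error(err); }
  // With `recursive` set, directory entries carry a nested `contents` array.
  console.log(JSON.stringify(entries, null, 2));
});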
mozilla/makedrive
|
server/lib/sync-protocol-handler.js
|
ensureLock
|
function ensureLock(client, path) {
var lock = client.lock;
if(!(lock && !('unlocked' in lock))) {
// Create an error so we get a stack, too.
var err = new Error('Attempted sync step without lock.');
log.error({client: client, err: err}, 'Client should own lock but does not for ' + path);
return false;
}
return true;
}
|
javascript
|
function ensureLock(client, path) {
var lock = client.lock;
if(!(lock && !('unlocked' in lock))) {
// Create an error so we get a stack, too.
var err = new Error('Attempted sync step without lock.');
log.error({client: client, err: err}, 'Client should own lock but does not for ' + path);
return false;
}
return true;
}
|
[
"function",
"ensureLock",
"(",
"client",
",",
"path",
")",
"{",
"var",
"lock",
"=",
"client",
".",
"lock",
";",
"if",
"(",
"!",
"(",
"lock",
"&&",
"!",
"(",
"'unlocked'",
"in",
"lock",
")",
")",
")",
"{",
"var",
"err",
"=",
"new",
"Error",
"(",
"'Attempted sync step without lock.'",
")",
";",
"log",
".",
"error",
"(",
"{",
"client",
":",
"client",
",",
"err",
":",
"err",
"}",
",",
"'Client should own lock but does not for '",
"+",
"path",
")",
";",
"return",
"false",
";",
"}",
"return",
"true",
";",
"}"
] |
Most upstream sync steps require a lock to be held. It's a bug if we get into one of these steps without the lock.
|
[
"Most",
"upstream",
"sync",
"steps",
"require",
"a",
"lock",
"to",
"be",
"held",
".",
"It",
"s",
"a",
"bug",
"if",
"we",
"get",
"into",
"one",
"of",
"these",
"steps",
"without",
"the",
"lock",
"."
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/server/lib/sync-protocol-handler.js#L90-L99
|
train
|
mozilla/makedrive
|
server/lib/sync-protocol-handler.js
|
checkFileSizeLimit
|
function checkFileSizeLimit(client, srcList) {
function maxSizeExceeded(obj) {
var errorMsg;
client.lock.release(function(err) {
if(err) {
log.error({err: err, client: client}, 'Error releasing sync lock');
}
releaseLock(client);
errorMsg = SyncMessage.error.maxsizeExceeded;
errorMsg.content = {path: obj.path};
client.sendMessage(errorMsg);
});
}
for (var key in srcList) {
if(srcList.hasOwnProperty(key)) {
var obj = srcList[key];
for (var prop in obj) {
if(obj.hasOwnProperty(prop) && prop === 'size') {
if(obj.size > MAX_SYNC_SIZE_BYTES) {
// Fail this sync, contains a file that is too large.
log.warn({client: client},
'Client tried to exceed file sync size limit: file was %s bytes, limit is %s',
obj.size, MAX_SYNC_SIZE_BYTES);
maxSizeExceeded(obj);
return false;
}
}
}
}
}
return true;
}
|
javascript
|
function checkFileSizeLimit(client, srcList) {
function maxSizeExceeded(obj) {
var errorMsg;
client.lock.release(function(err) {
if(err) {
log.error({err: err, client: client}, 'Error releasing sync lock');
}
releaseLock(client);
errorMsg = SyncMessage.error.maxsizeExceeded;
errorMsg.content = {path: obj.path};
client.sendMessage(errorMsg);
});
}
for (var key in srcList) {
if(srcList.hasOwnProperty(key)) {
var obj = srcList[key];
for (var prop in obj) {
if(obj.hasOwnProperty(prop) && prop === 'size') {
if(obj.size > MAX_SYNC_SIZE_BYTES) {
// Fail this sync, contains a file that is too large.
log.warn({client: client},
'Client tried to exceed file sync size limit: file was %s bytes, limit is %s',
obj.size, MAX_SYNC_SIZE_BYTES);
maxSizeExceeded(obj);
return false;
}
}
}
}
}
return true;
}
|
[
"function",
"checkFileSizeLimit",
"(",
"client",
",",
"srcList",
")",
"{",
"function",
"maxSizeExceeded",
"(",
"obj",
")",
"{",
"var",
"errorMsg",
";",
"client",
".",
"lock",
".",
"release",
"(",
"function",
"(",
"err",
")",
"{",
"if",
"(",
"err",
")",
"{",
"log",
".",
"error",
"(",
"{",
"err",
":",
"err",
",",
"client",
":",
"client",
"}",
",",
"'Error releasing sync lock'",
")",
";",
"}",
"releaseLock",
"(",
"client",
")",
";",
"errorMsg",
"=",
"SyncMessage",
".",
"error",
".",
"maxsizeExceeded",
";",
"errorMsg",
".",
"content",
"=",
"{",
"path",
":",
"obj",
".",
"path",
"}",
";",
"client",
".",
"sendMessage",
"(",
"errorMsg",
")",
";",
"}",
")",
";",
"}",
"for",
"(",
"var",
"key",
"in",
"srcList",
")",
"{",
"if",
"(",
"srcList",
".",
"hasOwnProperty",
"(",
"key",
")",
")",
"{",
"var",
"obj",
"=",
"srcList",
"[",
"key",
"]",
";",
"for",
"(",
"var",
"prop",
"in",
"obj",
")",
"{",
"if",
"(",
"obj",
".",
"hasOwnProperty",
"(",
"prop",
")",
"&&",
"prop",
"===",
"'size'",
")",
"{",
"if",
"(",
"obj",
".",
"size",
">",
"MAX_SYNC_SIZE_BYTES",
")",
"{",
"log",
".",
"warn",
"(",
"{",
"client",
":",
"client",
"}",
",",
"'Client tried to exceed file sync size limit: file was %s bytes, limit is %s'",
",",
"obj",
".",
"size",
",",
"MAX_SYNC_SIZE_BYTES",
")",
";",
"maxSizeExceeded",
"(",
"obj",
")",
";",
"return",
"false",
";",
"}",
"}",
"}",
"}",
"}",
"return",
"true",
";",
"}"
] |
Returns true if file sizes are all within limit, false if not. The client's lock is released, and an error sent to client in the false case.
|
[
"Returns",
"true",
"if",
"file",
"sizes",
"are",
"all",
"within",
"limit",
"false",
"if",
"not",
".",
"The",
"client",
"s",
"lock",
"is",
"released",
"and",
"an",
"error",
"sent",
"to",
"client",
"in",
"the",
"false",
"case",
"."
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/server/lib/sync-protocol-handler.js#L113-L149
|
train
|
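To make the gate above easier to follow, here is a standalone restatement of the size check only; the 2 MB limit and the entry shape are assumptions for illustration, and none of the lock-release or messaging side effects are reproduced.

// Standalone illustration of the size gate; not the server module itself.
var MAX_SYNC_SIZE_BYTES = 2 * 1024 * 1024; // assumed limit for the sketch

function withinSizeLimit(srcList) {
  for (var key in srcList) {
    if (srcList.hasOwnProperty(key)) {
      var entry = srcList[key];
      if ('size' in entry && entry.size > MAX_SYNC_SIZE_BYTES) {
        return false;
      }
    }
  }
  return true;
}

console.log(withinSizeLimit([{ path: '/a.txt', size: 1024 }]));             // true
console.log(withinSizeLimit([{ path: '/b.bin', size: 10 * 1024 * 1024 }])); // false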
mozilla/makedrive
|
server/lib/sync-protocol-handler.js
|
maybeReleaseLock
|
function maybeReleaseLock() {
var lock = self.client.lock;
function done(err) {
log.debug({client: self.client}, 'Closed client sync handler');
self.client.lock = null;
self.client = null;
callback(err);
}
// No lock
if(!lock) {
return done();
}
// Lock reference, but it's already unlocked
if(lock.unlocked) {
return done();
}
// Holding lock, release it
lock.release(function(err) {
if(err) {
log.error({err: err, client: self.client}, 'Error releasing sync lock');
return done(err);
}
done();
});
}
|
javascript
|
function maybeReleaseLock() {
var lock = self.client.lock;
function done(err) {
log.debug({client: self.client}, 'Closed client sync handler');
self.client.lock = null;
self.client = null;
callback(err);
}
// No lock
if(!lock) {
return done();
}
// Lock reference, but it's already unlocked
if(lock.unlocked) {
return done();
}
// Holding lock, release it
lock.release(function(err) {
if(err) {
log.error({err: err, client: self.client}, 'Error releasing sync lock');
return done(err);
}
done();
});
}
|
[
"function",
"maybeReleaseLock",
"(",
")",
"{",
"var",
"lock",
"=",
"self",
".",
"client",
".",
"lock",
";",
"function",
"done",
"(",
"err",
")",
"{",
"log",
".",
"debug",
"(",
"{",
"client",
":",
"self",
".",
"client",
"}",
",",
"'Closed client sync handler'",
")",
";",
"self",
".",
"client",
".",
"lock",
"=",
"null",
";",
"self",
".",
"client",
"=",
"null",
";",
"callback",
"(",
"err",
")",
";",
"}",
"if",
"(",
"!",
"lock",
")",
"{",
"return",
"done",
"(",
")",
";",
"}",
"if",
"(",
"lock",
".",
"unlocked",
")",
"{",
"return",
"done",
"(",
")",
";",
"}",
"lock",
".",
"release",
"(",
"function",
"(",
"err",
")",
"{",
"if",
"(",
"err",
")",
"{",
"log",
".",
"error",
"(",
"{",
"err",
":",
"err",
",",
"client",
":",
"self",
".",
"client",
"}",
",",
"'Error releasing sync lock'",
")",
";",
"return",
"done",
"(",
"err",
")",
";",
"}",
"done",
"(",
")",
";",
"}",
")",
";",
"}"
] |
If we're still holding a valid lock, release it first.
|
[
"If",
"we",
"re",
"still",
"holding",
"a",
"valid",
"lock",
"release",
"it",
"first",
"."
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/server/lib/sync-protocol-handler.js#L225-L251
|
train
|
mozilla/makedrive
|
server/lib/websocket-auth.js
|
getUsernameByToken
|
function getUsernameByToken(token) {
for(var username in authTable) {
if(authTable[username].indexOf(token) > -1) {
return username;
}
}
return null;
}
|
javascript
|
function getUsernameByToken(token) {
for(var username in authTable) {
if(authTable[username].indexOf(token) > -1) {
return username;
}
}
return null;
}
|
[
"function",
"getUsernameByToken",
"(",
"token",
")",
"{",
"for",
"(",
"var",
"username",
"in",
"authTable",
")",
"{",
"if",
"(",
"authTable",
"[",
"username",
"]",
".",
"indexOf",
"(",
"token",
")",
">",
"-",
"1",
")",
"{",
"return",
"username",
";",
"}",
"}",
"return",
"null",
";",
"}"
] |
Default to 60 sec
|
[
"Default",
"to",
"60",
"sec"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/server/lib/websocket-auth.js#L23-L30
|
train
|
mozilla/makedrive
|
client/src/message-handler.js
|
handlePatchAckResponse
|
function handlePatchAckResponse() {
var syncedPath = data.content.path;
function complete() {
fsUtils.removeUnsynced(fs, syncedPath, function(err) {
if(err && err.code !== 'ENOENT') {
log.error('Failed to remove unsynced attribute for ' + syncedPath + ' in handlePatchAckResponse, complete()');
}
syncManager.syncNext(syncedPath);
});
}
fs.lstat(syncedPath, function(err, stats) {
if(err) {
if(err.code !== 'ENOENT') {
log.error('Failed to access ' + syncedPath + ' in handlePatchAckResponse');
return fs.delaySync(function(delayErr, delayedPath) {
if(delayErr) {
log.error('Failed to delay upstream sync for ' + delayedPath + ' in handlePatchAckResponse');
}
onError(syncManager, err);
});
}
// Non-existent paths usually due to renames or
// deletes cannot be stamped with a checksum
return complete();
}
if(!stats.isFile()) {
return complete();
}
rsyncUtils.getChecksum(rawFs, syncedPath, function(err, checksum) {
if(err) {
log.error('Failed to get the checksum for ' + syncedPath + ' in handlePatchAckResponse');
return fs.delaySync(function(delayErr, delayedPath) {
if(delayErr) {
log.error('Failed to delay upstream sync for ' + delayedPath + ' in handlePatchAckResponse while getting checksum');
}
onError(syncManager, err);
});
}
fsUtils.setChecksum(rawFs, syncedPath, checksum, function(err) {
if(err) {
log.error('Failed to stamp the checksum for ' + syncedPath + ' in handlePatchAckResponse');
return fs.delaySync(function(delayErr, delayedPath) {
if(delayErr) {
log.error('Failed to delay upstream sync for ' + delayedPath + ' in handlePatchAckResponse while setting checksum');
}
onError(syncManager, err);
});
}
complete();
});
});
});
}
|
javascript
|
function handlePatchAckResponse() {
var syncedPath = data.content.path;
function complete() {
fsUtils.removeUnsynced(fs, syncedPath, function(err) {
if(err && err.code !== 'ENOENT') {
log.error('Failed to remove unsynced attribute for ' + syncedPath + ' in handlePatchAckResponse, complete()');
}
syncManager.syncNext(syncedPath);
});
}
fs.lstat(syncedPath, function(err, stats) {
if(err) {
if(err.code !== 'ENOENT') {
log.error('Failed to access ' + syncedPath + ' in handlePatchAckResponse');
return fs.delaySync(function(delayErr, delayedPath) {
if(delayErr) {
log.error('Failed to delay upstream sync for ' + delayedPath + ' in handlePatchAckResponse');
}
onError(syncManager, err);
});
}
// Non-existent paths usually due to renames or
// deletes cannot be stamped with a checksum
return complete();
}
if(!stats.isFile()) {
return complete();
}
rsyncUtils.getChecksum(rawFs, syncedPath, function(err, checksum) {
if(err) {
log.error('Failed to get the checksum for ' + syncedPath + ' in handlePatchAckResponse');
return fs.delaySync(function(delayErr, delayedPath) {
if(delayErr) {
log.error('Failed to delay upstream sync for ' + delayedPath + ' in handlePatchAckResponse while getting checksum');
}
onError(syncManager, err);
});
}
fsUtils.setChecksum(rawFs, syncedPath, checksum, function(err) {
if(err) {
log.error('Failed to stamp the checksum for ' + syncedPath + ' in handlePatchAckResponse');
return fs.delaySync(function(delayErr, delayedPath) {
if(delayErr) {
log.error('Failed to delay upstream sync for ' + delayedPath + ' in handlePatchAckResponse while setting checksum');
}
onError(syncManager, err);
});
}
complete();
});
});
});
}
|
[
"function",
"handlePatchAckResponse",
"(",
")",
"{",
"var",
"syncedPath",
"=",
"data",
".",
"content",
".",
"path",
";",
"function",
"complete",
"(",
")",
"{",
"fsUtils",
".",
"removeUnsynced",
"(",
"fs",
",",
"syncedPath",
",",
"function",
"(",
"err",
")",
"{",
"if",
"(",
"err",
"&&",
"err",
".",
"code",
"!==",
"'ENOENT'",
")",
"{",
"log",
".",
"error",
"(",
"'Failed to remove unsynced attribute for '",
"+",
"syncedPath",
"+",
"' in handlePatchAckResponse, complete()'",
")",
";",
"}",
"syncManager",
".",
"syncNext",
"(",
"syncedPath",
")",
";",
"}",
")",
";",
"}",
"fs",
".",
"lstat",
"(",
"syncedPath",
",",
"function",
"(",
"err",
",",
"stats",
")",
"{",
"if",
"(",
"err",
")",
"{",
"if",
"(",
"err",
".",
"code",
"!==",
"'ENOENT'",
")",
"{",
"log",
".",
"error",
"(",
"'Failed to access '",
"+",
"syncedPath",
"+",
"' in handlePatchAckResponse'",
")",
";",
"return",
"fs",
".",
"delaySync",
"(",
"function",
"(",
"delayErr",
",",
"delayedPath",
")",
"{",
"if",
"(",
"delayErr",
")",
"{",
"log",
".",
"error",
"(",
"'Failed to delay upstream sync for '",
"+",
"delayedPath",
"+",
"' in handlePatchAckResponse'",
")",
";",
"}",
"onError",
"(",
"syncManager",
",",
"err",
")",
";",
"}",
")",
";",
"}",
"return",
"complete",
"(",
")",
";",
"}",
"if",
"(",
"!",
"stats",
".",
"isFile",
"(",
")",
")",
"{",
"return",
"complete",
"(",
")",
";",
"}",
"rsyncUtils",
".",
"getChecksum",
"(",
"rawFs",
",",
"syncedPath",
",",
"function",
"(",
"err",
",",
"checksum",
")",
"{",
"if",
"(",
"err",
")",
"{",
"log",
".",
"error",
"(",
"'Failed to get the checksum for '",
"+",
"syncedPath",
"+",
"' in handlePatchAckResponse'",
")",
";",
"return",
"fs",
".",
"delaySync",
"(",
"function",
"(",
"delayErr",
",",
"delayedPath",
")",
"{",
"if",
"(",
"delayErr",
")",
"{",
"log",
".",
"error",
"(",
"'Failed to delay upstream sync for '",
"+",
"delayedPath",
"+",
"' in handlePatchAckResponse while getting checksum'",
")",
";",
"}",
"onError",
"(",
"syncManager",
",",
"err",
")",
";",
"}",
")",
";",
"}",
"fsUtils",
".",
"setChecksum",
"(",
"rawFs",
",",
"syncedPath",
",",
"checksum",
",",
"function",
"(",
"err",
")",
"{",
"if",
"(",
"err",
")",
"{",
"log",
".",
"error",
"(",
"'Failed to stamp the checksum for '",
"+",
"syncedPath",
"+",
"' in handlePatchAckResponse'",
")",
";",
"return",
"fs",
".",
"delaySync",
"(",
"function",
"(",
"delayErr",
",",
"delayedPath",
")",
"{",
"if",
"(",
"delayErr",
")",
"{",
"log",
".",
"error",
"(",
"'Failed to delay upstream sync for '",
"+",
"delayedPath",
"+",
"' in handlePatchAckResponse while setting checksum'",
")",
";",
"}",
"onError",
"(",
"syncManager",
",",
"err",
")",
";",
"}",
")",
";",
"}",
"complete",
"(",
")",
";",
"}",
")",
";",
"}",
")",
";",
"}",
")",
";",
"}"
] |
As soon as an upstream sync happens, the file synced becomes the last synced version and must be stamped with its checksum to version it, and the unsynced attribute must be removed
|
[
"As",
"soon",
"as",
"an",
"upstream",
"sync",
"happens",
"the",
"file",
"synced",
"becomes",
"the",
"last",
"synced",
"version",
"and",
"must",
"be",
"stamped",
"with",
"its",
"checksum",
"to",
"version",
"it",
"and",
"the",
"unsynced",
"attribute",
"must",
"be",
"removed"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/client/src/message-handler.js#L282-L342
|
train
|
ns1/ns1-js
|
src/rest_resource.js
|
convert_json_to_objects
|
function convert_json_to_objects(data) {
if (Array.isArray(data)) {
return data.map((item, index, arr) => {
return new this(item, true)
})
} else {
return new this(data, true)
}
}
|
javascript
|
function convert_json_to_objects(data) {
if (Array.isArray(data)) {
return data.map((item, index, arr) => {
return new this(item, true)
})
} else {
return new this(data, true)
}
}
|
[
"function",
"convert_json_to_objects",
"(",
"data",
")",
"{",
"if",
"(",
"Array",
".",
"isArray",
"(",
"data",
")",
")",
"{",
"return",
"data",
".",
"map",
"(",
"(",
"item",
",",
"index",
",",
"arr",
")",
"=>",
"{",
"return",
"new",
"this",
"(",
"item",
",",
"true",
")",
"}",
")",
"}",
"else",
"{",
"return",
"new",
"this",
"(",
"data",
",",
"true",
")",
"}",
"}"
] |
Takes in JSON from requests and converts internal objects into objects representing the
resource class.
@param {Array/Object} data - JSON data, either a single object or an array of objects
@return {Array/Object} - Returns the data as objects of the resource class
@private
|
[
"Takes",
"in",
"JSON",
"from",
"requests",
"and",
"converts",
"internal",
"objects",
"into",
"objects",
"representing",
"the",
"resource",
"class",
"."
] |
750ad13d97054196732dd957ca574a698e4ce4c2
|
https://github.com/ns1/ns1-js/blob/750ad13d97054196732dd957ca574a698e4ce4c2/src/rest_resource.js#L143-L151
|
train
|
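A hedged illustration of how the helper above behaves: it expects this to be bound to a resource class, so a single object yields one instance and an array yields an array of instances. The Zone stand-in class is invented for the sketch and is not ns1-js's real class; the helper is restated so the snippet runs on its own.

// Illustration only: Zone is an invented stand-in resource class.
function Zone(data, fromApi) {
  this.attributes = data;
  this.loaded = !!fromApi;
}

// Restated from the record above so the sketch is self-contained.
function convert_json_to_objects(data) {
  if (Array.isArray(data)) {
    return data.map((item) => new this(item, true));
  }
  return new this(data, true);
}

var one = convert_json_to_objects.call(Zone, { zone: 'example.com' });
var many = convert_json_to_objects.call(Zone, [{ zone: 'a.com' }, { zone: 'b.com' }]);
console.log(one instanceof Zone);         // true
console.log(many.length, many[0].loaded); // 2 true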
mozilla/makedrive
|
server/lib/client-manager.js
|
runClient
|
function runClient(client) {
var ws = client.ws;
function invalidMessage() {
var message = SyncMessage.error.format;
message.content = {error: 'Unable to parse/handle message, invalid message format.'};
client.sendMessage(message);
}
ws.onmessage = function(msg, flags) {
var data;
var message;
var info;
if(!flags || !flags.binary) {
try {
// Keep track of how much data we receive
info = client.info();
if(info) {
info.bytesReceived += Buffer.byteLength(msg.data, 'utf8');
}
data = JSON.parse(msg.data);
message = SyncMessage.parse(data);
} catch(error) {
log.error({client: client, err: error}, 'Unable to parse/handle client message. Data was `%s`', msg.data);
return invalidMessage();
}
// Delegate ws messages to the sync protocol handler at this point
client.handler.handleMessage(message);
} else {
log.warn({client: client}, 'Expected string but got binary data over web socket.');
invalidMessage();
}
};
// Send an AUTHZ response to let client know normal sync'ing can begin.
client.state = States.LISTENING;
client.sendMessage(SyncMessage.response.authz);
log.debug({client: client}, 'Starting authorized client session');
}
|
javascript
|
function runClient(client) {
var ws = client.ws;
function invalidMessage() {
var message = SyncMessage.error.format;
message.content = {error: 'Unable to parse/handle message, invalid message format.'};
client.sendMessage(message);
}
ws.onmessage = function(msg, flags) {
var data;
var message;
var info;
if(!flags || !flags.binary) {
try {
// Keep track of how much data we receive
info = client.info();
if(info) {
info.bytesReceived += Buffer.byteLength(msg.data, 'utf8');
}
data = JSON.parse(msg.data);
message = SyncMessage.parse(data);
} catch(error) {
log.error({client: client, err: error}, 'Unable to parse/handle client message. Data was `%s`', msg.data);
return invalidMessage();
}
// Delegate ws messages to the sync protocol handler at this point
client.handler.handleMessage(message);
} else {
log.warn({client: client}, 'Expected string but got binary data over web socket.');
invalidMessage();
}
};
// Send an AUTHZ response to let client know normal sync'ing can begin.
client.state = States.LISTENING;
client.sendMessage(SyncMessage.response.authz);
log.debug({client: client}, 'Starting authorized client session');
}
|
[
"function",
"runClient",
"(",
"client",
")",
"{",
"var",
"ws",
"=",
"client",
".",
"ws",
";",
"function",
"invalidMessage",
"(",
")",
"{",
"var",
"message",
"=",
"SyncMessage",
".",
"error",
".",
"format",
";",
"message",
".",
"content",
"=",
"{",
"error",
":",
"'Unable to parse/handle message, invalid message format.'",
"}",
";",
"client",
".",
"sendMessage",
"(",
"message",
")",
";",
"}",
"ws",
".",
"onmessage",
"=",
"function",
"(",
"msg",
",",
"flags",
")",
"{",
"var",
"data",
";",
"var",
"message",
";",
"var",
"info",
";",
"if",
"(",
"!",
"flags",
"||",
"!",
"flags",
".",
"binary",
")",
"{",
"try",
"{",
"info",
"=",
"client",
".",
"info",
"(",
")",
";",
"if",
"(",
"info",
")",
"{",
"info",
".",
"bytesReceived",
"+=",
"Buffer",
".",
"byteLength",
"(",
"msg",
".",
"data",
",",
"'utf8'",
")",
";",
"}",
"data",
"=",
"JSON",
".",
"parse",
"(",
"msg",
".",
"data",
")",
";",
"message",
"=",
"SyncMessage",
".",
"parse",
"(",
"data",
")",
";",
"}",
"catch",
"(",
"error",
")",
"{",
"log",
".",
"error",
"(",
"{",
"client",
":",
"client",
",",
"err",
":",
"error",
"}",
",",
"'Unable to parse/handle client message. Data was `%s`'",
",",
"msg",
".",
"data",
")",
";",
"return",
"invalidMessage",
"(",
")",
";",
"}",
"client",
".",
"handler",
".",
"handleMessage",
"(",
"message",
")",
";",
"}",
"else",
"{",
"log",
".",
"warn",
"(",
"{",
"client",
":",
"client",
"}",
",",
"'Expected string but got binary data over web socket.'",
")",
";",
"invalidMessage",
"(",
")",
";",
"}",
"}",
";",
"client",
".",
"state",
"=",
"States",
".",
"LISTENING",
";",
"client",
".",
"sendMessage",
"(",
"SyncMessage",
".",
"response",
".",
"authz",
")",
";",
"log",
".",
"debug",
"(",
"{",
"client",
":",
"client",
"}",
",",
"'Starting authorized client session'",
")",
";",
"}"
] |
Run the client normally through protocol steps.
|
[
"Run",
"the",
"client",
"normally",
"through",
"protocol",
"steps",
"."
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/server/lib/client-manager.js#L12-L53
|
train
|
mozilla/makedrive
|
server/lib/client-manager.js
|
initClient
|
function initClient(client) {
var ws = client.ws;
client.state = States.CONNECTING;
// Wait until we get the user's token so we can finish authorizing
ws.onmessage = function(msg) {
var data;
var info;
try {
// Keep track of how much data we receive
info = client.info();
if(info) {
info.bytesReceived += Buffer.byteLength(msg.data, 'utf8');
}
data = JSON.parse(msg.data);
} catch(err) {
log.error({client: client, err: err}, 'Error parsing client token. Data was `%s`', msg.data);
ClientInfo.remove(token);
client.close({
code: 1011,
message: 'Error: token could not be parsed.'
});
return;
}
// Authorize user
var token = data.token;
var username = WebsocketAuth.getAuthorizedUsername(token);
if (!username) {
log.warn({client: client}, 'Client sent an invalid or expired token (could not get username): token=%s', token);
ClientInfo.remove(token);
client.close({
code: 1008,
message: 'Error: invalid token.'
});
return;
}
// Update client details now that he/she is authenticated
client.id = token;
client.username = username;
client.fs = filesystem.create(username);
ClientInfo.update(client);
log.info({client: client}, 'Client connected');
runClient(client);
};
}
|
javascript
|
function initClient(client) {
var ws = client.ws;
client.state = States.CONNECTING;
// Wait until we get the user's token so we can finish authorizing
ws.onmessage = function(msg) {
var data;
var info;
try {
// Keep track of how much data we receive
info = client.info();
if(info) {
info.bytesReceived += Buffer.byteLength(msg.data, 'utf8');
}
data = JSON.parse(msg.data);
} catch(err) {
log.error({client: client, err: err}, 'Error parsing client token. Data was `%s`', msg.data);
ClientInfo.remove(token);
client.close({
code: 1011,
message: 'Error: token could not be parsed.'
});
return;
}
// Authorize user
var token = data.token;
var username = WebsocketAuth.getAuthorizedUsername(token);
if (!username) {
log.warn({client: client}, 'Client sent an invalid or expired token (could not get username): token=%s', token);
ClientInfo.remove(token);
client.close({
code: 1008,
message: 'Error: invalid token.'
});
return;
}
// Update client details now that he/she is authenticated
client.id = token;
client.username = username;
client.fs = filesystem.create(username);
ClientInfo.update(client);
log.info({client: client}, 'Client connected');
runClient(client);
};
}
|
[
"function",
"initClient",
"(",
"client",
")",
"{",
"var",
"ws",
"=",
"client",
".",
"ws",
";",
"client",
".",
"state",
"=",
"States",
".",
"CONNECTING",
";",
"ws",
".",
"onmessage",
"=",
"function",
"(",
"msg",
")",
"{",
"var",
"data",
";",
"var",
"info",
";",
"try",
"{",
"info",
"=",
"client",
".",
"info",
"(",
")",
";",
"if",
"(",
"info",
")",
"{",
"info",
".",
"bytesReceived",
"+=",
"Buffer",
".",
"byteLength",
"(",
"msg",
".",
"data",
",",
"'utf8'",
")",
";",
"}",
"data",
"=",
"JSON",
".",
"parse",
"(",
"msg",
".",
"data",
")",
";",
"}",
"catch",
"(",
"err",
")",
"{",
"log",
".",
"error",
"(",
"{",
"client",
":",
"client",
",",
"err",
":",
"err",
"}",
",",
"'Error parsing client token. Data was `%s`'",
",",
"msg",
".",
"data",
")",
";",
"ClientInfo",
".",
"remove",
"(",
"token",
")",
";",
"client",
".",
"close",
"(",
"{",
"code",
":",
"1011",
",",
"message",
":",
"'Error: token could not be parsed.'",
"}",
")",
";",
"return",
";",
"}",
"var",
"token",
"=",
"data",
".",
"token",
";",
"var",
"username",
"=",
"WebsocketAuth",
".",
"getAuthorizedUsername",
"(",
"token",
")",
";",
"if",
"(",
"!",
"username",
")",
"{",
"log",
".",
"warn",
"(",
"{",
"client",
":",
"client",
"}",
",",
"'Client sent an invalid or expired token (could not get username): token=%s'",
",",
"token",
")",
";",
"ClientInfo",
".",
"remove",
"(",
"token",
")",
";",
"client",
".",
"close",
"(",
"{",
"code",
":",
"1008",
",",
"message",
":",
"'Error: invalid token.'",
"}",
")",
";",
"return",
";",
"}",
"client",
".",
"id",
"=",
"token",
";",
"client",
".",
"username",
"=",
"username",
";",
"client",
".",
"fs",
"=",
"filesystem",
".",
"create",
"(",
"username",
")",
";",
"ClientInfo",
".",
"update",
"(",
"client",
")",
";",
"log",
".",
"info",
"(",
"{",
"client",
":",
"client",
"}",
",",
"'Client connected'",
")",
";",
"runClient",
"(",
"client",
")",
";",
"}",
";",
"}"
] |
Handle initial connection and authentication, bind user data
to client, including filesystem, and switch the client to normal
run mode.
|
[
"Handle",
"initial",
"connection",
"and",
"authentication",
"bind",
"user",
"data",
"to",
"client",
"including",
"filesystem",
"and",
"switch",
"the",
"client",
"to",
"normal",
"run",
"mode",
"."
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/server/lib/client-manager.js#L60-L111
|
train
|
mozilla/makedrive
|
server/lib/client-manager.js
|
remove
|
function remove(client) {
if(!clients) {
return;
}
var idx = clients.indexOf(client);
if(idx > -1) {
clients.splice(idx, 1);
}
}
|
javascript
|
function remove(client) {
if(!clients) {
return;
}
var idx = clients.indexOf(client);
if(idx > -1) {
clients.splice(idx, 1);
}
}
|
[
"function",
"remove",
"(",
"client",
")",
"{",
"if",
"(",
"!",
"clients",
")",
"{",
"return",
";",
"}",
"var",
"idx",
"=",
"clients",
".",
"indexOf",
"(",
"client",
")",
";",
"if",
"(",
"idx",
">",
"-",
"1",
")",
"{",
"clients",
".",
"splice",
"(",
"idx",
",",
"1",
")",
";",
"}",
"}"
] |
Remove client from the list. Does not affect client state
or life-cycle.
|
[
"Remove",
"client",
"from",
"the",
"list",
".",
"Does",
"not",
"affect",
"client",
"state",
"or",
"life",
"-",
"cycle",
"."
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/server/lib/client-manager.js#L122-L131
|
train
|
mozilla/makedrive
|
server/lib/client-manager.js
|
add
|
function add(client) {
// Auto-remove clients on close
client.once('closed', function() {
remove(client);
});
clients = clients || [];
clients.push(client);
initClient(client);
}
|
javascript
|
function add(client) {
// Auto-remove clients on close
client.once('closed', function() {
remove(client);
});
clients = clients || [];
clients.push(client);
initClient(client);
}
|
[
"function",
"add",
"(",
"client",
")",
"{",
"client",
".",
"once",
"(",
"'closed'",
",",
"function",
"(",
")",
"{",
"remove",
"(",
"client",
")",
";",
"}",
")",
";",
"clients",
"=",
"clients",
"||",
"[",
"]",
";",
"clients",
".",
"push",
"(",
"client",
")",
";",
"initClient",
"(",
"client",
")",
";",
"}"
] |
Add a client to the list, and manage its life-cycle.
|
[
"Add",
"a",
"client",
"to",
"the",
"list",
"and",
"manage",
"its",
"life",
"-",
"cycle",
"."
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/server/lib/client-manager.js#L136-L145
|
train
|
mozilla/makedrive
|
server/lib/client-manager.js
|
shutdown
|
function shutdown(callback) {
var closed = 0;
var connected = clients ? clients.length : 0;
function maybeFinished() {
if(++closed >= connected) {
clients = null;
log.info('[Shutdown] All client connections safely closed.');
return callback();
}
log.info('[Shutdown] Closed client %s of %s.', closed, connected);
}
if(!connected) {
return maybeFinished();
}
var client;
for(var i = 0; i < connected; i++) {
client = clients[i] || null;
if(!client) {
maybeFinished();
} else {
client.once('closed', maybeFinished);
if(client.state !== States.CLOSING && client.state !== States.CLOSED) {
client.close();
}
}
}
}
|
javascript
|
function shutdown(callback) {
var closed = 0;
var connected = clients ? clients.length : 0;
function maybeFinished() {
if(++closed >= connected) {
clients = null;
log.info('[Shutdown] All client connections safely closed.');
return callback();
}
log.info('[Shutdown] Closed client %s of %s.', closed, connected);
}
if(!connected) {
return maybeFinished();
}
var client;
for(var i = 0; i < connected; i++) {
client = clients[i] || null;
if(!client) {
maybeFinished();
} else {
client.once('closed', maybeFinished);
if(client.state !== States.CLOSING && client.state !== States.CLOSED) {
client.close();
}
}
}
}
|
[
"function",
"shutdown",
"(",
"callback",
")",
"{",
"var",
"closed",
"=",
"0",
";",
"var",
"connected",
"=",
"clients",
"?",
"clients",
".",
"length",
":",
"0",
";",
"function",
"maybeFinished",
"(",
")",
"{",
"if",
"(",
"++",
"closed",
">=",
"connected",
")",
"{",
"clients",
"=",
"null",
";",
"log",
".",
"info",
"(",
"'[Shutdown] All client connections safely closed.'",
")",
";",
"return",
"callback",
"(",
")",
";",
"}",
"log",
".",
"info",
"(",
"'[Shutdown] Closed client %s of %s.'",
",",
"closed",
",",
"connected",
")",
";",
"}",
"if",
"(",
"!",
"connected",
")",
"{",
"return",
"maybeFinished",
"(",
")",
";",
"}",
"var",
"client",
";",
"for",
"(",
"var",
"i",
"=",
"0",
";",
"i",
"<",
"connected",
";",
"i",
"++",
")",
"{",
"client",
"=",
"clients",
"[",
"i",
"]",
"||",
"null",
";",
"if",
"(",
"!",
"client",
")",
"{",
"maybeFinished",
"(",
")",
";",
"}",
"else",
"{",
"client",
".",
"once",
"(",
"'closed'",
",",
"maybeFinished",
")",
";",
"if",
"(",
"client",
".",
"state",
"!==",
"States",
".",
"CLOSING",
"&&",
"client",
".",
"state",
"!==",
"States",
".",
"CLOSED",
")",
"{",
"client",
".",
"close",
"(",
")",
";",
"}",
"}",
"}",
"}"
] |
Safe shutdown, waiting on all clients to close.
|
[
"Safe",
"shutdown",
"waiting",
"on",
"all",
"clients",
"to",
"close",
"."
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/server/lib/client-manager.js#L150-L183
|
train
|
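A brief sketch of how a safe-shutdown helper like the one above is typically driven during process teardown; the require path and the exported name are assumptions.

// Sketch only: module path and export shape are assumed.
var ClientManager = require('./lib/client-manager');

process.on('SIGTERM', function() {
  ClientManager.shutdown(function() {
    // Every tracked client has emitted 'closed'; safe to exit now.
    process.exit(0);
  });
});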
ns1/ns1-js
|
src/NS1_request.js
|
apply_data
|
function apply_data(query, files) {
if(startCb){
startCb();
}
if (query !== undefined) {
if (this.method === 'get') {
this.request = this.request.query(query)
} else {
this.request = this.request.send(query)
}
}
if (files !== undefined) {
if (files instanceof FormData) {
this.request = this.request.send(files)
} else {
Object.keys(files).forEach((key) => {
this.request = this.request.attach(key, files[key])
})
}
}
}
|
javascript
|
function apply_data(query, files) {
if(startCb){
startCb();
}
if (query !== undefined) {
if (this.method === 'get') {
this.request = this.request.query(query)
} else {
this.request = this.request.send(query)
}
}
if (files !== undefined) {
if (files instanceof FormData) {
this.request = this.request.send(files)
} else {
Object.keys(files).forEach((key) => {
this.request = this.request.attach(key, files[key])
})
}
}
}
|
[
"function",
"apply_data",
"(",
"query",
",",
"files",
")",
"{",
"if",
"(",
"startCb",
")",
"{",
"startCb",
"(",
")",
";",
"}",
"if",
"(",
"query",
"!==",
"undefined",
")",
"{",
"if",
"(",
"this",
".",
"method",
"===",
"'get'",
")",
"{",
"this",
".",
"request",
"=",
"this",
".",
"request",
".",
"query",
"(",
"query",
")",
"}",
"else",
"{",
"this",
".",
"request",
"=",
"this",
".",
"request",
".",
"send",
"(",
"query",
")",
"}",
"}",
"if",
"(",
"files",
"!==",
"undefined",
")",
"{",
"if",
"(",
"files",
"instanceof",
"FormData",
")",
"{",
"this",
".",
"request",
"=",
"this",
".",
"request",
".",
"send",
"(",
"files",
")",
"}",
"else",
"{",
"Object",
".",
"keys",
"(",
"files",
")",
".",
"forEach",
"(",
"(",
"key",
")",
"=>",
"{",
"this",
".",
"request",
"=",
"this",
".",
"request",
".",
"attach",
"(",
"key",
",",
"files",
"[",
"key",
"]",
")",
"}",
")",
"}",
"}",
"}"
] |
Applies data to the this.request superagent object. Works with query params or
file attachments.
@param {Object} query - Any parameters to be sent in the query string for GET requests or in the req body for others
@param {Object/FormData} files - Key / value mapped object containing file paths for uploads, or a FormData object if it's coming from the browser
@private
|
[
"Applies",
"data",
"to",
"the",
"this",
".",
"request",
"superagent",
"object",
".",
"Works",
"with",
"query",
"params",
"or",
"file",
"attachments",
"."
] |
750ad13d97054196732dd957ca574a698e4ce4c2
|
https://github.com/ns1/ns1-js/blob/750ad13d97054196732dd957ca574a698e4ce4c2/src/NS1_request.js#L94-L115
|
train
|
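A hedged sketch of the two superagent data paths the helper above switches between, plus the attach path for uploads; the URLs are placeholders, and the requests are merely built here (calling .end() or .then() would send them).

// Illustration of the superagent calls used above; URLs are placeholders.
var request = require('superagent');

// GET: parameters go on the query string.
var get = request.get('https://api.example.com/v1/zones').query({ limit: 10 });

// Non-GET: parameters go in the request body.
var put = request.put('https://api.example.com/v1/zones/example.com').send({ ttl: 3600 });

// File uploads: one .attach() call per key/path pair.
var upload = request.post('https://api.example.com/v1/import').attach('zonefile', '/tmp/example.zone');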
mozilla/makedrive
|
server/lib/filer-www/default-handler.js
|
handleFile
|
function handleFile(fs, path, res) {
var contentType = mime.lookup(path);
var encoding = mime.charsets.lookup(contentType) === "UTF-8" ? "utf8" : null;
fs.readFile(path, {encoding: encoding}, function(err, data) {
if(err) {
log.error(err, 'Unable to read file path `%s`', path);
handle404(path, res);
return;
}
write(data, contentType, res);
});
}
|
javascript
|
function handleFile(fs, path, res) {
var contentType = mime.lookup(path);
var encoding = mime.charsets.lookup(contentType) === "UTF-8" ? "utf8" : null;
fs.readFile(path, {encoding: encoding}, function(err, data) {
if(err) {
log.error(err, 'Unable to read file path `%s`', path);
handle404(path, res);
return;
}
write(data, contentType, res);
});
}
|
[
"function",
"handleFile",
"(",
"fs",
",",
"path",
",",
"res",
")",
"{",
"var",
"contentType",
"=",
"mime",
".",
"lookup",
"(",
"path",
")",
";",
"var",
"encoding",
"=",
"mime",
".",
"charsets",
".",
"lookup",
"(",
"contentType",
")",
"===",
"\"UTF-8\"",
"?",
"\"utf8\"",
":",
"null",
";",
"fs",
".",
"readFile",
"(",
"path",
",",
"{",
"encoding",
":",
"encoding",
"}",
",",
"function",
"(",
"err",
",",
"data",
")",
"{",
"if",
"(",
"err",
")",
"{",
"log",
".",
"error",
"(",
"err",
",",
"'Unable to read file path `%s`'",
",",
"path",
")",
";",
"handle404",
"(",
"path",
",",
"res",
")",
";",
"return",
";",
"}",
"write",
"(",
"data",
",",
"contentType",
",",
"res",
")",
";",
"}",
")",
";",
"}"
] |
Send the raw file, making it somewhat more readable
|
[
"Send",
"the",
"raw",
"file",
"making",
"it",
"somewhat",
"more",
"readable"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/server/lib/filer-www/default-handler.js#L27-L40
|
train
|
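The text-versus-binary decision in the handler above can be shown in isolation; it reuses the same mime.lookup() and mime.charsets.lookup() calls that appear in the record (classic mime 1.x API), so only the example paths are invented.

// Standalone illustration of the encoding decision above (mime 1.x API).
var mime = require('mime');

function readEncodingFor(path) {
  var contentType = mime.lookup(path);
  return mime.charsets.lookup(contentType) === 'UTF-8' ? 'utf8' : null;
}

console.log(readEncodingFor('/index.html')); // 'utf8' -> read as text
console.log(readEncodingFor('/logo.png'));   // null   -> read as a raw Buffer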
mozilla/makedrive
|
server/lib/filer-www/default-handler.js
|
handleDir
|
function handleDir(fs, path, res) {
var sh = new fs.Shell();
var parent = Path.dirname(path);
var header = '<!DOCTYPE html>' +
'<html><head><title>Index of ' + path + '</title></head>' +
'<body><h1>Index of ' + path + '</h1>' +
'<table><tr><th><img src="/icons/blank.png" alt="[ICO]"></th>' +
'<th><a href="#">Name</a></th><th><a href="#">Last modified</a></th>' +
'<th><a href="#">Size</a></th><th><a href="#">Description</a></th></tr>' +
'<tr><th colspan="5"><hr></th></tr>' +
'<tr><td valign="top"><img src="/icons/back.png" alt="[DIR]"></td>' +
'<td><a href="' + parent + '">Parent Directory</a> </td><td> </td>' +
'<td align="right"> - </td><td> </td></tr>';
var footer = '<tr><th colspan="5"><hr></th></tr>' +
'</table><address>MakeDrive/' + version + ' (Web)</address>' +
'</body></html>';
function row(icon, alt, href, name, modified, size) {
icon = icon || '/icons/unknown.png';
alt = alt || '[ ]';
modified = util.formatDate(new Date(modified));
size = util.formatSize(size);
return '<tr><td valign="top"><img src="' + icon + '" alt="' + alt + '"></td><td>' +
'<a href="' + href + '">' + name + '</a> </td>' +
'<td align="right">' + modified + ' </td>' +
'<td align="right">' + size + '</td><td> </td></tr>';
}
function processEntries(entries) {
var rows = '';
entries.forEach(function(entry) {
var name = Path.basename(entry.path);
var ext = Path.extname(entry.path);
var href = Path.join('/p', path, entry.path);
var icon;
var alt;
if(entry.type === 'DIRECTORY') {
icon = '/icons/folder.png';
alt = '[DIR]';
} else { // file
if(util.isImage(ext)) {
icon = '/icons/image2.png';
alt = '[IMG]';
} else if(util.isMedia(ext)) {
icon = '/icons/movie.png';
alt = '[MOV]';
} else {
icon = '/icons/text.png';
alt = '[TXT]';
}
}
rows += row(icon, alt, href, name, entry.modified, entry.size);
});
var content = header + rows + footer;
write(content, 'text/html', res);
}
sh.ls(path, function(err, list) {
if(err) {
log.error(err, 'Unable to get listing for path `%s`', path);
handle404(path, res);
return;
}
processEntries(list);
});
}
|
javascript
|
function handleDir(fs, path, res) {
var sh = new fs.Shell();
var parent = Path.dirname(path);
var header = '<!DOCTYPE html>' +
'<html><head><title>Index of ' + path + '</title></head>' +
'<body><h1>Index of ' + path + '</h1>' +
'<table><tr><th><img src="/icons/blank.png" alt="[ICO]"></th>' +
'<th><a href="#">Name</a></th><th><a href="#">Last modified</a></th>' +
'<th><a href="#">Size</a></th><th><a href="#">Description</a></th></tr>' +
'<tr><th colspan="5"><hr></th></tr>' +
'<tr><td valign="top"><img src="/icons/back.png" alt="[DIR]"></td>' +
'<td><a href="' + parent + '">Parent Directory</a> </td><td> </td>' +
'<td align="right"> - </td><td> </td></tr>';
var footer = '<tr><th colspan="5"><hr></th></tr>' +
'</table><address>MakeDrive/' + version + ' (Web)</address>' +
'</body></html>';
function row(icon, alt, href, name, modified, size) {
icon = icon || '/icons/unknown.png';
alt = alt || '[ ]';
modified = util.formatDate(new Date(modified));
size = util.formatSize(size);
return '<tr><td valign="top"><img src="' + icon + '" alt="' + alt + '"></td><td>' +
'<a href="' + href + '">' + name + '</a> </td>' +
'<td align="right">' + modified + ' </td>' +
'<td align="right">' + size + '</td><td> </td></tr>';
}
function processEntries(entries) {
var rows = '';
entries.forEach(function(entry) {
var name = Path.basename(entry.path);
var ext = Path.extname(entry.path);
var href = Path.join('/p', path, entry.path);
var icon;
var alt;
if(entry.type === 'DIRECTORY') {
icon = '/icons/folder.png';
alt = '[DIR]';
} else { // file
if(util.isImage(ext)) {
icon = '/icons/image2.png';
alt = '[IMG]';
} else if(util.isMedia(ext)) {
icon = '/icons/movie.png';
alt = '[MOV]';
} else {
icon = '/icons/text.png';
alt = '[TXT]';
}
}
rows += row(icon, alt, href, name, entry.modified, entry.size);
});
var content = header + rows + footer;
write(content, 'text/html', res);
}
sh.ls(path, function(err, list) {
if(err) {
log.error(err, 'Unable to get listing for path `%s`', path);
handle404(path, res);
return;
}
processEntries(list);
});
}
|
[
"function",
"handleDir",
"(",
"fs",
",",
"path",
",",
"res",
")",
"{",
"var",
"sh",
"=",
"new",
"fs",
".",
"Shell",
"(",
")",
";",
"var",
"parent",
"=",
"Path",
".",
"dirname",
"(",
"path",
")",
";",
"var",
"header",
"=",
"'<!DOCTYPE html>'",
"+",
"'<html><head><title>Index of '",
"+",
"path",
"+",
"'</title></head>'",
"+",
"'<body><h1>Index of '",
"+",
"path",
"+",
"'</h1>'",
"+",
"'<table><tr><th><img src=\"/icons/blank.png\" alt=\"[ICO]\"></th>'",
"+",
"'<th><a href=\"#\">Name</a></th><th><a href=\"#\">Last modified</a></th>'",
"+",
"'<th><a href=\"#\">Size</a></th><th><a href=\"#\">Description</a></th></tr>'",
"+",
"'<tr><th colspan=\"5\"><hr></th></tr>'",
"+",
"'<tr><td valign=\"top\"><img src=\"/icons/back.png\" alt=\"[DIR]\"></td>'",
"+",
"'<td><a href=\"'",
"+",
"parent",
"+",
"'\">Parent Directory</a> </td><td> </td>'",
"+",
"'<td align=\"right\"> - </td><td> </td></tr>'",
";",
"var",
"footer",
"=",
"'<tr><th colspan=\"5\"><hr></th></tr>'",
"+",
"'</table><address>MakeDrive/'",
"+",
"version",
"+",
"' (Web)</address>'",
"+",
"'</body></html>'",
";",
"function",
"row",
"(",
"icon",
",",
"alt",
",",
"href",
",",
"name",
",",
"modified",
",",
"size",
")",
"{",
"icon",
"=",
"icon",
"||",
"'/icons/unknown.png'",
";",
"alt",
"=",
"alt",
"||",
"'[ ]'",
";",
"modified",
"=",
"util",
".",
"formatDate",
"(",
"new",
"Date",
"(",
"modified",
")",
")",
";",
"size",
"=",
"util",
".",
"formatSize",
"(",
"size",
")",
";",
"return",
"'<tr><td valign=\"top\"><img src=\"'",
"+",
"icon",
"+",
"'\" alt=\"'",
"+",
"alt",
"+",
"'\"></td><td>'",
"+",
"'<a href=\"'",
"+",
"href",
"+",
"'\">'",
"+",
"name",
"+",
"'</a> </td>'",
"+",
"'<td align=\"right\">'",
"+",
"modified",
"+",
"' </td>'",
"+",
"'<td align=\"right\">'",
"+",
"size",
"+",
"'</td><td> </td></tr>'",
";",
"}",
"function",
"processEntries",
"(",
"entries",
")",
"{",
"var",
"rows",
"=",
"''",
";",
"entries",
".",
"forEach",
"(",
"function",
"(",
"entry",
")",
"{",
"var",
"name",
"=",
"Path",
".",
"basename",
"(",
"entry",
".",
"path",
")",
";",
"var",
"ext",
"=",
"Path",
".",
"extname",
"(",
"entry",
".",
"path",
")",
";",
"var",
"href",
"=",
"Path",
".",
"join",
"(",
"'/p'",
",",
"path",
",",
"entry",
".",
"path",
")",
";",
"var",
"icon",
";",
"var",
"alt",
";",
"if",
"(",
"entry",
".",
"type",
"===",
"'DIRECTORY'",
")",
"{",
"icon",
"=",
"'/icons/folder.png'",
";",
"alt",
"=",
"'[DIR]'",
";",
"}",
"else",
"{",
"if",
"(",
"util",
".",
"isImage",
"(",
"ext",
")",
")",
"{",
"icon",
"=",
"'/icons/image2.png'",
";",
"alt",
"=",
"'[IMG]'",
";",
"}",
"else",
"if",
"(",
"util",
".",
"isMedia",
"(",
"ext",
")",
")",
"{",
"icon",
"=",
"'/icons/movie.png'",
";",
"alt",
"=",
"'[MOV]'",
";",
"}",
"else",
"{",
"icon",
"=",
"'/icons/text.png'",
";",
"alt",
"=",
"'[TXT]'",
";",
"}",
"}",
"rows",
"+=",
"row",
"(",
"icon",
",",
"alt",
",",
"href",
",",
"name",
",",
"entry",
".",
"modified",
",",
"entry",
".",
"size",
")",
";",
"}",
")",
";",
"var",
"content",
"=",
"header",
"+",
"rows",
"+",
"footer",
";",
"write",
"(",
"content",
",",
"'text/html'",
",",
"res",
")",
";",
"}",
"sh",
".",
"ls",
"(",
"path",
",",
"function",
"(",
"err",
",",
"list",
")",
"{",
"if",
"(",
"err",
")",
"{",
"log",
".",
"error",
"(",
"err",
",",
"'Unable to get listing for path `%s`'",
",",
"path",
")",
";",
"handle404",
"(",
"path",
",",
"res",
")",
";",
"return",
";",
"}",
"processEntries",
"(",
"list",
")",
";",
"}",
")",
";",
"}"
] |
Send an Apache-style directory listing
|
[
"Send",
"an",
"Apache",
"-",
"style",
"directory",
"listing"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/server/lib/filer-www/default-handler.js#L45-L115
|
train
|
mozilla/makedrive
|
server/redis-clients.js
|
onmessage
|
function onmessage(channel, message) {
if(closing) {
return;
}
switch(channel) {
case ChannelConstants.syncChannel:
module.exports.emit('sync', message);
break;
case ChannelConstants.lockRequestChannel:
module.exports.emit('lock-request', message);
break;
case ChannelConstants.lockResponseChannel:
module.exports.emit('lock-response', message);
break;
default:
log.warn('[Redis] Got unexpected message on channel `%s`. Message was: `%s`', channel, message);
break;
}
}
|
javascript
|
function onmessage(channel, message) {
if(closing) {
return;
}
switch(channel) {
case ChannelConstants.syncChannel:
module.exports.emit('sync', message);
break;
case ChannelConstants.lockRequestChannel:
module.exports.emit('lock-request', message);
break;
case ChannelConstants.lockResponseChannel:
module.exports.emit('lock-response', message);
break;
default:
log.warn('[Redis] Got unexpected message on channel `%s`. Message was: `%s`', channel, message);
break;
}
}
|
[
"function",
"onmessage",
"(",
"channel",
",",
"message",
")",
"{",
"if",
"(",
"closing",
")",
"{",
"return",
";",
"}",
"switch",
"(",
"channel",
")",
"{",
"case",
"ChannelConstants",
".",
"syncChannel",
":",
"module",
".",
"exports",
".",
"emit",
"(",
"'sync'",
",",
"message",
")",
";",
"break",
";",
"case",
"ChannelConstants",
".",
"lockRequestChannel",
":",
"module",
".",
"exports",
".",
"emit",
"(",
"'lock-request'",
",",
"message",
")",
";",
"break",
";",
"case",
"ChannelConstants",
".",
"lockResponseChannel",
":",
"module",
".",
"exports",
".",
"emit",
"(",
"'lock-response'",
",",
"message",
")",
";",
"break",
";",
"default",
":",
"log",
".",
"warn",
"(",
"'[Redis] Got unexpected message on channel `%s`. Message was: `%s`'",
",",
"channel",
",",
"message",
")",
";",
"break",
";",
"}",
"}"
] |
redis subscription messages. Split the different types out based on channel
|
[
"redis",
"subscription",
"messages",
".",
"Split",
"the",
"different",
"types",
"out",
"based",
"on",
"channel"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/server/redis-clients.js#L60-L79
|
train
|
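A hedged sketch of consuming the events re-emitted above; the record shows module.exports.emit(...), so the module is assumed to be an EventEmitter, and the require path is an assumption.

// Sketch only: require path is assumed; event names come from the record above.
var redisClients = require('./server/redis-clients');

redisClients.on('sync', function(message) {
  console.log('sync message from another server instance:', message);
});

redisClients.on('lock-request', function(message) {
  console.log('another instance is requesting a lock:', message);
});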
mozilla/makedrive
|
client/src/index.js
|
windowCloseHandler
|
function windowCloseHandler(event) {
if(!options.windowCloseWarning) {
return;
}
if(sync.state !== sync.SYNC_SYNCING) {
return;
}
var confirmationMessage = "Sync currently underway, are you sure you want to close?";
(event || global.event).returnValue = confirmationMessage;
return confirmationMessage;
}
|
javascript
|
function windowCloseHandler(event) {
if(!options.windowCloseWarning) {
return;
}
if(sync.state !== sync.SYNC_SYNCING) {
return;
}
var confirmationMessage = "Sync currently underway, are you sure you want to close?";
(event || global.event).returnValue = confirmationMessage;
return confirmationMessage;
}
|
[
"function",
"windowCloseHandler",
"(",
"event",
")",
"{",
"if",
"(",
"!",
"options",
".",
"windowCloseWarning",
")",
"{",
"return",
";",
"}",
"if",
"(",
"sync",
".",
"state",
"!==",
"sync",
".",
"SYNC_SYNCING",
")",
"{",
"return",
";",
"}",
"var",
"confirmationMessage",
"=",
"\"Sync currently underway, are you sure you want to close?\"",
";",
"(",
"event",
"||",
"global",
".",
"event",
")",
".",
"returnValue",
"=",
"confirmationMessage",
";",
"return",
"confirmationMessage",
";",
"}"
] |
Optionally warn when closing the window if still syncing
|
[
"Optionally",
"warn",
"when",
"closing",
"the",
"window",
"if",
"still",
"syncing"
] |
542b8acf595cd37a88ca880b3730befeb7e86743
|
https://github.com/mozilla/makedrive/blob/542b8acf595cd37a88ca880b3730befeb7e86743/client/src/index.js#L151-L164
|
train
|
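A sketch of how a handler like `windowCloseHandler` above is typically registered in the browser; the exact registration in the client module may differ, but `beforeunload` is the event whose handler can set `event.returnValue` or return a string to prompt the user, matching the code:

// Illustrative registration only.
if (typeof window !== 'undefined') {
  window.addEventListener('beforeunload', windowCloseHandler);
}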
crudlio/crudl
|
gulpfile.js
|
continuousBundle
|
function continuousBundle() {
const bundle = b => b.bundle()
.on('error', (err) => {
notifier.notify({
title: 'Browserify Error',
message: err.message,
});
gutil.log('Browserify Error', err);
})
.pipe(source('crudl.js'))
.pipe(gulp.dest(dist))
.on('end', () => {
notifier.notify({
title: 'Browserify',
message: 'OK',
});
})
const opts = assign({}, watchify.args, browersifyOptions, { debug: true });
const bundler = watchify(browserify(opts).transform(babelify.configure(babelifyOptions)));
bundler.on('update', () => bundle(bundler)); // on any dep update, runs the bundler
bundler.on('log', gutil.log); // output build logs to terminal
return bundle(bundler)
}
|
javascript
|
function continuousBundle() {
const bundle = b => b.bundle()
.on('error', (err) => {
notifier.notify({
title: 'Browserify Error',
message: err.message,
});
gutil.log('Browserify Error', err);
})
.pipe(source('crudl.js'))
.pipe(gulp.dest(dist))
.on('end', () => {
notifier.notify({
title: 'Browserify',
message: 'OK',
});
})
const opts = assign({}, watchify.args, browersifyOptions, { debug: true });
const bundler = watchify(browserify(opts).transform(babelify.configure(babelifyOptions)));
bundler.on('update', () => bundle(bundler)); // on any dep update, runs the bundler
bundler.on('log', gutil.log); // output build logs to terminal
return bundle(bundler)
}
|
[
"function",
"continuousBundle",
"(",
")",
"{",
"const",
"bundle",
"=",
"b",
"=>",
"b",
".",
"bundle",
"(",
")",
".",
"on",
"(",
"'error'",
",",
"(",
"err",
")",
"=>",
"{",
"notifier",
".",
"notify",
"(",
"{",
"title",
":",
"'Browserify Error'",
",",
"message",
":",
"err",
".",
"message",
",",
"}",
")",
";",
"gutil",
".",
"log",
"(",
"'Browserify Error'",
",",
"err",
")",
";",
"}",
")",
".",
"pipe",
"(",
"source",
"(",
"'crudl.js'",
")",
")",
".",
"pipe",
"(",
"gulp",
".",
"dest",
"(",
"dist",
")",
")",
".",
"on",
"(",
"'end'",
",",
"(",
")",
"=>",
"{",
"notifier",
".",
"notify",
"(",
"{",
"title",
":",
"'Browserify'",
",",
"message",
":",
"'OK'",
",",
"}",
")",
";",
"}",
")",
"const",
"opts",
"=",
"assign",
"(",
"{",
"}",
",",
"watchify",
".",
"args",
",",
"browersifyOptions",
",",
"{",
"debug",
":",
"true",
"}",
")",
";",
"const",
"bundler",
"=",
"watchify",
"(",
"browserify",
"(",
"opts",
")",
".",
"transform",
"(",
"babelify",
".",
"configure",
"(",
"babelifyOptions",
")",
")",
")",
";",
"bundler",
".",
"on",
"(",
"'update'",
",",
"(",
")",
"=>",
"bundle",
"(",
"bundler",
")",
")",
";",
"bundler",
".",
"on",
"(",
"'log'",
",",
"gutil",
".",
"log",
")",
";",
"return",
"bundle",
"(",
"bundler",
")",
"}"
] |
Watch for changes and bundle
|
[
"Watch",
"for",
"changes",
"and",
"bundle"
] |
2b2e2a6cecd8a0e25f1e097c3098ebaaabe2e361
|
https://github.com/crudlio/crudl/blob/2b2e2a6cecd8a0e25f1e097c3098ebaaabe2e361/gulpfile.js#L47-L70
|
train
|
crudlio/crudl
|
gulpfile.js
|
bundleDevelopment
|
function bundleDevelopment() {
const opts = assign({}, browersifyOptions, { debug: true });
const bundler = browserify(opts).transform(babelify.configure(babelifyOptions));
bundler.on('log', gutil.log); // output build logs to terminal
return bundler.bundle()
.on('error', (err) => { // log errors if they happen
gutil.log('Browserify Error', err);
})
.pipe(source('crudl.js'))
.pipe(gulp.dest(dist))
.on('end', () => {
gutil.log(`Successfully build ${gutil.colors.magenta(`${dist}/crudl.js`)}`)
})
}
|
javascript
|
function bundleDevelopment() {
const opts = assign({}, browersifyOptions, { debug: true });
const bundler = browserify(opts).transform(babelify.configure(babelifyOptions));
bundler.on('log', gutil.log); // output build logs to terminal
return bundler.bundle()
.on('error', (err) => { // log errors if they happen
gutil.log('Browserify Error', err);
})
.pipe(source('crudl.js'))
.pipe(gulp.dest(dist))
.on('end', () => {
gutil.log(`Successfully build ${gutil.colors.magenta(`${dist}/crudl.js`)}`)
})
}
|
[
"function",
"bundleDevelopment",
"(",
")",
"{",
"const",
"opts",
"=",
"assign",
"(",
"{",
"}",
",",
"browersifyOptions",
",",
"{",
"debug",
":",
"true",
"}",
")",
";",
"const",
"bundler",
"=",
"browserify",
"(",
"opts",
")",
".",
"transform",
"(",
"babelify",
".",
"configure",
"(",
"babelifyOptions",
")",
")",
";",
"bundler",
".",
"on",
"(",
"'log'",
",",
"gutil",
".",
"log",
")",
";",
"return",
"bundler",
".",
"bundle",
"(",
")",
".",
"on",
"(",
"'error'",
",",
"(",
"err",
")",
"=>",
"{",
"gutil",
".",
"log",
"(",
"'Browserify Error'",
",",
"err",
")",
";",
"}",
")",
".",
"pipe",
"(",
"source",
"(",
"'crudl.js'",
")",
")",
".",
"pipe",
"(",
"gulp",
".",
"dest",
"(",
"dist",
")",
")",
".",
"on",
"(",
"'end'",
",",
"(",
")",
"=>",
"{",
"gutil",
".",
"log",
"(",
"`",
"${",
"gutil",
".",
"colors",
".",
"magenta",
"(",
"`",
"${",
"dist",
"}",
"`",
")",
"}",
"`",
")",
"}",
")",
"}"
] |
Bundle the dev module
|
[
"Bundle",
"the",
"dev",
"module"
] |
2b2e2a6cecd8a0e25f1e097c3098ebaaabe2e361
|
https://github.com/crudlio/crudl/blob/2b2e2a6cecd8a0e25f1e097c3098ebaaabe2e361/gulpfile.js#L73-L87
|
train
|
crudlio/crudl
|
gulpfile.js
|
bundleProduction
|
function bundleProduction() {
return browserify(browersifyOptions)
.transform(babelify.configure(babelifyOptions))
.transform(envify({ _: 'purge', NODE_ENV: 'production' }), { global: true })
.on('log', gutil.log)
.bundle()
// minify
.pipe(source('crudl.js'))
.pipe(buffer())
.pipe(uglify())
.pipe(rename('crudl.min.js'))
.on('error', gutil.log.bind(gutil, 'uglify:'))
// Prepend the license
.pipe(concat('crudl.min.js'))
.pipe(concat.header(`/* LICENSE: ${packageJSON.license} */\n`))
// Copy to dist
.pipe(gulp.dest(dist))
.on('end', () => {
gutil.log(`Successfully build ${gutil.colors.magenta(`${dist}/crudl.min.js`)}`)
})
}
|
javascript
|
function bundleProduction() {
return browserify(browersifyOptions)
.transform(babelify.configure(babelifyOptions))
.transform(envify({ _: 'purge', NODE_ENV: 'production' }), { global: true })
.on('log', gutil.log)
.bundle()
// minify
.pipe(source('crudl.js'))
.pipe(buffer())
.pipe(uglify())
.pipe(rename('crudl.min.js'))
.on('error', gutil.log.bind(gutil, 'uglify:'))
// Prepend the license
.pipe(concat('crudl.min.js'))
.pipe(concat.header(`/* LICENSE: ${packageJSON.license} */\n`))
// Copy to dist
.pipe(gulp.dest(dist))
.on('end', () => {
gutil.log(`Successfully build ${gutil.colors.magenta(`${dist}/crudl.min.js`)}`)
})
}
|
[
"function",
"bundleProduction",
"(",
")",
"{",
"return",
"browserify",
"(",
"browersifyOptions",
")",
".",
"transform",
"(",
"babelify",
".",
"configure",
"(",
"babelifyOptions",
")",
")",
".",
"transform",
"(",
"envify",
"(",
"{",
"_",
":",
"'purge'",
",",
"NODE_ENV",
":",
"'production'",
"}",
")",
",",
"{",
"global",
":",
"true",
"}",
")",
".",
"on",
"(",
"'log'",
",",
"gutil",
".",
"log",
")",
".",
"bundle",
"(",
")",
".",
"pipe",
"(",
"source",
"(",
"'crudl.js'",
")",
")",
".",
"pipe",
"(",
"buffer",
"(",
")",
")",
".",
"pipe",
"(",
"uglify",
"(",
")",
")",
".",
"pipe",
"(",
"rename",
"(",
"'crudl.min.js'",
")",
")",
".",
"on",
"(",
"'error'",
",",
"gutil",
".",
"log",
".",
"bind",
"(",
"gutil",
",",
"'uglify:'",
")",
")",
".",
"pipe",
"(",
"concat",
"(",
"'crudl.min.js'",
")",
")",
".",
"pipe",
"(",
"concat",
".",
"header",
"(",
"`",
"${",
"packageJSON",
".",
"license",
"}",
"\\n",
"`",
")",
")",
".",
"pipe",
"(",
"gulp",
".",
"dest",
"(",
"dist",
")",
")",
".",
"on",
"(",
"'end'",
",",
"(",
")",
"=>",
"{",
"gutil",
".",
"log",
"(",
"`",
"${",
"gutil",
".",
"colors",
".",
"magenta",
"(",
"`",
"${",
"dist",
"}",
"`",
")",
"}",
"`",
")",
"}",
")",
"}"
] |
Bundle task for the production environment
|
[
"Bundle",
"task",
"for",
"the",
"production",
"environment"
] |
2b2e2a6cecd8a0e25f1e097c3098ebaaabe2e361
|
https://github.com/crudlio/crudl/blob/2b2e2a6cecd8a0e25f1e097c3098ebaaabe2e361/gulpfile.js#L90-L114
|
train
|
crudlio/crudl
|
gulpfile.js
|
sassWatch
|
function sassWatch() {
return gulp
// ... and compile if necessary
.watch(sassSrcFiles, ['sass-compile'])
.on('change', (event) => {
gutil.log(`File ${event.path} was ${event.type}, running tasks...`);
});
}
|
javascript
|
function sassWatch() {
return gulp
// ... and compile if necessary
.watch(sassSrcFiles, ['sass-compile'])
.on('change', (event) => {
gutil.log(`File ${event.path} was ${event.type}, running tasks...`);
});
}
|
[
"function",
"sassWatch",
"(",
")",
"{",
"return",
"gulp",
".",
"watch",
"(",
"sassSrcFiles",
",",
"[",
"'sass-compile'",
"]",
")",
".",
"on",
"(",
"'change'",
",",
"(",
"event",
")",
"=>",
"{",
"gutil",
".",
"log",
"(",
"`",
"${",
"event",
".",
"path",
"}",
"${",
"event",
".",
"type",
"}",
"`",
")",
";",
"}",
")",
";",
"}"
] |
Watch sass files ...
|
[
"Watch",
"sass",
"files",
"..."
] |
2b2e2a6cecd8a0e25f1e097c3098ebaaabe2e361
|
https://github.com/crudlio/crudl/blob/2b2e2a6cecd8a0e25f1e097c3098ebaaabe2e361/gulpfile.js#L129-L136
|
train
|
crudlio/crudl
|
src/reducers/core.js
|
transit
|
function transit(state, variable, value) {
// FIXME: deep copying of the whole state can be eventually slow...
const newState = cloneDeep(state)
set(newState, variable, value)
return newState
}
|
javascript
|
function transit(state, variable, value) {
// FIXME: deep copying of the whole state can be eventually slow...
const newState = cloneDeep(state)
set(newState, variable, value)
return newState
}
|
[
"function",
"transit",
"(",
"state",
",",
"variable",
",",
"value",
")",
"{",
"const",
"newState",
"=",
"cloneDeep",
"(",
"state",
")",
"set",
"(",
"newState",
",",
"variable",
",",
"value",
")",
"return",
"newState",
"}"
] |
Returns a copy of the state with a new value of the given variable
|
[
"Returns",
"a",
"copy",
"of",
"the",
"state",
"with",
"a",
"new",
"value",
"of",
"the",
"given",
"variable"
] |
2b2e2a6cecd8a0e25f1e097c3098ebaaabe2e361
|
https://github.com/crudlio/crudl/blob/2b2e2a6cecd8a0e25f1e097c3098ebaaabe2e361/src/reducers/core.js#L28-L33
|
train
|
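Because `transit` above is a pure helper, its behaviour is easy to show in isolation. A minimal sketch, assuming the lodash `cloneDeep`/`set` imports the module relies on; the state shape and path are invented for illustration:

// Standalone copy of transit() plus a tiny usage example.
const { cloneDeep, set } = require('lodash')

function transit(state, variable, value) {
    const newState = cloneDeep(state)
    set(newState, variable, value)
    return newState
}

const state = { filters: { activeTab: 0 } }
const next = transit(state, 'filters.activeTab', 2)

console.log(next.filters.activeTab)  // 2
console.log(state.filters.activeTab) // 0 -- the original state object is untouched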
mongodb-js/data-service
|
lib/instance-detail-helper.js
|
getStats
|
function getStats(results, done) {
const databases = results.databases;
const keys = ['document_count', 'storage_size', 'index_count', 'index_size'];
const stats = {};
keys.map(function(k) {
stats[k] = 0;
});
databases.map(function(db) {
keys.map(function(k) {
stats[k] += db[k];
});
});
done(null, stats);
}
|
javascript
|
function getStats(results, done) {
const databases = results.databases;
const keys = ['document_count', 'storage_size', 'index_count', 'index_size'];
const stats = {};
keys.map(function(k) {
stats[k] = 0;
});
databases.map(function(db) {
keys.map(function(k) {
stats[k] += db[k];
});
});
done(null, stats);
}
|
[
"function",
"getStats",
"(",
"results",
",",
"done",
")",
"{",
"const",
"databases",
"=",
"results",
".",
"databases",
";",
"const",
"keys",
"=",
"[",
"'document_count'",
",",
"'storage_size'",
",",
"'index_count'",
",",
"'index_size'",
"]",
";",
"const",
"stats",
"=",
"{",
"}",
";",
"keys",
".",
"map",
"(",
"function",
"(",
"k",
")",
"{",
"stats",
"[",
"k",
"]",
"=",
"0",
";",
"}",
")",
";",
"databases",
".",
"map",
"(",
"function",
"(",
"db",
")",
"{",
"keys",
".",
"map",
"(",
"function",
"(",
"k",
")",
"{",
"stats",
"[",
"k",
"]",
"+=",
"db",
"[",
"k",
"]",
";",
"}",
")",
";",
"}",
")",
";",
"done",
"(",
"null",
",",
"stats",
")",
";",
"}"
] |
aggregates stats across all found databases
@param {Object} results async.auto results
@param {Function} done callback
|
[
"aggregates",
"stats",
"across",
"all",
"found",
"databases"
] |
49f0e1470df6d0675f6328a2a0a2a58ac31640e9
|
https://github.com/mongodb-js/data-service/blob/49f0e1470df6d0675f6328a2a0a2a58ac31640e9/lib/instance-detail-helper.js#L25-L39
|
train
|
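To make the aggregation in `getStats` above concrete, a small sketch with invented numbers; it assumes the `getStats` function from the record is in scope (it is an internal helper, not exported on its own):

// Hypothetical async.auto-style results object.
const results = {
  databases: [
    { document_count: 10, storage_size: 2048, index_count: 1, index_size: 512 },
    { document_count: 5, storage_size: 1024, index_count: 2, index_size: 256 }
  ]
};

getStats(results, function(err, stats) {
  // stats === { document_count: 15, storage_size: 3072, index_count: 3, index_size: 768 }
  console.log(err, stats);
});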
mongodb-js/data-service
|
lib/instance-detail-helper.js
|
getInstanceDetail
|
function getInstanceDetail(client, db, done) {
const tasks = {
client: attach.bind(null, client),
db: attach.bind(null, db),
userInfo: ['client', 'db', getUserInfo],
host: ['client', 'db', getHostInfo],
build: ['client', 'db', getBuildInfo],
cmdLineOpts: ['client', 'db', getCmdLineOpts],
genuineMongoDB: ['build', 'cmdLineOpts', getGenuineMongoDB],
listDatabases: ['client', 'db', 'userInfo', listDatabases],
allowedDatabases: ['userInfo', getAllowedDatabases],
databases: [
'client',
'db',
'listDatabases',
'allowedDatabases',
getDatabases
],
listCollections: ['client', 'db', 'databases', listCollections],
allowedCollections: ['userInfo', getAllowedCollections],
collections: [
'client',
'db',
'listCollections',
'allowedCollections',
getCollections
],
hierarchy: ['databases', 'collections', getHierarchy],
stats: ['databases', getStats]
};
async.auto(tasks, function(err, results) {
if (err) {
// report error
return done(err);
}
// cleanup
results = omit(results, [
'db',
'listDatabases',
'allowedDatabases',
'userInfo',
'listCollections',
'allowedCollections',
'cmdLineOpts'
]);
return done(null, results);
});
}
|
javascript
|
function getInstanceDetail(client, db, done) {
const tasks = {
client: attach.bind(null, client),
db: attach.bind(null, db),
userInfo: ['client', 'db', getUserInfo],
host: ['client', 'db', getHostInfo],
build: ['client', 'db', getBuildInfo],
cmdLineOpts: ['client', 'db', getCmdLineOpts],
genuineMongoDB: ['build', 'cmdLineOpts', getGenuineMongoDB],
listDatabases: ['client', 'db', 'userInfo', listDatabases],
allowedDatabases: ['userInfo', getAllowedDatabases],
databases: [
'client',
'db',
'listDatabases',
'allowedDatabases',
getDatabases
],
listCollections: ['client', 'db', 'databases', listCollections],
allowedCollections: ['userInfo', getAllowedCollections],
collections: [
'client',
'db',
'listCollections',
'allowedCollections',
getCollections
],
hierarchy: ['databases', 'collections', getHierarchy],
stats: ['databases', getStats]
};
async.auto(tasks, function(err, results) {
if (err) {
// report error
return done(err);
}
// cleanup
results = omit(results, [
'db',
'listDatabases',
'allowedDatabases',
'userInfo',
'listCollections',
'allowedCollections',
'cmdLineOpts'
]);
return done(null, results);
});
}
|
[
"function",
"getInstanceDetail",
"(",
"client",
",",
"db",
",",
"done",
")",
"{",
"const",
"tasks",
"=",
"{",
"client",
":",
"attach",
".",
"bind",
"(",
"null",
",",
"client",
")",
",",
"db",
":",
"attach",
".",
"bind",
"(",
"null",
",",
"db",
")",
",",
"userInfo",
":",
"[",
"'client'",
",",
"'db'",
",",
"getUserInfo",
"]",
",",
"host",
":",
"[",
"'client'",
",",
"'db'",
",",
"getHostInfo",
"]",
",",
"build",
":",
"[",
"'client'",
",",
"'db'",
",",
"getBuildInfo",
"]",
",",
"cmdLineOpts",
":",
"[",
"'client'",
",",
"'db'",
",",
"getCmdLineOpts",
"]",
",",
"genuineMongoDB",
":",
"[",
"'build'",
",",
"'cmdLineOpts'",
",",
"getGenuineMongoDB",
"]",
",",
"listDatabases",
":",
"[",
"'client'",
",",
"'db'",
",",
"'userInfo'",
",",
"listDatabases",
"]",
",",
"allowedDatabases",
":",
"[",
"'userInfo'",
",",
"getAllowedDatabases",
"]",
",",
"databases",
":",
"[",
"'client'",
",",
"'db'",
",",
"'listDatabases'",
",",
"'allowedDatabases'",
",",
"getDatabases",
"]",
",",
"listCollections",
":",
"[",
"'client'",
",",
"'db'",
",",
"'databases'",
",",
"listCollections",
"]",
",",
"allowedCollections",
":",
"[",
"'userInfo'",
",",
"getAllowedCollections",
"]",
",",
"collections",
":",
"[",
"'client'",
",",
"'db'",
",",
"'listCollections'",
",",
"'allowedCollections'",
",",
"getCollections",
"]",
",",
"hierarchy",
":",
"[",
"'databases'",
",",
"'collections'",
",",
"getHierarchy",
"]",
",",
"stats",
":",
"[",
"'databases'",
",",
"getStats",
"]",
"}",
";",
"async",
".",
"auto",
"(",
"tasks",
",",
"function",
"(",
"err",
",",
"results",
")",
"{",
"if",
"(",
"err",
")",
"{",
"return",
"done",
"(",
"err",
")",
";",
"}",
"results",
"=",
"omit",
"(",
"results",
",",
"[",
"'db'",
",",
"'listDatabases'",
",",
"'allowedDatabases'",
",",
"'userInfo'",
",",
"'listCollections'",
",",
"'allowedCollections'",
",",
"'cmdLineOpts'",
"]",
")",
";",
"return",
"done",
"(",
"null",
",",
"results",
")",
";",
"}",
")",
";",
"}"
] |
Retrieves many instance details, such as the build and host info,
databases and collections which the user has access to.
@param {MongoClient} client - The client.
@param {DB} db - database handle from the node driver
@param {Function} done - callback
|
[
"Retrieves",
"many",
"instance",
"details",
"such",
"as",
"the",
"build",
"and",
"host",
"info",
"databases",
"and",
"collections",
"which",
"the",
"user",
"has",
"access",
"to",
"."
] |
49f0e1470df6d0675f6328a2a0a2a58ac31640e9
|
https://github.com/mongodb-js/data-service/blob/49f0e1470df6d0675f6328a2a0a2a58ac31640e9/lib/instance-detail-helper.js#L549-L602
|
train
|
reaktor/express-gauth
|
index.js
|
saveReturnUrlToSession
|
function saveReturnUrlToSession(req, isReturnUrlAllowed) {
const referrer = req.get("referrer")
const isInternalRequest = Boolean(
referrer && url.parse(referrer).hostname === req.hostname
)
const isUrlAllowed = isReturnUrlAllowed(req.originalUrl)
const isSessionSet = Boolean(req.session && req.session.returnTo)
if (!isUrlAllowed || isInternalRequest || isSessionSet) {
return
}
req.session.returnTo = req.originalUrl
}
|
javascript
|
function saveReturnUrlToSession(req, isReturnUrlAllowed) {
const referrer = req.get("referrer")
const isInternalRequest = Boolean(
referrer && url.parse(referrer).hostname === req.hostname
)
const isUrlAllowed = isReturnUrlAllowed(req.originalUrl)
const isSessionSet = Boolean(req.session && req.session.returnTo)
if (!isUrlAllowed || isInternalRequest || isSessionSet) {
return
}
req.session.returnTo = req.originalUrl
}
|
[
"function",
"saveReturnUrlToSession",
"(",
"req",
",",
"isReturnUrlAllowed",
")",
"{",
"const",
"referrer",
"=",
"req",
".",
"get",
"(",
"\"referrer\"",
")",
"const",
"isInternalRequest",
"=",
"Boolean",
"(",
"referrer",
"&&",
"url",
".",
"parse",
"(",
"referrer",
")",
".",
"hostname",
"===",
"req",
".",
"hostname",
")",
"const",
"isUrlAllowed",
"=",
"isReturnUrlAllowed",
"(",
"req",
".",
"originalUrl",
")",
"const",
"isSessionSet",
"=",
"Boolean",
"(",
"req",
".",
"session",
"&&",
"req",
".",
"session",
".",
"returnTo",
")",
"if",
"(",
"!",
"isUrlAllowed",
"||",
"isInternalRequest",
"||",
"isSessionSet",
")",
"{",
"return",
"}",
"req",
".",
"session",
".",
"returnTo",
"=",
"req",
".",
"originalUrl",
"}"
] |
Browser might try to fetch assets already before the "main request" reaches our server. We must tell apart these locations from where the user really tries to go. Also, we should set returnTo only once per session.
|
[
"Browser",
"might",
"try",
"to",
"fetch",
"assets",
"already",
"before",
"the",
"main",
"request",
"reaches",
"our",
"server",
".",
"We",
"must",
"tell",
"apart",
"these",
"locations",
"from",
"where",
"the",
"user",
"really",
"tries",
"to",
"go",
".",
"Also",
"we",
"should",
"set",
"returnTo",
"only",
"once",
"per",
"session",
"."
] |
d23d86d65f68b695431ce99840e7ea7311c0b675
|
https://github.com/reaktor/express-gauth/blob/d23d86d65f68b695431ce99840e7ea7311c0b675/index.js#L175-L189
|
train
|
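A sketch of how a helper like `saveReturnUrlToSession` above could be called from Express middleware; the `app` wiring and the allow-list predicate are invented for illustration, and the real module invokes it from its own middleware:

// Illustrative middleware only; assumes express-session has already populated req.session.
app.use(function(req, res, next) {
  saveReturnUrlToSession(req, function isReturnUrlAllowed(originalUrl) {
    // Hypothetical rule: never remember asset requests as the post-login destination.
    return !originalUrl.startsWith('/assets/');
  });
  next();
});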
mongodb-js/runner
|
lib/index.js
|
stop
|
function stop(opts, done) {
debug('stopping...');
killIfRunning(opts, function(err) {
debug('Any running workers have been sent a stop command');
done(err);
});
}
|
javascript
|
function stop(opts, done) {
debug('stopping...');
killIfRunning(opts, function(err) {
debug('Any running workers have been sent a stop command');
done(err);
});
}
|
[
"function",
"stop",
"(",
"opts",
",",
"done",
")",
"{",
"debug",
"(",
"'stopping...'",
")",
";",
"killIfRunning",
"(",
"opts",
",",
"function",
"(",
"err",
")",
"{",
"debug",
"(",
"'Any running workers have been sent a stop command'",
")",
";",
"done",
"(",
"err",
")",
";",
"}",
")",
";",
"}"
] |
Cleans up artifacts from this specific run and then kills the process.
@param {Object} opts
@param {Function} done
@api private
|
[
"Cleans",
"up",
"artifacts",
"from",
"this",
"specific",
"run",
"and",
"then",
"kills",
"the",
"process",
"."
] |
a1b17754a6d079e9059e0d9b9d7efe709c8b0d08
|
https://github.com/mongodb-js/runner/blob/a1b17754a6d079e9059e0d9b9d7efe709c8b0d08/lib/index.js#L254-L261
|
train
|
mongodb-js/runner
|
lib/index.js
|
configure
|
function configure(opts, done) {
delete opts._;
opts = defaults(opts, {
topology: process.env.MONGODB_TOPOLOGY || 'standalone'
});
opts = defaults(opts, {
name: opts.topology
});
opts = defaults(opts, {
logpath: untildify(
process.env.MONGODB_LOGPATH ||
format('~/.mongodb/runner/%s.log', opts.name)
),
pidpath: untildify(process.env.MONGODB_PIDPATH || '~/.mongodb/runner/pid'),
port: process.env.MONGODB_PORT || 27017,
mongodBin: process.env.MONGOD_BIN || 'mongod',
mongosBin: process.env.MONGOS_BIN || 'mongos',
storageEngine: process.env.MONGODB_STORAGE_ENGINE,
auth_mechanism: process.env.MONGODB_AUTH_MECHANISM || 'none',
purge: process.env.MONGODB_PURGE || true
});
// MongoDB < 3.0 doesn't understand the storageEngine argument and
// will fail to start if provided!
if (opts.version < '3.0') {
delete opts.storageEngine;
}
if (opts.topology === 'replicaset') {
opts = defaults(opts, {
arbiters: process.env.MONGODB_ARBITERS || 0,
secondaries: process.env.MONGODB_SECONDARIES || 2,
passives: process.env.MONGODB_PASSIVES || 0
});
}
if (opts.topology === 'cluster') {
opts = defaults(opts, {
shards: process.env.MONGODB_SHARDS || 1, // -> replsets
routers: process.env.MONGODB_ROUTERS || 1, // -> mongoses
configs: process.env.MONGODB_CONFIGS || 1,
shardPort: process.env.MONGODB_SHARDS_PORT || 31000, // -> replsetStartPort
configPort: process.env.MONGODB_CONFIGS_PORT || 35000, // -> configStartPort
arbiters: process.env.MONGODB_ARBITERS || 0,
secondaries: process.env.MONGODB_SECONDARIES || 2,
passives: process.env.MONGODB_PASSIVES || 0
});
}
debug('Ready to process spec', opts);
if (opts.action === 'stop') {
return done();
}
async.series(
[
mkdirp.bind(null, opts.pidpath),
getDbPath.bind(null, opts),
createLogsDirectory.bind(null, opts)
],
done
);
}
|
javascript
|
function configure(opts, done) {
delete opts._;
opts = defaults(opts, {
topology: process.env.MONGODB_TOPOLOGY || 'standalone'
});
opts = defaults(opts, {
name: opts.topology
});
opts = defaults(opts, {
logpath: untildify(
process.env.MONGODB_LOGPATH ||
format('~/.mongodb/runner/%s.log', opts.name)
),
pidpath: untildify(process.env.MONGODB_PIDPATH || '~/.mongodb/runner/pid'),
port: process.env.MONGODB_PORT || 27017,
mongodBin: process.env.MONGOD_BIN || 'mongod',
mongosBin: process.env.MONGOS_BIN || 'mongos',
storageEngine: process.env.MONGODB_STORAGE_ENGINE,
auth_mechanism: process.env.MONGODB_AUTH_MECHANISM || 'none',
purge: process.env.MONGODB_PURGE || true
});
// MongoDB < 3.0 doesn't understand the storageEngine argument and
// will fail to start if provided!
if (opts.version < '3.0') {
delete opts.storageEngine;
}
if (opts.topology === 'replicaset') {
opts = defaults(opts, {
arbiters: process.env.MONGODB_ARBITERS || 0,
secondaries: process.env.MONGODB_SECONDARIES || 2,
passives: process.env.MONGODB_PASSIVES || 0
});
}
if (opts.topology === 'cluster') {
opts = defaults(opts, {
shards: process.env.MONGODB_SHARDS || 1, // -> replsets
routers: process.env.MONGODB_ROUTERS || 1, // -> mongoses
configs: process.env.MONGODB_CONFIGS || 1,
shardPort: process.env.MONGODB_SHARDS_PORT || 31000, // -> replsetStartPort
configPort: process.env.MONGODB_CONFIGS_PORT || 35000, // -> configStartPort
arbiters: process.env.MONGODB_ARBITERS || 0,
secondaries: process.env.MONGODB_SECONDARIES || 2,
passives: process.env.MONGODB_PASSIVES || 0
});
}
debug('Ready to process spec', opts);
if (opts.action === 'stop') {
return done();
}
async.series(
[
mkdirp.bind(null, opts.pidpath),
getDbPath.bind(null, opts),
createLogsDirectory.bind(null, opts)
],
done
);
}
|
[
"function",
"configure",
"(",
"opts",
",",
"done",
")",
"{",
"delete",
"opts",
".",
"_",
";",
"opts",
"=",
"defaults",
"(",
"opts",
",",
"{",
"topology",
":",
"process",
".",
"env",
".",
"MONGODB_TOPOLOGY",
"||",
"'standalone'",
"}",
")",
";",
"opts",
"=",
"defaults",
"(",
"opts",
",",
"{",
"name",
":",
"opts",
".",
"topology",
"}",
")",
";",
"opts",
"=",
"defaults",
"(",
"opts",
",",
"{",
"logpath",
":",
"untildify",
"(",
"process",
".",
"env",
".",
"MONGODB_LOGPATH",
"||",
"format",
"(",
"'~/.mongodb/runner/%s.log'",
",",
"opts",
".",
"name",
")",
")",
",",
"pidpath",
":",
"untildify",
"(",
"process",
".",
"env",
".",
"MONGODB_PIDPATH",
"||",
"'~/.mongodb/runner/pid'",
")",
",",
"port",
":",
"process",
".",
"env",
".",
"MONGODB_PORT",
"||",
"27017",
",",
"mongodBin",
":",
"process",
".",
"env",
".",
"MONGOD_BIN",
"||",
"'mongod'",
",",
"mongosBin",
":",
"process",
".",
"env",
".",
"MONGOS_BIN",
"||",
"'mongos'",
",",
"storageEngine",
":",
"process",
".",
"env",
".",
"MONGODB_STORAGE_ENGINE",
",",
"auth_mechanism",
":",
"process",
".",
"env",
".",
"MONGODB_AUTH_MECHANISM",
"||",
"'none'",
",",
"purge",
":",
"process",
".",
"env",
".",
"MONGODB_PURGE",
"||",
"true",
"}",
")",
";",
"if",
"(",
"opts",
".",
"version",
"<",
"'3.0'",
")",
"{",
"delete",
"opts",
".",
"storageEngine",
";",
"}",
"if",
"(",
"opts",
".",
"topology",
"===",
"'replicaset'",
")",
"{",
"opts",
"=",
"defaults",
"(",
"opts",
",",
"{",
"arbiters",
":",
"process",
".",
"env",
".",
"MONGODB_ARBITERS",
"||",
"0",
",",
"secondaries",
":",
"process",
".",
"env",
".",
"MONGODB_SECONDARIES",
"||",
"2",
",",
"passives",
":",
"process",
".",
"env",
".",
"MONGODB_PASSIVES",
"||",
"0",
"}",
")",
";",
"}",
"if",
"(",
"opts",
".",
"topology",
"===",
"'cluster'",
")",
"{",
"opts",
"=",
"defaults",
"(",
"opts",
",",
"{",
"shards",
":",
"process",
".",
"env",
".",
"MONGODB_SHARDS",
"||",
"1",
",",
"routers",
":",
"process",
".",
"env",
".",
"MONGODB_ROUTERS",
"||",
"1",
",",
"configs",
":",
"process",
".",
"env",
".",
"MONGODB_CONFIGS",
"||",
"1",
",",
"shardPort",
":",
"process",
".",
"env",
".",
"MONGODB_SHARDS_PORT",
"||",
"31000",
",",
"configPort",
":",
"process",
".",
"env",
".",
"MONGODB_CONFIGS_PORT",
"||",
"35000",
",",
"arbiters",
":",
"process",
".",
"env",
".",
"MONGODB_ARBITERS",
"||",
"0",
",",
"secondaries",
":",
"process",
".",
"env",
".",
"MONGODB_SECONDARIES",
"||",
"2",
",",
"passives",
":",
"process",
".",
"env",
".",
"MONGODB_PASSIVES",
"||",
"0",
"}",
")",
";",
"}",
"debug",
"(",
"'Ready to process spec'",
",",
"opts",
")",
";",
"if",
"(",
"opts",
".",
"action",
"===",
"'stop'",
")",
"{",
"return",
"done",
"(",
")",
";",
"}",
"async",
".",
"series",
"(",
"[",
"mkdirp",
".",
"bind",
"(",
"null",
",",
"opts",
".",
"pidpath",
")",
",",
"getDbPath",
".",
"bind",
"(",
"null",
",",
"opts",
")",
",",
"createLogsDirectory",
".",
"bind",
"(",
"null",
",",
"opts",
")",
"]",
",",
"done",
")",
";",
"}"
] |
Populate `opts` as specified by environment variables or defaults.
TODO (imlucas): Document options.
@param {Object} opts - user specified options
@param {Function} done - callback
@api private
|
[
"Populate",
"opts",
"as",
"specified",
"by",
"environment",
"variables",
"or",
"defaults",
"."
] |
a1b17754a6d079e9059e0d9b9d7efe709c8b0d08
|
https://github.com/mongodb-js/runner/blob/a1b17754a6d079e9059e0d9b9d7efe709c8b0d08/lib/index.js#L316-L381
|
train
|
mongodb-js/runner
|
mocha/before.js
|
mongodb_runner_mocha_before
|
function mongodb_runner_mocha_before(opts) {
if (typeof opts === 'function') {
// So you can just do `before(require('mongodb-runner/mocha/before'));`
return mongodb_runner_mocha_before({}).apply(this, arguments);
}
opts = opts || {};
defaults(opts, {
port: 27017,
timeout: 10000,
slow: 10000
});
return function(done) {
this.timeout(opts.timeout);
this.slow(opts.slow);
debug('checking if mongodb is running...');
running(function(err, res) {
if (err) {
debug('mongodb detection failed so going to try and start one');
runner({
port: opts.port,
action: 'start'
}, done);
return;
}
if (res && res.length > 0) {
if (res[0].port === opts.port) {
process.env.MONGODB_RUNNER_MOCHA_SKIP_STOP = '1';
debug('mongodb already running on `localhost:%s` '
+ 'so we won\'t start a new one', opts.port);
done();
return;
}
debug('mongodb already running, but its on '
+ '`localhost:%d` and we need `localhost:%s` for '
+ 'the tests so starting up a new one.', res[0].port, opts.port);
runner({
action: 'start',
port: opts.port
}, done);
return;
}
debug('no mongodb running so starting one up');
runner({
action: 'start',
port: opts.port
}, done);
return;
});
};
}
|
javascript
|
function mongodb_runner_mocha_before(opts) {
if (typeof opts === 'function') {
// So you can just do `before(require('mongodb-runner/mocha/before'));`
return mongodb_runner_mocha_before({}).apply(this, arguments);
}
opts = opts || {};
defaults(opts, {
port: 27017,
timeout: 10000,
slow: 10000
});
return function(done) {
this.timeout(opts.timeout);
this.slow(opts.slow);
debug('checking if mongodb is running...');
running(function(err, res) {
if (err) {
debug('mongodb detection failed so going to try and start one');
runner({
port: opts.port,
action: 'start'
}, done);
return;
}
if (res && res.length > 0) {
if (res[0].port === opts.port) {
process.env.MONGODB_RUNNER_MOCHA_SKIP_STOP = '1';
debug('mongodb already running on `localhost:%s` '
+ 'so we won\'t start a new one', opts.port);
done();
return;
}
debug('mongodb already running, but its on '
+ '`localhost:%d` and we need `localhost:%s` for '
+ 'the tests so starting up a new one.', res[0].port, opts.port);
runner({
action: 'start',
port: opts.port
}, done);
return;
}
debug('no mongodb running so starting one up');
runner({
action: 'start',
port: opts.port
}, done);
return;
});
};
}
|
[
"function",
"mongodb_runner_mocha_before",
"(",
"opts",
")",
"{",
"if",
"(",
"typeof",
"opts",
"===",
"'function'",
")",
"{",
"return",
"mongodb_runner_mocha_before",
"(",
"{",
"}",
")",
".",
"apply",
"(",
"this",
",",
"arguments",
")",
";",
"}",
"opts",
"=",
"opts",
"||",
"{",
"}",
";",
"defaults",
"(",
"opts",
",",
"{",
"port",
":",
"27017",
",",
"timeout",
":",
"10000",
",",
"slow",
":",
"10000",
"}",
")",
";",
"return",
"function",
"(",
"done",
")",
"{",
"this",
".",
"timeout",
"(",
"opts",
".",
"timeout",
")",
";",
"this",
".",
"slow",
"(",
"opts",
".",
"slow",
")",
";",
"debug",
"(",
"'checking if mongodb is running...'",
")",
";",
"running",
"(",
"function",
"(",
"err",
",",
"res",
")",
"{",
"if",
"(",
"err",
")",
"{",
"debug",
"(",
"'mongodb detection failed so going to try and start one'",
")",
";",
"runner",
"(",
"{",
"port",
":",
"opts",
".",
"port",
",",
"action",
":",
"'start'",
"}",
",",
"done",
")",
";",
"return",
";",
"}",
"if",
"(",
"res",
"&&",
"res",
".",
"length",
">",
"0",
")",
"{",
"if",
"(",
"res",
"[",
"0",
"]",
".",
"port",
"===",
"opts",
".",
"port",
")",
"{",
"process",
".",
"env",
".",
"MONGODB_RUNNER_MOCHA_SKIP_STOP",
"=",
"'1'",
";",
"debug",
"(",
"'mongodb already running on `localhost:%s` '",
"+",
"'so we won\\'t start a new one'",
",",
"\\'",
")",
";",
"opts",
".",
"port",
"done",
"(",
")",
";",
"}",
"return",
";",
"debug",
"(",
"'mongodb already running, but its on '",
"+",
"'`localhost:%d` and we need `localhost:%s` for '",
"+",
"'the tests so starting up a new one.'",
",",
"res",
"[",
"0",
"]",
".",
"port",
",",
"opts",
".",
"port",
")",
";",
"runner",
"(",
"{",
"action",
":",
"'start'",
",",
"port",
":",
"opts",
".",
"port",
"}",
",",
"done",
")",
";",
"}",
"return",
";",
"debug",
"(",
"'no mongodb running so starting one up'",
")",
";",
"runner",
"(",
"{",
"action",
":",
"'start'",
",",
"port",
":",
"opts",
".",
"port",
"}",
",",
"done",
")",
";",
"}",
")",
";",
"}",
";",
"}"
] |
Start MongoDB on demand before running your tests.
@example
describe('my app', function(){
before(require('mongodb-runner/mocha/before'));
it('should connect', function(done){
require('mongodb').connect('mongodb://localhost:27017/', done);
});
});
@param {Object|Function} [opts] - options or the `done` callback.
@return {Function} - Callback for mocha bdd `before` hook.
|
[
"Start",
"MongoDB",
"on",
"demand",
"before",
"running",
"your",
"tests",
"."
] |
a1b17754a6d079e9059e0d9b9d7efe709c8b0d08
|
https://github.com/mongodb-js/runner/blob/a1b17754a6d079e9059e0d9b9d7efe709c8b0d08/mocha/before.js#L20-L73
|
train
|
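Beyond the `@example` in the docstring above, the code also accepts an options object (it returns the hook function when `opts` is not itself a function). A hedged sketch; the port and timeout values are arbitrary, and the connection call mirrors the one in the docstring:

// In a mocha spec file (mocha provides describe/before/it as globals).
describe('my app on a non-default port', function() {
  before(require('mongodb-runner/mocha/before')({ port: 27018, timeout: 20000 }));

  it('should connect', function(done) {
    require('mongodb').connect('mongodb://localhost:27018/', done);
  });
});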
mongodb-js/runner
|
mocha/after.js
|
mongodb_runner_mocha_after
|
function mongodb_runner_mocha_after(opts) {
if (typeof opts === 'function') {
// So you can just do `after(require('mongodb-runner/mocha/after'));`
return mongodb_runner_mocha_after({}).apply(this, arguments);
}
opts = opts || {};
defaults(opts, {
port: 27017
});
return function(done) {
if (process.env.MONGODB_RUNNER_MOCHA_SKIP_STOP) {
debug('not stopping mongodb as it was not started by mocha/before');
process.env.MONGODB_RUNNER_MOCHA_SKIP_STOP = undefined;
done();
return;
}
debug('stopping mongodb...');
runner({
port: opts.port,
action: 'stop'
}, done);
};
}
|
javascript
|
function mongodb_runner_mocha_after(opts) {
if (typeof opts === 'function') {
// So you can just do `after(require('mongodb-runner/mocha/after'));`
return mongodb_runner_mocha_after({}).apply(this, arguments);
}
opts = opts || {};
defaults(opts, {
port: 27017
});
return function(done) {
if (process.env.MONGODB_RUNNER_MOCHA_SKIP_STOP) {
debug('not stopping mongodb as it was not started by mocha/before');
process.env.MONGODB_RUNNER_MOCHA_SKIP_STOP = undefined;
done();
return;
}
debug('stopping mongodb...');
runner({
port: opts.port,
action: 'stop'
}, done);
};
}
|
[
"function",
"mongodb_runner_mocha_after",
"(",
"opts",
")",
"{",
"if",
"(",
"typeof",
"opts",
"===",
"'function'",
")",
"{",
"return",
"mongodb_runner_mocha_after",
"(",
"{",
"}",
")",
".",
"apply",
"(",
"this",
",",
"arguments",
")",
";",
"}",
"opts",
"=",
"opts",
"||",
"{",
"}",
";",
"defaults",
"(",
"opts",
",",
"{",
"port",
":",
"27017",
"}",
")",
";",
"return",
"function",
"(",
"done",
")",
"{",
"if",
"(",
"process",
".",
"env",
".",
"MONGODB_RUNNER_MOCHA_SKIP_STOP",
")",
"{",
"debug",
"(",
"'not stopping mongodb as it was not started by mocha/before'",
")",
";",
"process",
".",
"env",
".",
"MONGODB_RUNNER_MOCHA_SKIP_STOP",
"=",
"undefined",
";",
"done",
"(",
")",
";",
"return",
";",
"}",
"debug",
"(",
"'stopping mongodb...'",
")",
";",
"runner",
"(",
"{",
"port",
":",
"opts",
".",
"port",
",",
"action",
":",
"'stop'",
"}",
",",
"done",
")",
";",
"}",
";",
"}"
] |
Stop MongoDB on demand after running your tests.
@example
describe('my app', function(){
before(require('mongodb-runner/mocha/before'));
after(require('mongodb-runner/mocha/after'));
it('should connect', function(done){
require('mongodb').connect('mongodb://localhost:27017/', done);
});
});
@param {Object|Function} [opts] - options or the `done` callback.
@return {Function} - Callback for mocha bdd `after` hook.
|
[
"Stop",
"MongoDB",
"on",
"demand",
"after",
"running",
"your",
"tests",
"."
] |
a1b17754a6d079e9059e0d9b9d7efe709c8b0d08
|
https://github.com/mongodb-js/runner/blob/a1b17754a6d079e9059e0d9b9d7efe709c8b0d08/mocha/after.js#L20-L43
|
train
|
salesforce/global-tunnel
|
index.js
|
tryParse
|
function tryParse(url) {
if (!url) {
return null;
}
var conf = {};
var parsed = urlParse(url);
conf.protocol = parsed.protocol;
conf.host = parsed.hostname;
conf.port = parseInt(parsed.port,10);
return conf;
}
|
javascript
|
function tryParse(url) {
if (!url) {
return null;
}
var conf = {};
var parsed = urlParse(url);
conf.protocol = parsed.protocol;
conf.host = parsed.hostname;
conf.port = parseInt(parsed.port,10);
return conf;
}
|
[
"function",
"tryParse",
"(",
"url",
")",
"{",
"if",
"(",
"!",
"url",
")",
"{",
"return",
"null",
";",
"}",
"var",
"conf",
"=",
"{",
"}",
";",
"var",
"parsed",
"=",
"urlParse",
"(",
"url",
")",
";",
"conf",
".",
"protocol",
"=",
"parsed",
".",
"protocol",
";",
"conf",
".",
"host",
"=",
"parsed",
".",
"hostname",
";",
"conf",
".",
"port",
"=",
"parseInt",
"(",
"parsed",
".",
"port",
",",
"10",
")",
";",
"return",
"conf",
";",
"}"
] |
Parses the de facto `http_proxy` environment.
|
[
"Parses",
"the",
"de",
"facto",
"http_proxy",
"environment",
"."
] |
d7feb2ec25029527d2debb21e087db058434b62b
|
https://github.com/salesforce/global-tunnel/blob/d7feb2ec25029527d2debb21e087db058434b62b/index.js#L33-L44
|
train
|
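A quick sketch of `tryParse` above on a typical proxy URL; it assumes `urlParse` is Node's `url.parse`, which is what the surrounding module appears to use:

var urlParse = require('url').parse;

function tryParse(url) {
  if (!url) {
    return null;
  }
  var conf = {};
  var parsed = urlParse(url);
  conf.protocol = parsed.protocol;
  conf.host = parsed.hostname;
  conf.port = parseInt(parsed.port, 10);
  return conf;
}

tryParse('http://proxy.example.com:8080');
// -> { protocol: 'http:', host: 'proxy.example.com', port: 8080 }
tryParse(undefined);
// -> null, so callers can fall back to other configuration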
salesforce/global-tunnel
|
lib/agents.js
|
mixinProxying
|
function mixinProxying(agent, proxyOpts) {
agent.proxy = proxyOpts;
var orig = _.pick(agent, 'createConnection', 'addRequest');
// Make the tcp or tls connection go to the proxy, ignoring the
// destination host:port arguments.
agent.createConnection = function(port, host, options) {
return orig.createConnection.call(this,
this.proxy.port, this.proxy.host, options);
};
// tell the proxy where we really want to go by fully-qualifying the path
// part. Force a localAddress if one was configured
agent.addRequest = function(req, host, port, localAddress) {
req.path = this.proxy.innerProtocol + '//' + host + ':' + port + req.path;
if (this.proxy.localAddress) {
localAddress = this.proxy.localAddress;
}
return orig.addRequest.call(this, req, host, port, localAddress);
};
}
|
javascript
|
function mixinProxying(agent, proxyOpts) {
agent.proxy = proxyOpts;
var orig = _.pick(agent, 'createConnection', 'addRequest');
// Make the tcp or tls connection go to the proxy, ignoring the
// destination host:port arguments.
agent.createConnection = function(port, host, options) {
return orig.createConnection.call(this,
this.proxy.port, this.proxy.host, options);
};
// tell the proxy where we really want to go by fully-qualifying the path
// part. Force a localAddress if one was configured
agent.addRequest = function(req, host, port, localAddress) {
req.path = this.proxy.innerProtocol + '//' + host + ':' + port + req.path;
if (this.proxy.localAddress) {
localAddress = this.proxy.localAddress;
}
return orig.addRequest.call(this, req, host, port, localAddress);
};
}
|
[
"function",
"mixinProxying",
"(",
"agent",
",",
"proxyOpts",
")",
"{",
"agent",
".",
"proxy",
"=",
"proxyOpts",
";",
"var",
"orig",
"=",
"_",
".",
"pick",
"(",
"agent",
",",
"'createConnection'",
",",
"'addRequest'",
")",
";",
"agent",
".",
"createConnection",
"=",
"function",
"(",
"port",
",",
"host",
",",
"options",
")",
"{",
"return",
"orig",
".",
"createConnection",
".",
"call",
"(",
"this",
",",
"this",
".",
"proxy",
".",
"port",
",",
"this",
".",
"proxy",
".",
"host",
",",
"options",
")",
";",
"}",
";",
"agent",
".",
"addRequest",
"=",
"function",
"(",
"req",
",",
"host",
",",
"port",
",",
"localAddress",
")",
"{",
"req",
".",
"path",
"=",
"this",
".",
"proxy",
".",
"innerProtocol",
"+",
"'//'",
"+",
"host",
"+",
"':'",
"+",
"port",
"+",
"req",
".",
"path",
";",
"if",
"(",
"this",
".",
"proxy",
".",
"localAddress",
")",
"{",
"localAddress",
"=",
"this",
".",
"proxy",
".",
"localAddress",
";",
"}",
"return",
"orig",
".",
"addRequest",
".",
"call",
"(",
"this",
",",
"req",
",",
"host",
",",
"port",
",",
"localAddress",
")",
";",
"}",
";",
"}"
] |
Override createConnection and addRequest methods on the supplied agent.
http.Agent and https.Agent will set up createConnection in the constructor.
|
[
"Override",
"createConnection",
"and",
"addRequest",
"methods",
"on",
"the",
"supplied",
"agent",
".",
"http",
".",
"Agent",
"and",
"https",
".",
"Agent",
"will",
"set",
"up",
"createConnection",
"in",
"the",
"constructor",
"."
] |
d7feb2ec25029527d2debb21e087db058434b62b
|
https://github.com/salesforce/global-tunnel/blob/d7feb2ec25029527d2debb21e087db058434b62b/lib/agents.js#L36-L57
|
train
|
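A hedged sketch of what `mixinProxying` above does to an agent; the proxy host and port are invented, lodash (`_`) and the function from the record are assumed to be in scope, and the `addRequest(req, host, port, localAddress)` signature reflects the older Node agent API this code targets:

var http = require('http');

var agent = new http.Agent();
mixinProxying(agent, {
  host: 'proxy.internal',   // hypothetical proxy host
  port: 3128,               // hypothetical proxy port
  innerProtocol: 'http:',   // protocol of the real destination
  localAddress: undefined
});

// On that older API, sockets for this agent connect to proxy.internal:3128 and request
// paths are rewritten to absolute form (e.g. 'http://example.com:80/index.html') so the
// proxy knows the real destination.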
AmpersandJS/ampersand-view
|
ampersand-view.js
|
function () {
if (this.el && this.el.parentNode) this.el.parentNode.removeChild(this.el);
this._rendered = false;
this._downsertBindings();
return this;
}
|
javascript
|
function () {
if (this.el && this.el.parentNode) this.el.parentNode.removeChild(this.el);
this._rendered = false;
this._downsertBindings();
return this;
}
|
[
"function",
"(",
")",
"{",
"if",
"(",
"this",
".",
"el",
"&&",
"this",
".",
"el",
".",
"parentNode",
")",
"this",
".",
"el",
".",
"parentNode",
".",
"removeChild",
"(",
"this",
".",
"el",
")",
";",
"this",
".",
"_rendered",
"=",
"false",
";",
"this",
".",
"_downsertBindings",
"(",
")",
";",
"return",
"this",
";",
"}"
] |
Removes this view by taking the element out of the DOM, and removing any applicable event listeners.
|
[
"Removes",
"this",
"view",
"by",
"taking",
"the",
"element",
"out",
"of",
"the",
"DOM",
"and",
"removing",
"any",
"applicable",
"event",
"listeners",
"."
] |
283b599dafbfcc3736730758aa9c19b281a77187
|
https://github.com/AmpersandJS/ampersand-view/blob/283b599dafbfcc3736730758aa9c19b281a77187/ampersand-view.js#L157-L162
|
train
|
|
AmpersandJS/ampersand-view
|
ampersand-view.js
|
function (context, templateArg) {
var template = templateArg || this.template;
if (!template) throw new Error('Template string or function needed.');
var newDom = isString(template) ? template : template.call(this, context || this);
if (isString(newDom)) newDom = domify(newDom);
var parent = this.el && this.el.parentNode;
if (parent) parent.replaceChild(newDom, this.el);
if (newDom.nodeName === '#document-fragment') throw new Error('Views can only have one root element, including comment nodes.');
this.el = newDom;
return this;
}
|
javascript
|
function (context, templateArg) {
var template = templateArg || this.template;
if (!template) throw new Error('Template string or function needed.');
var newDom = isString(template) ? template : template.call(this, context || this);
if (isString(newDom)) newDom = domify(newDom);
var parent = this.el && this.el.parentNode;
if (parent) parent.replaceChild(newDom, this.el);
if (newDom.nodeName === '#document-fragment') throw new Error('Views can only have one root element, including comment nodes.');
this.el = newDom;
return this;
}
|
[
"function",
"(",
"context",
",",
"templateArg",
")",
"{",
"var",
"template",
"=",
"templateArg",
"||",
"this",
".",
"template",
";",
"if",
"(",
"!",
"template",
")",
"throw",
"new",
"Error",
"(",
"'Template string or function needed.'",
")",
";",
"var",
"newDom",
"=",
"isString",
"(",
"template",
")",
"?",
"template",
":",
"template",
".",
"call",
"(",
"this",
",",
"context",
"||",
"this",
")",
";",
"if",
"(",
"isString",
"(",
"newDom",
")",
")",
"newDom",
"=",
"domify",
"(",
"newDom",
")",
";",
"var",
"parent",
"=",
"this",
".",
"el",
"&&",
"this",
".",
"el",
".",
"parentNode",
";",
"if",
"(",
"parent",
")",
"parent",
".",
"replaceChild",
"(",
"newDom",
",",
"this",
".",
"el",
")",
";",
"if",
"(",
"newDom",
".",
"nodeName",
"===",
"'#document-fragment'",
")",
"throw",
"new",
"Error",
"(",
"'Views can only have one root element, including comment nodes.'",
")",
";",
"this",
".",
"el",
"=",
"newDom",
";",
"return",
"this",
";",
"}"
] |
Shortcut for doing everything we need to do to render and fully replace current root element. Either define a `template` property of your view or pass in a template directly. The template can either be a string or a function. If it's a function it will be passed the `context` argument.
|
[
"Shortcut",
"for",
"doing",
"everything",
"we",
"need",
"to",
"do",
"to",
"render",
"and",
"fully",
"replace",
"current",
"root",
"element",
".",
"Either",
"define",
"a",
"template",
"property",
"of",
"your",
"view",
"or",
"pass",
"in",
"a",
"template",
"directly",
".",
"The",
"template",
"can",
"either",
"be",
"a",
"string",
"or",
"a",
"function",
".",
"If",
"it",
"s",
"a",
"function",
"it",
"will",
"be",
"passed",
"the",
"context",
"argument",
"."
] |
283b599dafbfcc3736730758aa9c19b281a77187
|
https://github.com/AmpersandJS/ampersand-view/blob/283b599dafbfcc3736730758aa9c19b281a77187/ampersand-view.js#L319-L329
|
train
|
|
apiaryio/fury-adapter-swagger
|
src/json-schema.js
|
checkSchemaHasReferences
|
function checkSchemaHasReferences(schema) {
if (schema.$ref) {
return true;
}
return Object.values(schema).some((value) => {
if (_.isArray(value)) {
return value.some(checkSchemaHasReferences);
} else if (_.isObject(value)) {
return checkSchemaHasReferences(value);
}
return false;
});
}
|
javascript
|
function checkSchemaHasReferences(schema) {
if (schema.$ref) {
return true;
}
return Object.values(schema).some((value) => {
if (_.isArray(value)) {
return value.some(checkSchemaHasReferences);
} else if (_.isObject(value)) {
return checkSchemaHasReferences(value);
}
return false;
});
}
|
[
"function",
"checkSchemaHasReferences",
"(",
"schema",
")",
"{",
"if",
"(",
"schema",
".",
"$ref",
")",
"{",
"return",
"true",
";",
"}",
"return",
"Object",
".",
"values",
"(",
"schema",
")",
".",
"some",
"(",
"(",
"value",
")",
"=>",
"{",
"if",
"(",
"_",
".",
"isArray",
"(",
"value",
")",
")",
"{",
"return",
"value",
".",
"some",
"(",
"checkSchemaHasReferences",
")",
";",
"}",
"else",
"if",
"(",
"_",
".",
"isObject",
"(",
"value",
")",
")",
"{",
"return",
"checkSchemaHasReferences",
"(",
"value",
")",
";",
"}",
"return",
"false",
";",
"}",
")",
";",
"}"
] |
Returns true if the given schema contains any references
|
[
"Returns",
"true",
"if",
"the",
"given",
"schema",
"contains",
"any",
"references"
] |
cc772bbd1ba10fdc61de0e5c069a5ab5abfcefa5
|
https://github.com/apiaryio/fury-adapter-swagger/blob/cc772bbd1ba10fdc61de0e5c069a5ab5abfcefa5/src/json-schema.js#L212-L226
|
train
|
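Two tiny hand-written schemas illustrate the recursive walk in `checkSchemaHasReferences` above; lodash (`_`) and the function itself are assumed to be in scope:

const withRef = {
  type: 'object',
  properties: {
    pet: { $ref: '#/definitions/Pet' }   // nested reference, found by the recursive walk
  }
};

const withoutRef = {
  type: 'array',
  items: { type: 'string' }
};

checkSchemaHasReferences(withRef);    // true
checkSchemaHasReferences(withoutRef); // false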
apiaryio/fury-adapter-swagger
|
src/json-schema.js
|
findReferences
|
function findReferences(schema) {
if (schema.$ref) {
return [schema.$ref];
}
let references = [];
if (schema.allOf) {
references = references.concat(...schema.allOf.map(findReferences));
}
if (schema.anyOf) {
references = references.concat(...schema.anyOf.map(findReferences));
}
if (schema.oneOf) {
references = references.concat(...schema.oneOf.map(findReferences));
}
if (schema.not) {
references = references.concat(...findReferences(schema.not));
}
// Array
if (schema.items) {
if (Array.isArray(schema.items)) {
references = references.concat(...schema.items.map(findReferences));
} else {
references = references.concat(findReferences(schema.items));
}
}
if (schema.additionalItems && typeof schema.additionalItems === 'object') {
references = references.concat(findReferences(schema.additionalItems));
}
// Object
if (schema.properties) {
Object.keys(schema.properties).forEach((key) => {
references = references.concat(findReferences(schema.properties[key]));
});
}
if (schema.patternProperties) {
Object.keys(schema.patternProperties).forEach((key) => {
references = references.concat(findReferences(schema.patternProperties[key]));
});
}
if (schema.additionalProperties && typeof schema.additionalProperties === 'object') {
references = references.concat(findReferences(schema.additionalProperties));
}
return references;
}
|
javascript
|
function findReferences(schema) {
if (schema.$ref) {
return [schema.$ref];
}
let references = [];
if (schema.allOf) {
references = references.concat(...schema.allOf.map(findReferences));
}
if (schema.anyOf) {
references = references.concat(...schema.anyOf.map(findReferences));
}
if (schema.oneOf) {
references = references.concat(...schema.oneOf.map(findReferences));
}
if (schema.not) {
references = references.concat(...findReferences(schema.not));
}
// Array
if (schema.items) {
if (Array.isArray(schema.items)) {
references = references.concat(...schema.items.map(findReferences));
} else {
references = references.concat(findReferences(schema.items));
}
}
if (schema.additionalItems && typeof schema.additionalItems === 'object') {
references = references.concat(findReferences(schema.additionalItems));
}
// Object
if (schema.properties) {
Object.keys(schema.properties).forEach((key) => {
references = references.concat(findReferences(schema.properties[key]));
});
}
if (schema.patternProperties) {
Object.keys(schema.patternProperties).forEach((key) => {
references = references.concat(findReferences(schema.patternProperties[key]));
});
}
if (schema.additionalProperties && typeof schema.additionalProperties === 'object') {
references = references.concat(findReferences(schema.additionalProperties));
}
return references;
}
|
[
"function",
"findReferences",
"(",
"schema",
")",
"{",
"if",
"(",
"schema",
".",
"$ref",
")",
"{",
"return",
"[",
"schema",
".",
"$ref",
"]",
";",
"}",
"let",
"references",
"=",
"[",
"]",
";",
"if",
"(",
"schema",
".",
"allOf",
")",
"{",
"references",
"=",
"references",
".",
"concat",
"(",
"...",
"schema",
".",
"allOf",
".",
"map",
"(",
"findReferences",
")",
")",
";",
"}",
"if",
"(",
"schema",
".",
"anyOf",
")",
"{",
"references",
"=",
"references",
".",
"concat",
"(",
"...",
"schema",
".",
"anyOf",
".",
"map",
"(",
"findReferences",
")",
")",
";",
"}",
"if",
"(",
"schema",
".",
"oneOf",
")",
"{",
"references",
"=",
"references",
".",
"concat",
"(",
"...",
"schema",
".",
"oneOf",
".",
"map",
"(",
"findReferences",
")",
")",
";",
"}",
"if",
"(",
"schema",
".",
"not",
")",
"{",
"references",
"=",
"references",
".",
"concat",
"(",
"...",
"findReferences",
"(",
"schema",
".",
"not",
")",
")",
";",
"}",
"if",
"(",
"schema",
".",
"items",
")",
"{",
"if",
"(",
"Array",
".",
"isArray",
"(",
"schema",
".",
"items",
")",
")",
"{",
"references",
"=",
"references",
".",
"concat",
"(",
"...",
"schema",
".",
"items",
".",
"map",
"(",
"findReferences",
")",
")",
";",
"}",
"else",
"{",
"references",
"=",
"references",
".",
"concat",
"(",
"findReferences",
"(",
"schema",
".",
"items",
")",
")",
";",
"}",
"}",
"if",
"(",
"schema",
".",
"additionalItems",
"&&",
"typeof",
"schema",
".",
"additionalItems",
"===",
"'object'",
")",
"{",
"references",
"=",
"references",
".",
"concat",
"(",
"findReferences",
"(",
"schema",
".",
"additionalItems",
")",
")",
";",
"}",
"if",
"(",
"schema",
".",
"properties",
")",
"{",
"Object",
".",
"keys",
"(",
"schema",
".",
"properties",
")",
".",
"forEach",
"(",
"(",
"key",
")",
"=>",
"{",
"references",
"=",
"references",
".",
"concat",
"(",
"findReferences",
"(",
"schema",
".",
"properties",
"[",
"key",
"]",
")",
")",
";",
"}",
")",
";",
"}",
"if",
"(",
"schema",
".",
"patternProperties",
")",
"{",
"Object",
".",
"keys",
"(",
"schema",
".",
"patternProperties",
")",
".",
"forEach",
"(",
"(",
"key",
")",
"=>",
"{",
"references",
"=",
"references",
".",
"concat",
"(",
"findReferences",
"(",
"schema",
".",
"patternProperties",
"[",
"key",
"]",
")",
")",
";",
"}",
")",
";",
"}",
"if",
"(",
"schema",
".",
"additionalProperties",
"&&",
"typeof",
"schema",
".",
"additionalProperties",
"===",
"'object'",
")",
"{",
"references",
"=",
"references",
".",
"concat",
"(",
"findReferences",
"(",
"schema",
".",
"additionalProperties",
")",
")",
";",
"}",
"return",
"references",
";",
"}"
] |
Traverses the entire schema to find all of the references
@returns array of each reference that is found in the schema
|
[
"Traverses",
"the",
"entire",
"schema",
"to",
"find",
"all",
"of",
"the",
"references"
] |
cc772bbd1ba10fdc61de0e5c069a5ab5abfcefa5
|
https://github.com/apiaryio/fury-adapter-swagger/blob/cc772bbd1ba10fdc61de0e5c069a5ab5abfcefa5/src/json-schema.js#L231-L287
|
train
|
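A sketch of `findReferences` above on a composed schema; the schema contents are invented and the function from the record is assumed to be in scope:

const schema = {
  allOf: [
    { $ref: '#/definitions/Base' },
    {
      type: 'object',
      properties: {
        tags: { type: 'array', items: { $ref: '#/definitions/Tag' } }
      }
    }
  ]
};

findReferences(schema);
// -> ['#/definitions/Base', '#/definitions/Tag']
// (duplicates are not removed; the caller decides how to dedupe)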
abhishekkyd/protractor-html-reporter-2
|
lib/protractor-xml2html-reporter.js
|
getTime
|
function getTime(time) {
var hours = Math.floor(time/3600);
var minutes = Math.floor(time % 3600/60);
var seconds = (time % 3600) % 60;
return hours + 'h ' + minutes + 'min ' + seconds + 's';
}
|
javascript
|
function getTime(time) {
var hours = Math.floor(time/3600);
var minutes = Math.floor(time % 3600/60);
var seconds = (time % 3600) % 60;
return hours + 'h ' + minutes + 'min ' + seconds + 's';
}
|
[
"function",
"getTime",
"(",
"time",
")",
"{",
"var",
"hours",
"=",
"Math",
".",
"floor",
"(",
"time",
"/",
"3600",
")",
";",
"var",
"minutes",
"=",
"Math",
".",
"floor",
"(",
"time",
"%",
"3600",
"/",
"60",
")",
";",
"var",
"seconds",
"=",
"(",
"time",
"%",
"3600",
")",
"%",
"60",
";",
"return",
"hours",
"+",
"'h '",
"+",
"minutes",
"+",
"'min '",
"+",
"seconds",
"+",
"'s'",
";",
"}"
] |
time passed in seconds
|
[
"time",
"passed",
"in",
"seconds"
] |
c053ffc00510496375d950d284e53b44f4614b33
|
https://github.com/abhishekkyd/protractor-html-reporter-2/blob/c053ffc00510496375d950d284e53b44f4614b33/lib/protractor-xml2html-reporter.js#L73-L79
|
train
|
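A quick arithmetic check of the formatter above (3725 seconds is 1 hour, 2 minutes and 5 seconds); the function is repeated here so the snippet runs on its own:

function getTime(time) {
  var hours = Math.floor(time / 3600);
  var minutes = Math.floor(time % 3600 / 60);
  var seconds = (time % 3600) % 60;
  return hours + 'h ' + minutes + 'min ' + seconds + 's';
}

getTime(3725); // -> '1h 2min 5s'
getTime(59);   // -> '0h 0min 59s'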
openpgpjs/web-stream-tools
|
lib/util.js
|
isStream
|
function isStream(input) {
if (ReadableStream.prototype.isPrototypeOf(input)) {
return 'web';
}
if (NodeReadableStream && NodeReadableStream.prototype.isPrototypeOf(input)) {
return 'node';
}
return false;
}
|
javascript
|
function isStream(input) {
if (ReadableStream.prototype.isPrototypeOf(input)) {
return 'web';
}
if (NodeReadableStream && NodeReadableStream.prototype.isPrototypeOf(input)) {
return 'node';
}
return false;
}
|
[
"function",
"isStream",
"(",
"input",
")",
"{",
"if",
"(",
"ReadableStream",
".",
"prototype",
".",
"isPrototypeOf",
"(",
"input",
")",
")",
"{",
"return",
"'web'",
";",
"}",
"if",
"(",
"NodeReadableStream",
"&&",
"NodeReadableStream",
".",
"prototype",
".",
"isPrototypeOf",
"(",
"input",
")",
")",
"{",
"return",
"'node'",
";",
"}",
"return",
"false",
";",
"}"
] |
Check whether data is a Stream, and if so of which type
@param {Any} input data to check
@returns {'web'|'node'|false}
|
[
"Check",
"whether",
"data",
"is",
"a",
"Stream",
"and",
"if",
"so",
"of",
"which",
"type"
] |
84a497715c9df271a673f8616318264ab42ab3cc
|
https://github.com/openpgpjs/web-stream-tools/blob/84a497715c9df271a673f8616318264ab42ab3cc/lib/util.js#L8-L16
|
train
|
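A sketch of the three possible return values of `isStream` above; it assumes the function (with its own `ReadableStream`/`NodeReadableStream` imports) is in scope, and that a WHATWG `ReadableStream` implementation is available (global on recent Node versions, otherwise via a polyfill):

const { Readable } = require('stream');

isStream(new ReadableStream());         // -> 'web'
isStream(new Readable({ read() {} }));  // -> 'node'
isStream(new Uint8Array([1, 2, 3]));    // -> false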
feedhenry/fh-mbaas-api
|
lib/push.js
|
getPushSettings
|
function getPushSettings(opts) {
assert.ok(opts, 'opts is undefined');
var headers = {
'X-Project-Id': opts.widget,
'X-App-Id': opts.instance
};
fhutils.addAppApiKeyHeader(headers, opts.appapikey);
return {
url: 'https://' + opts.millicore + ':' + opts.port + '/box/api/unifiedpush/mbaas/',
applicationId: "fake", // we have to use fake ID, it will be added by supercore
masterSecret: "fake", // we have to use fake secret, it will be added by supercore
headers: headers
};
}
|
javascript
|
function getPushSettings(opts) {
assert.ok(opts, 'opts is undefined');
var headers = {
'X-Project-Id': opts.widget,
'X-App-Id': opts.instance
};
fhutils.addAppApiKeyHeader(headers, opts.appapikey);
return {
url: 'https://' + opts.millicore + ':' + opts.port + '/box/api/unifiedpush/mbaas/',
applicationId: "fake", // we have to use fake ID, it will be added by supercore
masterSecret: "fake", // we have to use fake secret, it will be added by supercore
headers: headers
};
}
|
[
"function",
"getPushSettings",
"(",
"opts",
")",
"{",
"assert",
".",
"ok",
"(",
"opts",
",",
"'opts is undefined'",
")",
";",
"var",
"headers",
"=",
"{",
"'X-Project-Id'",
":",
"opts",
".",
"widget",
",",
"'X-App-Id'",
":",
"opts",
".",
"instance",
"}",
";",
"fhutils",
".",
"addAppApiKeyHeader",
"(",
"headers",
",",
"opts",
".",
"appapikey",
")",
";",
"return",
"{",
"url",
":",
"'https://'",
"+",
"opts",
".",
"millicore",
"+",
"':'",
"+",
"opts",
".",
"port",
"+",
"'/box/api/unifiedpush/mbaas/'",
",",
"applicationId",
":",
"\"fake\"",
",",
"masterSecret",
":",
"\"fake\"",
",",
"headers",
":",
"headers",
"}",
";",
"}"
] |
Generates settings to be used when creating an AeroGear.sender.
@param {Object} opts [description]
@return {Object}
|
[
"Generates",
"settings",
"to",
"be",
"used",
"when",
"creating",
"an",
"AeroGear",
".",
"sender",
"."
] |
3bd60e54a34f216d5f2d35a31f3e4b96ae19956e
|
https://github.com/feedhenry/fh-mbaas-api/blob/3bd60e54a34f216d5f2d35a31f3e4b96ae19956e/lib/push.js#L17-L33
|
train
|
openpgpjs/web-stream-tools
|
lib/streams.js
|
toStream
|
function toStream(input) {
let streamType = isStream(input);
if (streamType === 'node') {
return nodeToWeb(input);
} else if (streamType) {
return input;
}
return new ReadableStream({
start(controller) {
controller.enqueue(input);
controller.close();
}
});
}
|
javascript
|
function toStream(input) {
let streamType = isStream(input);
if (streamType === 'node') {
return nodeToWeb(input);
} else if (streamType) {
return input;
}
return new ReadableStream({
start(controller) {
controller.enqueue(input);
controller.close();
}
});
}
|
[
"function",
"toStream",
"(",
"input",
")",
"{",
"let",
"streamType",
"=",
"isStream",
"(",
"input",
")",
";",
"if",
"(",
"streamType",
"===",
"'node'",
")",
"{",
"return",
"nodeToWeb",
"(",
"input",
")",
";",
"}",
"else",
"if",
"(",
"streamType",
")",
"{",
"return",
"input",
";",
"}",
"return",
"new",
"ReadableStream",
"(",
"{",
"start",
"(",
"controller",
")",
"{",
"controller",
".",
"enqueue",
"(",
"input",
")",
";",
"controller",
".",
"close",
"(",
")",
";",
"}",
"}",
")",
";",
"}"
] |
Convert data to Stream
@param {ReadableStream|Uint8array|String} input data to convert
@returns {ReadableStream} Converted data
|
[
"Convert",
"data",
"to",
"Stream"
] |
84a497715c9df271a673f8616318264ab42ab3cc
|
https://github.com/openpgpjs/web-stream-tools/blob/84a497715c9df271a673f8616318264ab42ab3cc/lib/streams.js#L12-L25
|
train
|
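A minimal sketch of toStream in use. It assumes toStream and readToEnd (another lib/streams.js helper, referenced by slice later in this file) are in scope, and a runtime with WHATWG streams (browser or Node >= 18).

// Sketch only — helpers assumed in scope.
(async () => {
  const stream = toStream('hello');      // a non-stream value becomes a one-chunk ReadableStream
  console.log(await readToEnd(stream));  // 'hello'
})();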
openpgpjs/web-stream-tools
|
lib/streams.js
|
concat
|
function concat(list) {
if (list.some(isStream)) {
return concatStream(list);
}
if (typeof list[0] === 'string') {
return list.join('');
}
if (NodeBuffer && NodeBuffer.isBuffer(list[0])) {
return NodeBuffer.concat(list);
}
return concatUint8Array(list);
}
|
javascript
|
function concat(list) {
if (list.some(isStream)) {
return concatStream(list);
}
if (typeof list[0] === 'string') {
return list.join('');
}
if (NodeBuffer && NodeBuffer.isBuffer(list[0])) {
return NodeBuffer.concat(list);
}
return concatUint8Array(list);
}
|
[
"function",
"concat",
"(",
"list",
")",
"{",
"if",
"(",
"list",
".",
"some",
"(",
"isStream",
")",
")",
"{",
"return",
"concatStream",
"(",
"list",
")",
";",
"}",
"if",
"(",
"typeof",
"list",
"[",
"0",
"]",
"===",
"'string'",
")",
"{",
"return",
"list",
".",
"join",
"(",
"''",
")",
";",
"}",
"if",
"(",
"NodeBuffer",
"&&",
"NodeBuffer",
".",
"isBuffer",
"(",
"list",
"[",
"0",
"]",
")",
")",
"{",
"return",
"NodeBuffer",
".",
"concat",
"(",
"list",
")",
";",
"}",
"return",
"concatUint8Array",
"(",
"list",
")",
";",
"}"
] |
Concat a list of Uint8Arrays, Strings or Streams
The caller should not mix Uint8Arrays with Strings, but may mix Streams with non-Streams.
@param {Array<Uint8array|String|ReadableStream>} Array of Uint8Arrays/Strings/Streams to concatenate
@returns {Uint8array|String|ReadableStream} Concatenated array
|
[
"Concat",
"a",
"list",
"of",
"Uint8Arrays",
"Strings",
"or",
"Streams",
"The",
"caller",
"should",
"not",
"mix",
"Uint8Arrays",
"with",
"Strings",
"but",
"may",
"mix",
"Streams",
"with",
"non",
"-",
"Streams",
"."
] |
84a497715c9df271a673f8616318264ab42ab3cc
|
https://github.com/openpgpjs/web-stream-tools/blob/84a497715c9df271a673f8616318264ab42ab3cc/lib/streams.js#L33-L44
|
train
|
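A short sketch of concat's behavior for the three supported element types, assuming concat and toStream from the records above are in scope.

concat(['foo', 'bar']);                                 // 'foobar'
concat([new Uint8Array([1, 2]), new Uint8Array([3])]);  // Uint8Array [1, 2, 3]
// If any element is a stream, a concatenated ReadableStream is returned instead:
// concat([toStream(new Uint8Array([1])), new Uint8Array([2])])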
openpgpjs/web-stream-tools
|
lib/streams.js
|
concatStream
|
function concatStream(list) {
list = list.map(toStream);
const transform = transformWithCancel(async function(reason) {
await Promise.all(transforms.map(stream => cancel(stream, reason)));
});
let prev = Promise.resolve();
const transforms = list.map((stream, i) => transformPair(stream, (readable, writable) => {
prev = prev.then(() => pipe(readable, transform.writable, {
preventClose: i !== list.length - 1
}));
return prev;
}));
return transform.readable;
}
|
javascript
|
function concatStream(list) {
list = list.map(toStream);
const transform = transformWithCancel(async function(reason) {
await Promise.all(transforms.map(stream => cancel(stream, reason)));
});
let prev = Promise.resolve();
const transforms = list.map((stream, i) => transformPair(stream, (readable, writable) => {
prev = prev.then(() => pipe(readable, transform.writable, {
preventClose: i !== list.length - 1
}));
return prev;
}));
return transform.readable;
}
|
[
"function",
"concatStream",
"(",
"list",
")",
"{",
"list",
"=",
"list",
".",
"map",
"(",
"toStream",
")",
";",
"const",
"transform",
"=",
"transformWithCancel",
"(",
"async",
"function",
"(",
"reason",
")",
"{",
"await",
"Promise",
".",
"all",
"(",
"transforms",
".",
"map",
"(",
"stream",
"=>",
"cancel",
"(",
"stream",
",",
"reason",
")",
")",
")",
";",
"}",
")",
";",
"let",
"prev",
"=",
"Promise",
".",
"resolve",
"(",
")",
";",
"const",
"transforms",
"=",
"list",
".",
"map",
"(",
"(",
"stream",
",",
"i",
")",
"=>",
"transformPair",
"(",
"stream",
",",
"(",
"readable",
",",
"writable",
")",
"=>",
"{",
"prev",
"=",
"prev",
".",
"then",
"(",
"(",
")",
"=>",
"pipe",
"(",
"readable",
",",
"transform",
".",
"writable",
",",
"{",
"preventClose",
":",
"i",
"!==",
"list",
".",
"length",
"-",
"1",
"}",
")",
")",
";",
"return",
"prev",
";",
"}",
")",
")",
";",
"return",
"transform",
".",
"readable",
";",
"}"
] |
Concat a list of Streams
@param {Array<ReadableStream|Uint8array|String>} list Array of Uint8Arrays/Strings/Streams to concatenate
@returns {ReadableStream} Concatenated list
|
[
"Concat",
"a",
"list",
"of",
"Streams"
] |
84a497715c9df271a673f8616318264ab42ab3cc
|
https://github.com/openpgpjs/web-stream-tools/blob/84a497715c9df271a673f8616318264ab42ab3cc/lib/streams.js#L51-L64
|
train
|
openpgpjs/web-stream-tools
|
lib/streams.js
|
getWriter
|
function getWriter(input) {
const writer = input.getWriter();
const releaseLock = writer.releaseLock;
writer.releaseLock = () => {
writer.closed.catch(function() {});
releaseLock.call(writer);
};
return writer;
}
|
javascript
|
function getWriter(input) {
const writer = input.getWriter();
const releaseLock = writer.releaseLock;
writer.releaseLock = () => {
writer.closed.catch(function() {});
releaseLock.call(writer);
};
return writer;
}
|
[
"function",
"getWriter",
"(",
"input",
")",
"{",
"const",
"writer",
"=",
"input",
".",
"getWriter",
"(",
")",
";",
"const",
"releaseLock",
"=",
"writer",
".",
"releaseLock",
";",
"writer",
".",
"releaseLock",
"=",
"(",
")",
"=>",
"{",
"writer",
".",
"closed",
".",
"catch",
"(",
"function",
"(",
")",
"{",
"}",
")",
";",
"releaseLock",
".",
"call",
"(",
"writer",
")",
";",
"}",
";",
"return",
"writer",
";",
"}"
] |
Get a Writer
@param {WritableStream} input
@returns {WritableStreamDefaultWriter}
|
[
"Get",
"a",
"Writer"
] |
84a497715c9df271a673f8616318264ab42ab3cc
|
https://github.com/openpgpjs/web-stream-tools/blob/84a497715c9df271a673f8616318264ab42ab3cc/lib/streams.js#L80-L88
|
train
|
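A usage sketch for getWriter, assuming the helper above is in scope and the runtime provides WHATWG WritableStream (browser or Node >= 18).

(async () => {
  const chunks = [];
  const writable = new WritableStream({ write(chunk) { chunks.push(chunk); } });
  const writer = getWriter(writable);  // wrapped writer with the patched releaseLock
  await writer.write('a');
  await writer.write('b');
  await writer.close();
  // chunks -> ['a', 'b']
})();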
openpgpjs/web-stream-tools
|
lib/streams.js
|
pipe
|
async function pipe(input, target, options) {
input = toStream(input);
try {
if (input[externalBuffer]) {
const writer = getWriter(target);
for (let i = 0; i < input[externalBuffer].length; i++) {
await writer.ready;
await writer.write(input[externalBuffer][i]);
}
writer.releaseLock();
}
return await input.pipeTo(target, options);
} catch(e) {}
}
|
javascript
|
async function pipe(input, target, options) {
input = toStream(input);
try {
if (input[externalBuffer]) {
const writer = getWriter(target);
for (let i = 0; i < input[externalBuffer].length; i++) {
await writer.ready;
await writer.write(input[externalBuffer][i]);
}
writer.releaseLock();
}
return await input.pipeTo(target, options);
} catch(e) {}
}
|
[
"async",
"function",
"pipe",
"(",
"input",
",",
"target",
",",
"options",
")",
"{",
"input",
"=",
"toStream",
"(",
"input",
")",
";",
"try",
"{",
"if",
"(",
"input",
"[",
"externalBuffer",
"]",
")",
"{",
"const",
"writer",
"=",
"getWriter",
"(",
"target",
")",
";",
"for",
"(",
"let",
"i",
"=",
"0",
";",
"i",
"<",
"input",
"[",
"externalBuffer",
"]",
".",
"length",
";",
"i",
"++",
")",
"{",
"await",
"writer",
".",
"ready",
";",
"await",
"writer",
".",
"write",
"(",
"input",
"[",
"externalBuffer",
"]",
"[",
"i",
"]",
")",
";",
"}",
"writer",
".",
"releaseLock",
"(",
")",
";",
"}",
"return",
"await",
"input",
".",
"pipeTo",
"(",
"target",
",",
"options",
")",
";",
"}",
"catch",
"(",
"e",
")",
"{",
"}",
"}"
] |
Pipe a readable stream to a writable stream. Don't throw on input stream errors, but forward them to the output stream.
@param {ReadableStream|Uint8array|String} input
@param {WritableStream} target
@param {Object} (optional) options
@returns {Promise<undefined>} Promise indicating when piping has finished (input stream closed or errored)
@async
|
[
"Pipe",
"a",
"readable",
"stream",
"to",
"a",
"writable",
"stream",
".",
"Don",
"t",
"throw",
"on",
"input",
"stream",
"errors",
"but",
"forward",
"them",
"to",
"the",
"output",
"stream",
"."
] |
84a497715c9df271a673f8616318264ab42ab3cc
|
https://github.com/openpgpjs/web-stream-tools/blob/84a497715c9df271a673f8616318264ab42ab3cc/lib/streams.js#L98-L111
|
train
|
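A sketch of pipe in use, assuming pipe and toStream from the records above are in scope (browser or Node >= 18).

(async () => {
  const received = [];
  const target = new WritableStream({ write(chunk) { received.push(chunk); } });
  await pipe(toStream(new Uint8Array([1, 2, 3])), target);
  // received -> [Uint8Array [1, 2, 3]]
})();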
openpgpjs/web-stream-tools
|
lib/streams.js
|
transformRaw
|
function transformRaw(input, options) {
const transformStream = new TransformStream(options);
pipe(input, transformStream.writable);
return transformStream.readable;
}
|
javascript
|
function transformRaw(input, options) {
const transformStream = new TransformStream(options);
pipe(input, transformStream.writable);
return transformStream.readable;
}
|
[
"function",
"transformRaw",
"(",
"input",
",",
"options",
")",
"{",
"const",
"transformStream",
"=",
"new",
"TransformStream",
"(",
"options",
")",
";",
"pipe",
"(",
"input",
",",
"transformStream",
".",
"writable",
")",
";",
"return",
"transformStream",
".",
"readable",
";",
"}"
] |
Pipe a readable stream through a transform stream.
@param {ReadableStream|Uint8array|String} input
@param {Object} (optional) options
@returns {ReadableStream} transformed stream
|
[
"Pipe",
"a",
"readable",
"stream",
"through",
"a",
"transform",
"stream",
"."
] |
84a497715c9df271a673f8616318264ab42ab3cc
|
https://github.com/openpgpjs/web-stream-tools/blob/84a497715c9df271a673f8616318264ab42ab3cc/lib/streams.js#L119-L123
|
train
|
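A sketch of transformRaw uppercasing each chunk of a text stream; transformRaw, toStream and readToEnd are assumed to be in scope.

(async () => {
  const upper = transformRaw(toStream('abc'), {
    transform(chunk, controller) { controller.enqueue(chunk.toUpperCase()); }
  });
  console.log(await readToEnd(upper)); // 'ABC'
})();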
openpgpjs/web-stream-tools
|
lib/streams.js
|
transformWithCancel
|
function transformWithCancel(cancel) {
let pulled = false;
let backpressureChangePromiseResolve;
let outputController;
return {
readable: new ReadableStream({
start(controller) {
outputController = controller;
},
pull() {
if (backpressureChangePromiseResolve) {
backpressureChangePromiseResolve();
} else {
pulled = true;
}
},
cancel
}, {highWaterMark: 0}),
writable: new WritableStream({
write: async function(chunk) {
outputController.enqueue(chunk);
if (!pulled) {
await new Promise(resolve => {
backpressureChangePromiseResolve = resolve;
});
backpressureChangePromiseResolve = null;
} else {
pulled = false;
}
},
close: outputController.close.bind(outputController),
abort: outputController.error.bind(outputController)
})
};
}
|
javascript
|
function transformWithCancel(cancel) {
let pulled = false;
let backpressureChangePromiseResolve;
let outputController;
return {
readable: new ReadableStream({
start(controller) {
outputController = controller;
},
pull() {
if (backpressureChangePromiseResolve) {
backpressureChangePromiseResolve();
} else {
pulled = true;
}
},
cancel
}, {highWaterMark: 0}),
writable: new WritableStream({
write: async function(chunk) {
outputController.enqueue(chunk);
if (!pulled) {
await new Promise(resolve => {
backpressureChangePromiseResolve = resolve;
});
backpressureChangePromiseResolve = null;
} else {
pulled = false;
}
},
close: outputController.close.bind(outputController),
abort: outputController.error.bind(outputController)
})
};
}
|
[
"function",
"transformWithCancel",
"(",
"cancel",
")",
"{",
"let",
"pulled",
"=",
"false",
";",
"let",
"backpressureChangePromiseResolve",
";",
"let",
"outputController",
";",
"return",
"{",
"readable",
":",
"new",
"ReadableStream",
"(",
"{",
"start",
"(",
"controller",
")",
"{",
"outputController",
"=",
"controller",
";",
"}",
",",
"pull",
"(",
")",
"{",
"if",
"(",
"backpressureChangePromiseResolve",
")",
"{",
"backpressureChangePromiseResolve",
"(",
")",
";",
"}",
"else",
"{",
"pulled",
"=",
"true",
";",
"}",
"}",
",",
"cancel",
"}",
",",
"{",
"highWaterMark",
":",
"0",
"}",
")",
",",
"writable",
":",
"new",
"WritableStream",
"(",
"{",
"write",
":",
"async",
"function",
"(",
"chunk",
")",
"{",
"outputController",
".",
"enqueue",
"(",
"chunk",
")",
";",
"if",
"(",
"!",
"pulled",
")",
"{",
"await",
"new",
"Promise",
"(",
"resolve",
"=>",
"{",
"backpressureChangePromiseResolve",
"=",
"resolve",
";",
"}",
")",
";",
"backpressureChangePromiseResolve",
"=",
"null",
";",
"}",
"else",
"{",
"pulled",
"=",
"false",
";",
"}",
"}",
",",
"close",
":",
"outputController",
".",
"close",
".",
"bind",
"(",
"outputController",
")",
",",
"abort",
":",
"outputController",
".",
"error",
".",
"bind",
"(",
"outputController",
")",
"}",
")",
"}",
";",
"}"
] |
Create a cancelable TransformStream.
@param {Function} cancel
@returns {TransformStream}
|
[
"Create",
"a",
"cancelable",
"TransformStream",
"."
] |
84a497715c9df271a673f8616318264ab42ab3cc
|
https://github.com/openpgpjs/web-stream-tools/blob/84a497715c9df271a673f8616318264ab42ab3cc/lib/streams.js#L130-L164
|
train
|
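A sketch of transformWithCancel: it returns a writable/readable pair where canceling the readable side runs the supplied callback. The helper is assumed to be in scope.

(async () => {
  const pair = transformWithCancel(async reason => {
    console.log('readable side canceled:', reason);
  });
  // Chunks written to pair.writable are enqueued on pair.readable (highWaterMark 0,
  // so writes only resolve once the readable side pulls).
  await pair.readable.cancel('done'); // triggers the cancel callback above
})();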
openpgpjs/web-stream-tools
|
lib/streams.js
|
transform
|
function transform(input, process = () => undefined, finish = () => undefined) {
if (isStream(input)) {
return transformRaw(input, {
async transform(value, controller) {
try {
const result = await process(value);
if (result !== undefined) controller.enqueue(result);
} catch(e) {
controller.error(e);
}
},
async flush(controller) {
try {
const result = await finish();
if (result !== undefined) controller.enqueue(result);
} catch(e) {
controller.error(e);
}
}
});
}
const result1 = process(input);
const result2 = finish();
if (result1 !== undefined && result2 !== undefined) return concat([result1, result2]);
return result1 !== undefined ? result1 : result2;
}
|
javascript
|
function transform(input, process = () => undefined, finish = () => undefined) {
if (isStream(input)) {
return transformRaw(input, {
async transform(value, controller) {
try {
const result = await process(value);
if (result !== undefined) controller.enqueue(result);
} catch(e) {
controller.error(e);
}
},
async flush(controller) {
try {
const result = await finish();
if (result !== undefined) controller.enqueue(result);
} catch(e) {
controller.error(e);
}
}
});
}
const result1 = process(input);
const result2 = finish();
if (result1 !== undefined && result2 !== undefined) return concat([result1, result2]);
return result1 !== undefined ? result1 : result2;
}
|
[
"function",
"transform",
"(",
"input",
",",
"process",
"=",
"(",
")",
"=>",
"undefined",
",",
"finish",
"=",
"(",
")",
"=>",
"undefined",
")",
"{",
"if",
"(",
"isStream",
"(",
"input",
")",
")",
"{",
"return",
"transformRaw",
"(",
"input",
",",
"{",
"async",
"transform",
"(",
"value",
",",
"controller",
")",
"{",
"try",
"{",
"const",
"result",
"=",
"await",
"process",
"(",
"value",
")",
";",
"if",
"(",
"result",
"!==",
"undefined",
")",
"controller",
".",
"enqueue",
"(",
"result",
")",
";",
"}",
"catch",
"(",
"e",
")",
"{",
"controller",
".",
"error",
"(",
"e",
")",
";",
"}",
"}",
",",
"async",
"flush",
"(",
"controller",
")",
"{",
"try",
"{",
"const",
"result",
"=",
"await",
"finish",
"(",
")",
";",
"if",
"(",
"result",
"!==",
"undefined",
")",
"controller",
".",
"enqueue",
"(",
"result",
")",
";",
"}",
"catch",
"(",
"e",
")",
"{",
"controller",
".",
"error",
"(",
"e",
")",
";",
"}",
"}",
"}",
")",
";",
"}",
"const",
"result1",
"=",
"process",
"(",
"input",
")",
";",
"const",
"result2",
"=",
"finish",
"(",
")",
";",
"if",
"(",
"result1",
"!==",
"undefined",
"&&",
"result2",
"!==",
"undefined",
")",
"return",
"concat",
"(",
"[",
"result1",
",",
"result2",
"]",
")",
";",
"return",
"result1",
"!==",
"undefined",
"?",
"result1",
":",
"result2",
";",
"}"
] |
Transform a stream using helper functions which are called on each chunk, and on stream close, respectively.
@param {ReadableStream|Uint8array|String} input
@param {Function} process
@param {Function} finish
@returns {ReadableStream|Uint8array|String}
|
[
"Transform",
"a",
"stream",
"using",
"helper",
"functions",
"which",
"are",
"called",
"on",
"each",
"chunk",
"and",
"on",
"stream",
"close",
"respectively",
"."
] |
84a497715c9df271a673f8616318264ab42ab3cc
|
https://github.com/openpgpjs/web-stream-tools/blob/84a497715c9df271a673f8616318264ab42ab3cc/lib/streams.js#L173-L198
|
train
|
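A sketch showing that transform also accepts plain values, not just streams; transform, toStream and readToEnd are assumed to be in scope.

transform('abc', s => s.toUpperCase(), () => '!');  // 'ABC!' (process result + finish result)
// With a stream input the same callbacks run per chunk / on close instead:
// readToEnd(transform(toStream('abc'), s => s.toUpperCase()))  ->  'ABC'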
openpgpjs/web-stream-tools
|
lib/streams.js
|
transformPair
|
function transformPair(input, fn) {
let incomingTransformController;
const incoming = new TransformStream({
start(controller) {
incomingTransformController = controller;
}
});
const pipeDonePromise = pipe(input, incoming.writable);
const outgoing = transformWithCancel(async function() {
incomingTransformController.error(new Error('Readable side was canceled.'));
await pipeDonePromise;
await new Promise(setTimeout);
});
fn(incoming.readable, outgoing.writable);
return outgoing.readable;
}
|
javascript
|
function transformPair(input, fn) {
let incomingTransformController;
const incoming = new TransformStream({
start(controller) {
incomingTransformController = controller;
}
});
const pipeDonePromise = pipe(input, incoming.writable);
const outgoing = transformWithCancel(async function() {
incomingTransformController.error(new Error('Readable side was canceled.'));
await pipeDonePromise;
await new Promise(setTimeout);
});
fn(incoming.readable, outgoing.writable);
return outgoing.readable;
}
|
[
"function",
"transformPair",
"(",
"input",
",",
"fn",
")",
"{",
"let",
"incomingTransformController",
";",
"const",
"incoming",
"=",
"new",
"TransformStream",
"(",
"{",
"start",
"(",
"controller",
")",
"{",
"incomingTransformController",
"=",
"controller",
";",
"}",
"}",
")",
";",
"const",
"pipeDonePromise",
"=",
"pipe",
"(",
"input",
",",
"incoming",
".",
"writable",
")",
";",
"const",
"outgoing",
"=",
"transformWithCancel",
"(",
"async",
"function",
"(",
")",
"{",
"incomingTransformController",
".",
"error",
"(",
"new",
"Error",
"(",
"'Readable side was canceled.'",
")",
")",
";",
"await",
"pipeDonePromise",
";",
"await",
"new",
"Promise",
"(",
"setTimeout",
")",
";",
"}",
")",
";",
"fn",
"(",
"incoming",
".",
"readable",
",",
"outgoing",
".",
"writable",
")",
";",
"return",
"outgoing",
".",
"readable",
";",
"}"
] |
Transform a stream using a helper function which is passed a readable and a writable stream.
This function also maintains the possibility to cancel the input stream,
and does so on cancelation of the output stream, despite cancelation
normally being impossible when the input stream is being read from.
@param {ReadableStream|Uint8array|String} input
@param {Function} fn
@returns {ReadableStream}
|
[
"Transform",
"a",
"stream",
"using",
"a",
"helper",
"function",
"which",
"is",
"passed",
"a",
"readable",
"and",
"a",
"writable",
"stream",
".",
"This",
"function",
"also",
"maintains",
"the",
"possibility",
"to",
"cancel",
"the",
"input",
"stream",
"and",
"does",
"so",
"on",
"cancelation",
"of",
"the",
"output",
"stream",
"despite",
"cancelation",
"normally",
"being",
"impossible",
"when",
"the",
"input",
"stream",
"is",
"being",
"read",
"from",
"."
] |
84a497715c9df271a673f8616318264ab42ab3cc
|
https://github.com/openpgpjs/web-stream-tools/blob/84a497715c9df271a673f8616318264ab42ab3cc/lib/streams.js#L209-L226
|
train
|
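A sketch of transformPair that manually copies every chunk from the readable side to the writable side. It uses the native readable.getReader() plus the getWriter helper from the earlier record; toStream and readToEnd are also assumed to be in scope.

(async () => {
  const out = transformPair(toStream('abc'), async (readable, writable) => {
    const reader = readable.getReader();
    const writer = getWriter(writable);
    let result;
    while (!(result = await reader.read()).done) {
      await writer.write(result.value);
    }
    await writer.close();
  });
  console.log(await readToEnd(out)); // 'abc'
})();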
openpgpjs/web-stream-tools
|
lib/streams.js
|
passiveClone
|
function passiveClone(input) {
if (isStream(input)) {
return new ReadableStream({
start(controller) {
const transformed = transformPair(input, async (readable, writable) => {
const reader = getReader(readable);
const writer = getWriter(writable);
try {
while (true) {
await writer.ready;
const { done, value } = await reader.read();
if (done) {
try { controller.close(); } catch(e) {}
await writer.close();
return;
}
try { controller.enqueue(value); } catch(e) {}
await writer.write(value);
}
} catch(e) {
controller.error(e);
await writer.abort(e);
}
});
overwrite(input, transformed);
}
});
}
return slice(input);
}
|
javascript
|
function passiveClone(input) {
if (isStream(input)) {
return new ReadableStream({
start(controller) {
const transformed = transformPair(input, async (readable, writable) => {
const reader = getReader(readable);
const writer = getWriter(writable);
try {
while (true) {
await writer.ready;
const { done, value } = await reader.read();
if (done) {
try { controller.close(); } catch(e) {}
await writer.close();
return;
}
try { controller.enqueue(value); } catch(e) {}
await writer.write(value);
}
} catch(e) {
controller.error(e);
await writer.abort(e);
}
});
overwrite(input, transformed);
}
});
}
return slice(input);
}
|
[
"function",
"passiveClone",
"(",
"input",
")",
"{",
"if",
"(",
"isStream",
"(",
"input",
")",
")",
"{",
"return",
"new",
"ReadableStream",
"(",
"{",
"start",
"(",
"controller",
")",
"{",
"const",
"transformed",
"=",
"transformPair",
"(",
"input",
",",
"async",
"(",
"readable",
",",
"writable",
")",
"=>",
"{",
"const",
"reader",
"=",
"getReader",
"(",
"readable",
")",
";",
"const",
"writer",
"=",
"getWriter",
"(",
"writable",
")",
";",
"try",
"{",
"while",
"(",
"true",
")",
"{",
"await",
"writer",
".",
"ready",
";",
"const",
"{",
"done",
",",
"value",
"}",
"=",
"await",
"reader",
".",
"read",
"(",
")",
";",
"if",
"(",
"done",
")",
"{",
"try",
"{",
"controller",
".",
"close",
"(",
")",
";",
"}",
"catch",
"(",
"e",
")",
"{",
"}",
"await",
"writer",
".",
"close",
"(",
")",
";",
"return",
";",
"}",
"try",
"{",
"controller",
".",
"enqueue",
"(",
"value",
")",
";",
"}",
"catch",
"(",
"e",
")",
"{",
"}",
"await",
"writer",
".",
"write",
"(",
"value",
")",
";",
"}",
"}",
"catch",
"(",
"e",
")",
"{",
"controller",
".",
"error",
"(",
"e",
")",
";",
"await",
"writer",
".",
"abort",
"(",
"e",
")",
";",
"}",
"}",
")",
";",
"overwrite",
"(",
"input",
",",
"transformed",
")",
";",
"}",
"}",
")",
";",
"}",
"return",
"slice",
"(",
"input",
")",
";",
"}"
] |
Clone a Stream for reading it twice. Data will arrive at the same rate as the input stream is being read.
Reading from the clone will NOT pull from the input stream. Data only arrives when reading the input stream.
The input stream will NOT be canceled if the clone is canceled; it is only canceled when the input stream itself is canceled.
If the input stream is canceled, the clone will be errored.
@param {ReadableStream|Uint8array|String} input
@returns {ReadableStream|Uint8array|String} cloned input
|
[
"Clone",
"a",
"Stream",
"for",
"reading",
"it",
"twice",
".",
"Data",
"will",
"arrive",
"at",
"the",
"same",
"rate",
"as",
"the",
"input",
"stream",
"is",
"being",
"read",
".",
"Reading",
"from",
"the",
"clone",
"will",
"NOT",
"pull",
"from",
"the",
"input",
"stream",
".",
"Data",
"only",
"arrives",
"when",
"reading",
"the",
"input",
"stream",
".",
"The",
"input",
"stream",
"will",
"NOT",
"be",
"canceled",
"if",
"the",
"clone",
"is",
"canceled",
"only",
"if",
"the",
"input",
"stream",
"are",
"canceled",
".",
"If",
"the",
"input",
"stream",
"is",
"canceled",
"the",
"clone",
"will",
"be",
"errored",
"."
] |
84a497715c9df271a673f8616318264ab42ab3cc
|
https://github.com/openpgpjs/web-stream-tools/blob/84a497715c9df271a673f8616318264ab42ab3cc/lib/streams.js#L291-L320
|
train
|
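A sketch of passiveClone's documented behavior: the clone only receives data while the original stream is being read. This assumes passiveClone, toStream and readToEnd are in scope and relies on the overwrite helper referenced above but not shown in this record.

(async () => {
  const original = toStream(new Uint8Array([1, 2, 3]));
  const clone = passiveClone(original);
  const fromOriginal = await readToEnd(original); // Uint8Array [1, 2, 3]
  const fromClone = await readToEnd(clone);       // same bytes, mirrored while reading above
})();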
openpgpjs/web-stream-tools
|
lib/streams.js
|
slice
|
function slice(input, begin=0, end=Infinity) {
if (isStream(input)) {
if (begin >= 0 && end >= 0) {
let bytesRead = 0;
return transformRaw(input, {
transform(value, controller) {
if (bytesRead < end) {
if (bytesRead + value.length >= begin) {
controller.enqueue(slice(value, Math.max(begin - bytesRead, 0), end - bytesRead));
}
bytesRead += value.length;
} else {
controller.terminate();
}
}
});
}
if (begin < 0 && (end < 0 || end === Infinity)) {
let lastBytes = [];
return transform(input, value => {
if (value.length >= -begin) lastBytes = [value];
else lastBytes.push(value);
}, () => slice(concat(lastBytes), begin, end));
}
if (begin === 0 && end < 0) {
let lastBytes;
return transform(input, value => {
const returnValue = lastBytes ? concat([lastBytes, value]) : value;
if (returnValue.length >= -end) {
lastBytes = slice(returnValue, end);
return slice(returnValue, begin, end);
} else {
lastBytes = returnValue;
}
});
}
console.warn(`stream.slice(input, ${begin}, ${end}) not implemented efficiently.`);
return fromAsync(async () => slice(await readToEnd(input), begin, end));
}
if (input[externalBuffer]) {
input = concat(input[externalBuffer].concat([input]));
}
if (isUint8Array(input) && !(NodeBuffer && NodeBuffer.isBuffer(input))) {
if (end === Infinity) end = input.length;
return input.subarray(begin, end);
}
return input.slice(begin, end);
}
|
javascript
|
function slice(input, begin=0, end=Infinity) {
if (isStream(input)) {
if (begin >= 0 && end >= 0) {
let bytesRead = 0;
return transformRaw(input, {
transform(value, controller) {
if (bytesRead < end) {
if (bytesRead + value.length >= begin) {
controller.enqueue(slice(value, Math.max(begin - bytesRead, 0), end - bytesRead));
}
bytesRead += value.length;
} else {
controller.terminate();
}
}
});
}
if (begin < 0 && (end < 0 || end === Infinity)) {
let lastBytes = [];
return transform(input, value => {
if (value.length >= -begin) lastBytes = [value];
else lastBytes.push(value);
}, () => slice(concat(lastBytes), begin, end));
}
if (begin === 0 && end < 0) {
let lastBytes;
return transform(input, value => {
const returnValue = lastBytes ? concat([lastBytes, value]) : value;
if (returnValue.length >= -end) {
lastBytes = slice(returnValue, end);
return slice(returnValue, begin, end);
} else {
lastBytes = returnValue;
}
});
}
console.warn(`stream.slice(input, ${begin}, ${end}) not implemented efficiently.`);
return fromAsync(async () => slice(await readToEnd(input), begin, end));
}
if (input[externalBuffer]) {
input = concat(input[externalBuffer].concat([input]));
}
if (isUint8Array(input) && !(NodeBuffer && NodeBuffer.isBuffer(input))) {
if (end === Infinity) end = input.length;
return input.subarray(begin, end);
}
return input.slice(begin, end);
}
|
[
"function",
"slice",
"(",
"input",
",",
"begin",
"=",
"0",
",",
"end",
"=",
"Infinity",
")",
"{",
"if",
"(",
"isStream",
"(",
"input",
")",
")",
"{",
"if",
"(",
"begin",
">=",
"0",
"&&",
"end",
">=",
"0",
")",
"{",
"let",
"bytesRead",
"=",
"0",
";",
"return",
"transformRaw",
"(",
"input",
",",
"{",
"transform",
"(",
"value",
",",
"controller",
")",
"{",
"if",
"(",
"bytesRead",
"<",
"end",
")",
"{",
"if",
"(",
"bytesRead",
"+",
"value",
".",
"length",
">=",
"begin",
")",
"{",
"controller",
".",
"enqueue",
"(",
"slice",
"(",
"value",
",",
"Math",
".",
"max",
"(",
"begin",
"-",
"bytesRead",
",",
"0",
")",
",",
"end",
"-",
"bytesRead",
")",
")",
";",
"}",
"bytesRead",
"+=",
"value",
".",
"length",
";",
"}",
"else",
"{",
"controller",
".",
"terminate",
"(",
")",
";",
"}",
"}",
"}",
")",
";",
"}",
"if",
"(",
"begin",
"<",
"0",
"&&",
"(",
"end",
"<",
"0",
"||",
"end",
"===",
"Infinity",
")",
")",
"{",
"let",
"lastBytes",
"=",
"[",
"]",
";",
"return",
"transform",
"(",
"input",
",",
"value",
"=>",
"{",
"if",
"(",
"value",
".",
"length",
">=",
"-",
"begin",
")",
"lastBytes",
"=",
"[",
"value",
"]",
";",
"else",
"lastBytes",
".",
"push",
"(",
"value",
")",
";",
"}",
",",
"(",
")",
"=>",
"slice",
"(",
"concat",
"(",
"lastBytes",
")",
",",
"begin",
",",
"end",
")",
")",
";",
"}",
"if",
"(",
"begin",
"===",
"0",
"&&",
"end",
"<",
"0",
")",
"{",
"let",
"lastBytes",
";",
"return",
"transform",
"(",
"input",
",",
"value",
"=>",
"{",
"const",
"returnValue",
"=",
"lastBytes",
"?",
"concat",
"(",
"[",
"lastBytes",
",",
"value",
"]",
")",
":",
"value",
";",
"if",
"(",
"returnValue",
".",
"length",
">=",
"-",
"end",
")",
"{",
"lastBytes",
"=",
"slice",
"(",
"returnValue",
",",
"end",
")",
";",
"return",
"slice",
"(",
"returnValue",
",",
"begin",
",",
"end",
")",
";",
"}",
"else",
"{",
"lastBytes",
"=",
"returnValue",
";",
"}",
"}",
")",
";",
"}",
"console",
".",
"warn",
"(",
"`",
"${",
"begin",
"}",
"${",
"end",
"}",
"`",
")",
";",
"return",
"fromAsync",
"(",
"async",
"(",
")",
"=>",
"slice",
"(",
"await",
"readToEnd",
"(",
"input",
")",
",",
"begin",
",",
"end",
")",
")",
";",
"}",
"if",
"(",
"input",
"[",
"externalBuffer",
"]",
")",
"{",
"input",
"=",
"concat",
"(",
"input",
"[",
"externalBuffer",
"]",
".",
"concat",
"(",
"[",
"input",
"]",
")",
")",
";",
"}",
"if",
"(",
"isUint8Array",
"(",
"input",
")",
"&&",
"!",
"(",
"NodeBuffer",
"&&",
"NodeBuffer",
".",
"isBuffer",
"(",
"input",
")",
")",
")",
"{",
"if",
"(",
"end",
"===",
"Infinity",
")",
"end",
"=",
"input",
".",
"length",
";",
"return",
"input",
".",
"subarray",
"(",
"begin",
",",
"end",
")",
";",
"}",
"return",
"input",
".",
"slice",
"(",
"begin",
",",
"end",
")",
";",
"}"
] |
Return a stream pointing to a part of the input stream.
@param {ReadableStream|Uint8array|String} input
@returns {ReadableStream|Uint8array|String} clone
|
[
"Return",
"a",
"stream",
"pointing",
"to",
"a",
"part",
"of",
"the",
"input",
"stream",
"."
] |
84a497715c9df271a673f8616318264ab42ab3cc
|
https://github.com/openpgpjs/web-stream-tools/blob/84a497715c9df271a673f8616318264ab42ab3cc/lib/streams.js#L348-L395
|
train
|
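A sketch of slice over the different input types it supports; slice, toStream and readToEnd are assumed to be in scope.

slice(new Uint8Array([1, 2, 3, 4]), 1, 3);  // Uint8Array [2, 3]
slice('abcdef', -3);                        // 'def'
// With a stream input, a stream covering the same byte range is returned:
// readToEnd(slice(toStream(new Uint8Array([1, 2, 3, 4])), 1, 3))  ->  Uint8Array [2, 3]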
openpgpjs/web-stream-tools
|
lib/streams.js
|
cancel
|
async function cancel(input, reason) {
if (isStream(input) && input.cancel) {
return input.cancel(reason);
}
}
|
javascript
|
async function cancel(input, reason) {
if (isStream(input) && input.cancel) {
return input.cancel(reason);
}
}
|
[
"async",
"function",
"cancel",
"(",
"input",
",",
"reason",
")",
"{",
"if",
"(",
"isStream",
"(",
"input",
")",
"&&",
"input",
".",
"cancel",
")",
"{",
"return",
"input",
".",
"cancel",
"(",
"reason",
")",
";",
"}",
"}"
] |
Cancel a stream.
@param {ReadableStream|Uint8array|String} input
@param {Any} reason
@returns {Promise<Any>} indicates when the stream has been canceled
@async
|
[
"Cancel",
"a",
"stream",
"."
] |
84a497715c9df271a673f8616318264ab42ab3cc
|
https://github.com/openpgpjs/web-stream-tools/blob/84a497715c9df271a673f8616318264ab42ab3cc/lib/streams.js#L418-L422
|
train
|
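A sketch of cancel: it resolves once the stream has been canceled and is a no-op for non-stream input. The helpers are assumed to be in scope.

(async () => {
  const stream = toStream(new Uint8Array([1, 2, 3]));
  await cancel(stream, new Error('no longer needed'));
  await cancel('not a stream'); // resolves to undefined
})();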
openpgpjs/web-stream-tools
|
lib/streams.js
|
fromAsync
|
function fromAsync(fn) {
return new ReadableStream({
pull: async controller => {
try {
controller.enqueue(await fn());
controller.close();
} catch(e) {
controller.error(e);
}
}
});
}
|
javascript
|
function fromAsync(fn) {
return new ReadableStream({
pull: async controller => {
try {
controller.enqueue(await fn());
controller.close();
} catch(e) {
controller.error(e);
}
}
});
}
|
[
"function",
"fromAsync",
"(",
"fn",
")",
"{",
"return",
"new",
"ReadableStream",
"(",
"{",
"pull",
":",
"async",
"controller",
"=>",
"{",
"try",
"{",
"controller",
".",
"enqueue",
"(",
"await",
"fn",
"(",
")",
")",
";",
"controller",
".",
"close",
"(",
")",
";",
"}",
"catch",
"(",
"e",
")",
"{",
"controller",
".",
"error",
"(",
"e",
")",
";",
"}",
"}",
"}",
")",
";",
"}"
] |
Convert an async function to a Stream. When the function returns, its return value is enqueued to the stream.
@param {Function} fn
@returns {ReadableStream}
|
[
"Convert",
"an",
"async",
"function",
"to",
"a",
"Stream",
".",
"When",
"the",
"function",
"returns",
"its",
"return",
"value",
"is",
"enqueued",
"to",
"the",
"stream",
"."
] |
84a497715c9df271a673f8616318264ab42ab3cc
|
https://github.com/openpgpjs/web-stream-tools/blob/84a497715c9df271a673f8616318264ab42ab3cc/lib/streams.js#L429-L440
|
train
|
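A sketch of fromAsync: the async function's return value becomes the stream's single chunk. fromAsync and readToEnd are assumed to be in scope; the returned value below is only an example.

const lazy = fromAsync(async () => {
  // any async work here; its resolved value is enqueued as the only chunk
  return new Uint8Array([1, 2, 3]);
});
// readToEnd(lazy)  ->  Uint8Array [1, 2, 3]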
feedhenry/fh-mbaas-api
|
lib/db.js
|
mongoConnectionStringOS3
|
function mongoConnectionStringOS3(cb) {
debug('Running in OpenShift 3, requesting db connection string from MBaaS');
mbaasClient.app.databaseConnectionString({
"domain": config.fhmbaas.domain,
"environment": config.fhmbaas.environment
}, function retrieved(err, resp) {
if (err) {
return cb(err);
}
process.env.FH_MONGODB_CONN_URL = resp.url;
return cb(undefined, resp.url);
});
}
|
javascript
|
function mongoConnectionStringOS3(cb) {
debug('Running in OpenShift 3, requesting db connection string from MBaaS');
mbaasClient.app.databaseConnectionString({
"domain": config.fhmbaas.domain,
"environment": config.fhmbaas.environment
}, function retrieved(err, resp) {
if (err) {
return cb(err);
}
process.env.FH_MONGODB_CONN_URL = resp.url;
return cb(undefined, resp.url);
});
}
|
[
"function",
"mongoConnectionStringOS3",
"(",
"cb",
")",
"{",
"debug",
"(",
"'Running in OpenShift 3, requesting db connection string from MBaaS'",
")",
";",
"mbaasClient",
".",
"app",
".",
"databaseConnectionString",
"(",
"{",
"\"domain\"",
":",
"config",
".",
"fhmbaas",
".",
"domain",
",",
"\"environment\"",
":",
"config",
".",
"fhmbaas",
".",
"environment",
"}",
",",
"function",
"retrieved",
"(",
"err",
",",
"resp",
")",
"{",
"if",
"(",
"err",
")",
"{",
"return",
"cb",
"(",
"err",
")",
";",
"}",
"process",
".",
"env",
".",
"FH_MONGODB_CONN_URL",
"=",
"resp",
".",
"url",
";",
"return",
"cb",
"(",
"undefined",
",",
"resp",
".",
"url",
")",
";",
"}",
")",
";",
"}"
] |
Use the mbaas client to fetch the connection string, and set process.env.FH_MONGODB_CONN_URL to cache it
|
[
"use",
"mbaas",
"client",
"set",
"process",
".",
"env",
".",
"FH_MONGODB_CONN_URL",
"to",
"cache",
"it"
] |
3bd60e54a34f216d5f2d35a31f3e4b96ae19956e
|
https://github.com/feedhenry/fh-mbaas-api/blob/3bd60e54a34f216d5f2d35a31f3e4b96ae19956e/lib/db.js#L46-L59
|
train
|
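A hedged call sketch for mongoConnectionStringOS3 above. The function is module-internal and callback-style, so this only illustrates the calling convention.

mongoConnectionStringOS3(function(err, url) {
  if (err) {
    return console.error('could not retrieve MongoDB connection string', err);
  }
  // The function also caches the value on process.env.FH_MONGODB_CONN_URL.
  console.log('MongoDB connection string:', url);
});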
feedhenry/fh-mbaas-api
|
lib/forms/index.js
|
findFieldDefinition
|
function findFieldDefinition() {
var foundField;
if (!(params.fieldId || params.fieldCode)) {
return undefined;
}
//Iterating through each of the pages to find a matching field.
_.each(self.form.pages, function(page) {
_.each(page.fields, function(field) {
var fieldId = field._id;
var fieldCode = field.fieldCode;
if (fieldId === params.fieldId || fieldCode === params.fieldCode) {
foundField = field;
}
});
});
return foundField;
}
|
javascript
|
function findFieldDefinition() {
var foundField;
if (!(params.fieldId || params.fieldCode)) {
return undefined;
}
//Iterating through each of the pages to find a matching field.
_.each(self.form.pages, function(page) {
_.each(page.fields, function(field) {
var fieldId = field._id;
var fieldCode = field.fieldCode;
if (fieldId === params.fieldId || fieldCode === params.fieldCode) {
foundField = field;
}
});
});
return foundField;
}
|
[
"function",
"findFieldDefinition",
"(",
")",
"{",
"var",
"foundField",
";",
"if",
"(",
"!",
"(",
"params",
".",
"fieldId",
"||",
"params",
".",
"fieldCode",
")",
")",
"{",
"return",
"undefined",
";",
"}",
"_",
".",
"each",
"(",
"self",
".",
"form",
".",
"pages",
",",
"function",
"(",
"page",
")",
"{",
"_",
".",
"each",
"(",
"page",
".",
"fields",
",",
"function",
"(",
"field",
")",
"{",
"var",
"fieldId",
"=",
"field",
".",
"_id",
";",
"var",
"fieldCode",
"=",
"field",
".",
"fieldCode",
";",
"if",
"(",
"fieldId",
"===",
"params",
".",
"fieldId",
"||",
"fieldCode",
"===",
"params",
".",
"fieldCode",
")",
"{",
"foundField",
"=",
"field",
";",
"}",
"}",
")",
";",
"}",
")",
";",
"return",
"foundField",
";",
"}"
] |
Finding the JSON definition of a field to add data to.
|
[
"Finding",
"the",
"JSON",
"definition",
"of",
"a",
"field",
"to",
"add",
"data",
"to",
"."
] |
3bd60e54a34f216d5f2d35a31f3e4b96ae19956e
|
https://github.com/feedhenry/fh-mbaas-api/blob/3bd60e54a34f216d5f2d35a31f3e4b96ae19956e/lib/forms/index.js#L243-L262
|
train
|
feedhenry/fh-mbaas-api
|
lib/forms/index.js
|
processInputValue
|
function processInputValue() {
var value = params.value;
var index = params.index || 0;
var fieldType = field.type;
//Checking for a value.
if (typeof(value) === "undefined" || value === null) {
return "No value entered.";
}
/**
   * File-based fields (photo, signature and file) need to stream the file to the mongo server.
*/
if (fieldType === "photo" || fieldType === "signature" || fieldType === "file") {
//The stream must be a paused stream.
var fileURI = value.fileStream;
delete value.fileStream;
//It must be possible to stream the object to the database.
var isString=(typeof(fileURI) === "string");
if (!isString) {
return "Expected a string URI object when streaming a file-based field ";
}
if (!(value.fileName && value.fileSize && value.fileType)) {
return "Invalid file parameters. Params: " + JSON.stringify(value);
}
//Generating a random file hash name.
var hashName = "filePlaceHolder" + Date.now() + Math.floor(Math.random() * 10000000000000);
    var fileUpdateTime = Date.now(); // call Date.now() so a timestamp, not the function reference, is stored
self.filesToUpload[hashName] = {
fieldId: field._id,
fileStream: fileURI
};
value.hashName = hashName;
value.fileUpdateTime = fileUpdateTime;
}
self.fieldValues[field._id] = self.fieldValues[field._id] || [];
self.fieldValues[field._id][index] = value;
return undefined;
}
|
javascript
|
function processInputValue() {
var value = params.value;
var index = params.index || 0;
var fieldType = field.type;
//Checking for a value.
if (typeof(value) === "undefined" || value === null) {
return "No value entered.";
}
/**
   * File-based fields (photo, signature and file) need to stream the file to the mongo server.
*/
if (fieldType === "photo" || fieldType === "signature" || fieldType === "file") {
//The stream must be a paused stream.
var fileURI = value.fileStream;
delete value.fileStream;
//It must be possible to stream the object to the database.
var isString=(typeof(fileURI) === "string");
if (!isString) {
return "Expected a string URI object when streaming a file-based field ";
}
if (!(value.fileName && value.fileSize && value.fileType)) {
return "Invalid file parameters. Params: " + JSON.stringify(value);
}
//Generating a random file hash name.
var hashName = "filePlaceHolder" + Date.now() + Math.floor(Math.random() * 10000000000000);
    var fileUpdateTime = Date.now(); // call Date.now() so a timestamp, not the function reference, is stored
self.filesToUpload[hashName] = {
fieldId: field._id,
fileStream: fileURI
};
value.hashName = hashName;
value.fileUpdateTime = fileUpdateTime;
}
self.fieldValues[field._id] = self.fieldValues[field._id] || [];
self.fieldValues[field._id][index] = value;
return undefined;
}
|
[
"function",
"processInputValue",
"(",
")",
"{",
"var",
"value",
"=",
"params",
".",
"value",
";",
"var",
"index",
"=",
"params",
".",
"index",
"||",
"0",
";",
"var",
"fieldType",
"=",
"field",
".",
"type",
";",
"if",
"(",
"typeof",
"(",
"value",
")",
"===",
"\"undefined\"",
"||",
"value",
"===",
"null",
")",
"{",
"return",
"\"No value entered.\"",
";",
"}",
"if",
"(",
"fieldType",
"===",
"\"photo\"",
"||",
"fieldType",
"===",
"\"signature\"",
"||",
"fieldType",
"===",
"\"file\"",
")",
"{",
"var",
"fileURI",
"=",
"value",
".",
"fileStream",
";",
"delete",
"value",
".",
"fileStream",
";",
"var",
"isString",
"=",
"(",
"typeof",
"(",
"fileURI",
")",
"===",
"\"string\"",
")",
";",
"if",
"(",
"!",
"isString",
")",
"{",
"return",
"\"Expected a string URI object when streaming a file-based field \"",
";",
"}",
"if",
"(",
"!",
"(",
"value",
".",
"fileName",
"&&",
"value",
".",
"fileSize",
"&&",
"value",
".",
"fileType",
")",
")",
"{",
"return",
"\"Invalid file parameters. Params: \"",
"+",
"JSON",
".",
"stringify",
"(",
"value",
")",
";",
"}",
"var",
"hashName",
"=",
"\"filePlaceHolder\"",
"+",
"Date",
".",
"now",
"(",
")",
"+",
"Math",
".",
"floor",
"(",
"Math",
".",
"random",
"(",
")",
"*",
"10000000000000",
")",
";",
"var",
"fileUpdateTime",
"=",
"Date",
".",
"now",
";",
"self",
".",
"filesToUpload",
"[",
"hashName",
"]",
"=",
"{",
"fieldId",
":",
"field",
".",
"_id",
",",
"fileStream",
":",
"fileURI",
"}",
";",
"value",
".",
"hashName",
"=",
"hashName",
";",
"value",
".",
"fileUpdateTime",
"=",
"fileUpdateTime",
";",
"}",
"self",
".",
"fieldValues",
"[",
"field",
".",
"_id",
"]",
"=",
"self",
".",
"fieldValues",
"[",
"field",
".",
"_id",
"]",
"||",
"[",
"]",
";",
"self",
".",
"fieldValues",
"[",
"field",
".",
"_id",
"]",
"[",
"index",
"]",
"=",
"value",
";",
"return",
"undefined",
";",
"}"
] |
Adding a value to an index.
Most inputs are validated by the rules engine, but file inputs need to be a file location on the local app.
It is advisable to download the file to local storage first.
@returns {error/undefined}
|
[
"Adding",
"a",
"value",
"to",
"an",
"index",
".",
"Most",
"inputs",
"are",
"validated",
"by",
"the",
"rules",
"engine",
"but",
"file",
"inputs",
"need",
"to",
"be",
"a",
"file",
"location",
"on",
"the",
"local",
"app",
".",
"It",
"is",
"advisible",
"to",
"download",
"the",
"file",
"to",
"local",
"storage",
"first",
"."
] |
3bd60e54a34f216d5f2d35a31f3e4b96ae19956e
|
https://github.com/feedhenry/fh-mbaas-api/blob/3bd60e54a34f216d5f2d35a31f3e4b96ae19956e/lib/forms/index.js#L270-L315
|
train
|
feedhenry/fh-mbaas-api
|
lib/api.js
|
FHapi
|
function FHapi(cfg) {
var api = {
getVersion: function() {
//Getting The Version of fh-mbaas-api
return packageJSON.version;
},
cache: require('./cache')(cfg),
db: require('./db')(cfg),
events: new EventEmitter(),
forms: require('./forms')(cfg),
log: false,
stringify: false,
parse: false,
push: require('./push')(cfg),
call: require('./call')(cfg),
util: false,
redisPort: cfg.redis.port || '6379',
redisHost: cfg.redis.host || 'localhost',
session: require('./session')(cfg),
stats: require('./stats')(cfg),
sync: sync,
act: require('./act')(cfg),
service: require('./act')(cfg),
sec: sec.security,
auth: require('./auth')(cfg),
host: require('./host'),
permission_map: require('fh-db').permission_map,
hash: function(opts, callback) {
var p = {
act: 'hash',
params: opts
};
sec.security(p, callback);
},
web: require('./web')(cfg)
};
api.sync.setEventEmitter(api.events);
var redisUrl = 'redis://' + api.redisHost + ':' + api.redisPort;
getDBUrl(api, function(err, connectionString) {
if (err) {
console.warn('Warning! Could not get a mongodb connection string. Sync will not work. (', err, ')');
return;
} else if (!connectionString) {
console.warn('Warning! Could not get a mongodb connection string. Sync will not work. If running in a Dynofarm/FeedHenry MBaaS, ensure the database is upgraded');
return;
}
var poolSize = parseInt(process.env.SYNC_MONGODB_POOLSIZE) || 50;
api.sync.connect(connectionString, {poolSize: poolSize}, redisUrl, function(err) {
if (err) {
console.error('Error starting the sync server (', err, ')');
}
});
});
api.mbaasExpress = function(opts) {
opts = opts || {};
opts.api = api;
return require('fh-mbaas-express')(opts);
};
api.shutdown = function(cb) {
// Sync service has a setInterval loop running which will prevent fh-mbaas-api from exiting cleanly.
// Call stopAll to ensure Sync exits clenaly.
api.sync.stopAll(cb);
};
return api;
}
|
javascript
|
function FHapi(cfg) {
var api = {
getVersion: function() {
//Getting The Version of fh-mbaas-api
return packageJSON.version;
},
cache: require('./cache')(cfg),
db: require('./db')(cfg),
events: new EventEmitter(),
forms: require('./forms')(cfg),
log: false,
stringify: false,
parse: false,
push: require('./push')(cfg),
call: require('./call')(cfg),
util: false,
redisPort: cfg.redis.port || '6379',
redisHost: cfg.redis.host || 'localhost',
session: require('./session')(cfg),
stats: require('./stats')(cfg),
sync: sync,
act: require('./act')(cfg),
service: require('./act')(cfg),
sec: sec.security,
auth: require('./auth')(cfg),
host: require('./host'),
permission_map: require('fh-db').permission_map,
hash: function(opts, callback) {
var p = {
act: 'hash',
params: opts
};
sec.security(p, callback);
},
web: require('./web')(cfg)
};
api.sync.setEventEmitter(api.events);
var redisUrl = 'redis://' + api.redisHost + ':' + api.redisPort;
getDBUrl(api, function(err, connectionString) {
if (err) {
console.warn('Warning! Could not get a mongodb connection string. Sync will not work. (', err, ')');
return;
} else if (!connectionString) {
console.warn('Warning! Could not get a mongodb connection string. Sync will not work. If running in a Dynofarm/FeedHenry MBaaS, ensure the database is upgraded');
return;
}
var poolSize = parseInt(process.env.SYNC_MONGODB_POOLSIZE) || 50;
api.sync.connect(connectionString, {poolSize: poolSize}, redisUrl, function(err) {
if (err) {
console.error('Error starting the sync server (', err, ')');
}
});
});
api.mbaasExpress = function(opts) {
opts = opts || {};
opts.api = api;
return require('fh-mbaas-express')(opts);
};
api.shutdown = function(cb) {
// Sync service has a setInterval loop running which will prevent fh-mbaas-api from exiting cleanly.
    // Call stopAll to ensure Sync exits cleanly.
api.sync.stopAll(cb);
};
return api;
}
|
[
"function",
"FHapi",
"(",
"cfg",
")",
"{",
"var",
"api",
"=",
"{",
"getVersion",
":",
"function",
"(",
")",
"{",
"return",
"packageJSON",
".",
"version",
";",
"}",
",",
"cache",
":",
"require",
"(",
"'./cache'",
")",
"(",
"cfg",
")",
",",
"db",
":",
"require",
"(",
"'./db'",
")",
"(",
"cfg",
")",
",",
"events",
":",
"new",
"EventEmitter",
"(",
")",
",",
"forms",
":",
"require",
"(",
"'./forms'",
")",
"(",
"cfg",
")",
",",
"log",
":",
"false",
",",
"stringify",
":",
"false",
",",
"parse",
":",
"false",
",",
"push",
":",
"require",
"(",
"'./push'",
")",
"(",
"cfg",
")",
",",
"call",
":",
"require",
"(",
"'./call'",
")",
"(",
"cfg",
")",
",",
"util",
":",
"false",
",",
"redisPort",
":",
"cfg",
".",
"redis",
".",
"port",
"||",
"'6379'",
",",
"redisHost",
":",
"cfg",
".",
"redis",
".",
"host",
"||",
"'localhost'",
",",
"session",
":",
"require",
"(",
"'./session'",
")",
"(",
"cfg",
")",
",",
"stats",
":",
"require",
"(",
"'./stats'",
")",
"(",
"cfg",
")",
",",
"sync",
":",
"sync",
",",
"act",
":",
"require",
"(",
"'./act'",
")",
"(",
"cfg",
")",
",",
"service",
":",
"require",
"(",
"'./act'",
")",
"(",
"cfg",
")",
",",
"sec",
":",
"sec",
".",
"security",
",",
"auth",
":",
"require",
"(",
"'./auth'",
")",
"(",
"cfg",
")",
",",
"host",
":",
"require",
"(",
"'./host'",
")",
",",
"permission_map",
":",
"require",
"(",
"'fh-db'",
")",
".",
"permission_map",
",",
"hash",
":",
"function",
"(",
"opts",
",",
"callback",
")",
"{",
"var",
"p",
"=",
"{",
"act",
":",
"'hash'",
",",
"params",
":",
"opts",
"}",
";",
"sec",
".",
"security",
"(",
"p",
",",
"callback",
")",
";",
"}",
",",
"web",
":",
"require",
"(",
"'./web'",
")",
"(",
"cfg",
")",
"}",
";",
"api",
".",
"sync",
".",
"setEventEmitter",
"(",
"api",
".",
"events",
")",
";",
"var",
"redisUrl",
"=",
"'redis://'",
"+",
"api",
".",
"redisHost",
"+",
"':'",
"+",
"api",
".",
"redisPort",
";",
"getDBUrl",
"(",
"api",
",",
"function",
"(",
"err",
",",
"connectionString",
")",
"{",
"if",
"(",
"err",
")",
"{",
"console",
".",
"warn",
"(",
"'Warning! Could not get a mongodb connection string. Sync will not work. ('",
",",
"err",
",",
"')'",
")",
";",
"return",
";",
"}",
"else",
"if",
"(",
"!",
"connectionString",
")",
"{",
"console",
".",
"warn",
"(",
"'Warning! Could not get a mongodb connection string. Sync will not work. If running in a Dynofarm/FeedHenry MBaaS, ensure the database is upgraded'",
")",
";",
"return",
";",
"}",
"var",
"poolSize",
"=",
"parseInt",
"(",
"process",
".",
"env",
".",
"SYNC_MONGODB_POOLSIZE",
")",
"||",
"50",
";",
"api",
".",
"sync",
".",
"connect",
"(",
"connectionString",
",",
"{",
"poolSize",
":",
"poolSize",
"}",
",",
"redisUrl",
",",
"function",
"(",
"err",
")",
"{",
"if",
"(",
"err",
")",
"{",
"console",
".",
"error",
"(",
"'Error starting the sync server ('",
",",
"err",
",",
"')'",
")",
";",
"}",
"}",
")",
";",
"}",
")",
";",
"api",
".",
"mbaasExpress",
"=",
"function",
"(",
"opts",
")",
"{",
"opts",
"=",
"opts",
"||",
"{",
"}",
";",
"opts",
".",
"api",
"=",
"api",
";",
"return",
"require",
"(",
"'fh-mbaas-express'",
")",
"(",
"opts",
")",
";",
"}",
";",
"api",
".",
"shutdown",
"=",
"function",
"(",
"cb",
")",
"{",
"api",
".",
"sync",
".",
"stopAll",
"(",
"cb",
")",
";",
"}",
";",
"return",
"api",
";",
"}"
] |
Main FHapi constructor function.
|
[
"Main",
"FHapi",
"constructor",
"function",
".."
] |
3bd60e54a34f216d5f2d35a31f3e4b96ae19956e
|
https://github.com/feedhenry/fh-mbaas-api/blob/3bd60e54a34f216d5f2d35a31f3e4b96ae19956e/lib/api.js#L29-L100
|
train
|
openpgpjs/web-stream-tools
|
lib/reader.js
|
Reader
|
function Reader(input) {
this.stream = input;
if (input[externalBuffer]) {
this[externalBuffer] = input[externalBuffer].slice();
}
let streamType = streams.isStream(input);
if (streamType === 'node') {
input = streams.nodeToWeb(input);
}
if (streamType) {
const reader = input.getReader();
this._read = reader.read.bind(reader);
this._releaseLock = () => {
reader.closed.catch(function() {});
reader.releaseLock();
};
return;
}
let doneReading = false;
this._read = async () => {
if (doneReading || doneReadingSet.has(input)) {
return { value: undefined, done: true };
}
doneReading = true;
return { value: input, done: false };
};
this._releaseLock = () => {
if (doneReading) {
try {
doneReadingSet.add(input);
} catch(e) {}
}
};
}
|
javascript
|
function Reader(input) {
this.stream = input;
if (input[externalBuffer]) {
this[externalBuffer] = input[externalBuffer].slice();
}
let streamType = streams.isStream(input);
if (streamType === 'node') {
input = streams.nodeToWeb(input);
}
if (streamType) {
const reader = input.getReader();
this._read = reader.read.bind(reader);
this._releaseLock = () => {
reader.closed.catch(function() {});
reader.releaseLock();
};
return;
}
let doneReading = false;
this._read = async () => {
if (doneReading || doneReadingSet.has(input)) {
return { value: undefined, done: true };
}
doneReading = true;
return { value: input, done: false };
};
this._releaseLock = () => {
if (doneReading) {
try {
doneReadingSet.add(input);
} catch(e) {}
}
};
}
|
[
"function",
"Reader",
"(",
"input",
")",
"{",
"this",
".",
"stream",
"=",
"input",
";",
"if",
"(",
"input",
"[",
"externalBuffer",
"]",
")",
"{",
"this",
"[",
"externalBuffer",
"]",
"=",
"input",
"[",
"externalBuffer",
"]",
".",
"slice",
"(",
")",
";",
"}",
"let",
"streamType",
"=",
"streams",
".",
"isStream",
"(",
"input",
")",
";",
"if",
"(",
"streamType",
"===",
"'node'",
")",
"{",
"input",
"=",
"streams",
".",
"nodeToWeb",
"(",
"input",
")",
";",
"}",
"if",
"(",
"streamType",
")",
"{",
"const",
"reader",
"=",
"input",
".",
"getReader",
"(",
")",
";",
"this",
".",
"_read",
"=",
"reader",
".",
"read",
".",
"bind",
"(",
"reader",
")",
";",
"this",
".",
"_releaseLock",
"=",
"(",
")",
"=>",
"{",
"reader",
".",
"closed",
".",
"catch",
"(",
"function",
"(",
")",
"{",
"}",
")",
";",
"reader",
".",
"releaseLock",
"(",
")",
";",
"}",
";",
"return",
";",
"}",
"let",
"doneReading",
"=",
"false",
";",
"this",
".",
"_read",
"=",
"async",
"(",
")",
"=>",
"{",
"if",
"(",
"doneReading",
"||",
"doneReadingSet",
".",
"has",
"(",
"input",
")",
")",
"{",
"return",
"{",
"value",
":",
"undefined",
",",
"done",
":",
"true",
"}",
";",
"}",
"doneReading",
"=",
"true",
";",
"return",
"{",
"value",
":",
"input",
",",
"done",
":",
"false",
"}",
";",
"}",
";",
"this",
".",
"_releaseLock",
"=",
"(",
")",
"=>",
"{",
"if",
"(",
"doneReading",
")",
"{",
"try",
"{",
"doneReadingSet",
".",
"add",
"(",
"input",
")",
";",
"}",
"catch",
"(",
"e",
")",
"{",
"}",
"}",
"}",
";",
"}"
] |
A wrapper class over the native ReadableStreamDefaultReader.
This additionally implements pushing back data on the stream, which
lets us implement peeking and a host of convenience functions.
It also lets you read data other than streams, such as a Uint8Array.
@class
|
[
"A",
"wrapper",
"class",
"over",
"the",
"native",
"ReadableStreamDefaultReader",
".",
"This",
"additionally",
"implements",
"pushing",
"back",
"data",
"on",
"the",
"stream",
"which",
"lets",
"us",
"implement",
"peeking",
"and",
"a",
"host",
"of",
"convenience",
"functions",
".",
"It",
"also",
"lets",
"you",
"read",
"data",
"other",
"than",
"streams",
"such",
"as",
"a",
"Uint8Array",
"."
] |
84a497715c9df271a673f8616318264ab42ab3cc
|
https://github.com/openpgpjs/web-stream-tools/blob/84a497715c9df271a673f8616318264ab42ab3cc/lib/reader.js#L13-L46
|
train
|
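A sketch of the Reader wrapper applied to a plain Uint8Array: the whole value is returned once, then the reader reports done. This assumes Reader.prototype.read(), which is defined elsewhere in lib/reader.js and not shown in this record; in practice readers are usually obtained via the library's getReader helper.

(async () => {
  const reader = new Reader(new Uint8Array([1, 2, 3]));
  console.log(await reader.read()); // { value: Uint8Array [1, 2, 3], done: false }
  console.log(await reader.read()); // { value: undefined, done: true }
})();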
rei/rei-cedar
|
build/vue-docs.js
|
createMarkdownTemplate
|
function createMarkdownTemplate(file, vueObj) {
let json2mdTemplate = [], mdTablesTemplate;
mdTablesTemplate = buildTables(vueObj)
if(mdTablesTemplate.length > 0) {
json2mdTemplate = json2mdTemplate.concat(mdTablesTemplate)
}
return json2md(json2mdTemplate)
}
|
javascript
|
function createMarkdownTemplate(file, vueObj) {
let json2mdTemplate = [], mdTablesTemplate;
mdTablesTemplate = buildTables(vueObj)
if(mdTablesTemplate.length > 0) {
json2mdTemplate = json2mdTemplate.concat(mdTablesTemplate)
}
return json2md(json2mdTemplate)
}
|
[
"function",
"createMarkdownTemplate",
"(",
"file",
",",
"vueObj",
")",
"{",
"let",
"json2mdTemplate",
"=",
"[",
"]",
",",
"mdTablesTemplate",
";",
"mdTablesTemplate",
"=",
"buildTables",
"(",
"vueObj",
")",
"if",
"(",
"mdTablesTemplate",
".",
"length",
">",
"0",
")",
"{",
"json2mdTemplate",
"=",
"json2mdTemplate",
".",
"concat",
"(",
"mdTablesTemplate",
")",
"}",
"return",
"json2md",
"(",
"json2mdTemplate",
")",
"}"
] |
take json object returned from vue-docgen-api and create markdown template
|
[
"take",
"json",
"object",
"returned",
"from",
"vue",
"-",
"docgen",
"-",
"api",
"and",
"create",
"markdown",
"template"
] |
5ddcce5ccda8fee41235483760332ad5e63c5455
|
https://github.com/rei/rei-cedar/blob/5ddcce5ccda8fee41235483760332ad5e63c5455/build/vue-docs.js#L173-L184
|
train
|
rei/rei-cedar
|
build/vue-docs.js
|
buildTables
|
function buildTables(vueObj) {
let updatedTemplate = [{h3: "<button class='title'>PROPS, METHODS, EVENTS, SLOTS</button>"}]
let mdTable
mdTable = tableFromProps(vueObj["props"])
if(mdTable != null) {
updatedTemplate.push(mdTable)
}
mdTable = tableFromMethods(vueObj["methods"])
if(mdTable != null) {
updatedTemplate.push(mdTable)
}
mdTable = tableFromEvents(vueObj["events"])
if(mdTable != null) {
updatedTemplate.push(mdTable)
}
mdTable = tableFromSlots(vueObj["slots"])
if(mdTable != null) {
updatedTemplate.push(mdTable)
}
return updatedTemplate.length > 1 ? updatedTemplate : []
}
|
javascript
|
function buildTables(vueObj) {
let updatedTemplate = [{h3: "<button class='title'>PROPS, METHODS, EVENTS, SLOTS</button>"}]
let mdTable
mdTable = tableFromProps(vueObj["props"])
if(mdTable != null) {
updatedTemplate.push(mdTable)
}
mdTable = tableFromMethods(vueObj["methods"])
if(mdTable != null) {
updatedTemplate.push(mdTable)
}
mdTable = tableFromEvents(vueObj["events"])
if(mdTable != null) {
updatedTemplate.push(mdTable)
}
mdTable = tableFromSlots(vueObj["slots"])
if(mdTable != null) {
updatedTemplate.push(mdTable)
}
return updatedTemplate.length > 1 ? updatedTemplate : []
}
|
[
"function",
"buildTables",
"(",
"vueObj",
")",
"{",
"let",
"updatedTemplate",
"=",
"[",
"{",
"h3",
":",
"\"<button class='title'>PROPS, METHODS, EVENTS, SLOTS</button>\"",
"}",
"]",
"let",
"mdTable",
"mdTable",
"=",
"tableFromProps",
"(",
"vueObj",
"[",
"\"props\"",
"]",
")",
"if",
"(",
"mdTable",
"!=",
"null",
")",
"{",
"updatedTemplate",
".",
"push",
"(",
"mdTable",
")",
"}",
"mdTable",
"=",
"tableFromMethods",
"(",
"vueObj",
"[",
"\"methods\"",
"]",
")",
"if",
"(",
"mdTable",
"!=",
"null",
")",
"{",
"updatedTemplate",
".",
"push",
"(",
"mdTable",
")",
"}",
"mdTable",
"=",
"tableFromEvents",
"(",
"vueObj",
"[",
"\"events\"",
"]",
")",
"if",
"(",
"mdTable",
"!=",
"null",
")",
"{",
"updatedTemplate",
".",
"push",
"(",
"mdTable",
")",
"}",
"mdTable",
"=",
"tableFromSlots",
"(",
"vueObj",
"[",
"\"slots\"",
"]",
")",
"if",
"(",
"mdTable",
"!=",
"null",
")",
"{",
"updatedTemplate",
".",
"push",
"(",
"mdTable",
")",
"}",
"return",
"updatedTemplate",
".",
"length",
">",
"1",
"?",
"updatedTemplate",
":",
"[",
"]",
"}"
] |
build tables for Vue props, methods, events, and slots
|
[
"build",
"tables",
"for",
"Vue",
"props",
"methods",
"events",
"and",
"slots"
] |
5ddcce5ccda8fee41235483760332ad5e63c5455
|
https://github.com/rei/rei-cedar/blob/5ddcce5ccda8fee41235483760332ad5e63c5455/build/vue-docs.js#L187-L212
|
train
|
rei/rei-cedar
|
build/vue-docs.js
|
tableFromProps
|
function tableFromProps(propsObj) {
const headers = ["Prop Name", "Type", "Default", "Require", "Description"]
let rows = []
// construct rows of table from object of properties
for(const prop in propsObj) {
// Don't document properties with `@ignore` tag
if (propsObj[prop].tags.ignore) {
continue
}
let cols = []
cols.push(`${prop}`) // property name
cols.push(propsObj[prop]["type"] ? propsObj[prop]["type"]["name"].replace(/\|/g, ',') : 'unknown') // type of the property
cols.push(propsObj[prop]["defaultValue"] ? propsObj[prop]["defaultValue"]["value"] : 'n/a') // property default value
cols.push(propsObj[prop]["required"] ? 'true' : 'false') // property is required
cols.push(`${propsObj[prop]["description"]}`) // description of the property
rows.push(cols)
}
return rows.length > 0 ? {table: {headers, rows}} : null
}
|
javascript
|
function tableFromProps(propsObj) {
const headers = ["Prop Name", "Type", "Default", "Require", "Description"]
let rows = []
// construct rows of table from object of properties
for(const prop in propsObj) {
// Don't document properties with `@ignore` tag
if (propsObj[prop].tags.ignore) {
continue
}
let cols = []
cols.push(`${prop}`) // property name
cols.push(propsObj[prop]["type"] ? propsObj[prop]["type"]["name"].replace(/\|/g, ',') : 'unknown') // type of the property
cols.push(propsObj[prop]["defaultValue"] ? propsObj[prop]["defaultValue"]["value"] : 'n/a') // property default value
cols.push(propsObj[prop]["required"] ? 'true' : 'false') // property is required
cols.push(`${propsObj[prop]["description"]}`) // description of the property
rows.push(cols)
}
return rows.length > 0 ? {table: {headers, rows}} : null
}
|
[
"function",
"tableFromProps",
"(",
"propsObj",
")",
"{",
"const",
"headers",
"=",
"[",
"\"Prop Name\"",
",",
"\"Type\"",
",",
"\"Default\"",
",",
"\"Require\"",
",",
"\"Description\"",
"]",
"let",
"rows",
"=",
"[",
"]",
"for",
"(",
"const",
"prop",
"in",
"propsObj",
")",
"{",
"if",
"(",
"propsObj",
"[",
"prop",
"]",
".",
"tags",
".",
"ignore",
")",
"{",
"continue",
"}",
"let",
"cols",
"=",
"[",
"]",
"cols",
".",
"push",
"(",
"`",
"${",
"prop",
"}",
"`",
")",
"cols",
".",
"push",
"(",
"propsObj",
"[",
"prop",
"]",
"[",
"\"type\"",
"]",
"?",
"propsObj",
"[",
"prop",
"]",
"[",
"\"type\"",
"]",
"[",
"\"name\"",
"]",
".",
"replace",
"(",
"/",
"\\|",
"/",
"g",
",",
"','",
")",
":",
"'unknown'",
")",
"cols",
".",
"push",
"(",
"propsObj",
"[",
"prop",
"]",
"[",
"\"defaultValue\"",
"]",
"?",
"propsObj",
"[",
"prop",
"]",
"[",
"\"defaultValue\"",
"]",
"[",
"\"value\"",
"]",
":",
"'n/a'",
")",
"cols",
".",
"push",
"(",
"propsObj",
"[",
"prop",
"]",
"[",
"\"required\"",
"]",
"?",
"'true'",
":",
"'false'",
")",
"cols",
".",
"push",
"(",
"`",
"${",
"propsObj",
"[",
"prop",
"]",
"[",
"\"description\"",
"]",
"}",
"`",
")",
"rows",
".",
"push",
"(",
"cols",
")",
"}",
"return",
"rows",
".",
"length",
">",
"0",
"?",
"{",
"table",
":",
"{",
"headers",
",",
"rows",
"}",
"}",
":",
"null",
"}"
] |
auxiliary function to create table from `props` property of json2md object
|
[
"auxilary",
"function",
"to",
"create",
"table",
"from",
"props",
"property",
"of",
"json2md",
"object"
] |
5ddcce5ccda8fee41235483760332ad5e63c5455
|
https://github.com/rei/rei-cedar/blob/5ddcce5ccda8fee41235483760332ad5e63c5455/build/vue-docs.js#L215-L237
|
train
|
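A hedged usage sketch for tableFromProps, using a hand-written vue-docgen-api-style props object (the prop name and values are hypothetical); note that every entry needs a tags object because the function reads propsObj[prop].tags.ignore:

const sampleProps = {
  size: {
    tags: {},                              // no @ignore tag, so it gets documented
    type: { name: 'string' },
    defaultValue: { value: "'medium'" },
    required: false,
    description: 'Size of the button'
  }
}
const propsTable = tableFromProps(sampleProps)
// => { table: { headers: [...5 headers], rows: [['size', 'string', "'medium'", 'false', 'Size of the button']] } }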
rei/rei-cedar
|
build/vue-docs.js
|
tableFromMethods
|
function tableFromMethods(methodsArr) {
const headers = ["Method Name", "Type", "Parameters", "Description"]
let rows = []
// construct rows of table array of methods
methodsArr.forEach((method) => {
let cols = []
cols.push(method["name"]) // method name
let paramList = ''
method["params"].forEach((param) => {
paramList += `${param["name"]}: ${param["type"]["name"]} - ${param["description"]}\n`
})
cols.push(paramList) // list of method parameters
cols.push(`${method["description"]}`) // description of the method
rows.push(cols);
})
return rows.length > 0 ? {table: {headers, rows}} : null
}
|
javascript
|
function tableFromMethods(methodsArr) {
const headers = ["Method Name", "Type", "Parameters", "Description"]
let rows = []
// construct rows of table array of methods
methodsArr.forEach((method) => {
let cols = []
cols.push(method["name"]) // method name
let paramList = ''
method["params"].forEach((param) => {
paramList += `${param["name"]}: ${param["type"]["name"]} - ${param["description"]}\n`
})
cols.push(paramList) // list of method parameters
cols.push(`${method["description"]}`) // description of the method
rows.push(cols);
})
return rows.length > 0 ? {table: {headers, rows}} : null
}
|
[
"function",
"tableFromMethods",
"(",
"methodsArr",
")",
"{",
"const",
"headers",
"=",
"[",
"\"Method Name\"",
",",
"\"Type\"",
",",
"\"Parameters\"",
",",
"\"Description\"",
"]",
"let",
"rows",
"=",
"[",
"]",
"methodsArr",
".",
"forEach",
"(",
"(",
"method",
")",
"=>",
"{",
"let",
"cols",
"=",
"[",
"]",
"cols",
".",
"push",
"(",
"method",
"[",
"\"name\"",
"]",
")",
"let",
"paramList",
"=",
"''",
"method",
"[",
"\"params\"",
"]",
".",
"forEach",
"(",
"(",
"param",
")",
"=>",
"{",
"paramList",
"+=",
"`",
"${",
"param",
"[",
"\"name\"",
"]",
"}",
"${",
"param",
"[",
"\"type\"",
"]",
"[",
"\"name\"",
"]",
"}",
"${",
"param",
"[",
"\"description\"",
"]",
"}",
"\\n",
"`",
"}",
")",
"cols",
".",
"push",
"(",
"paramList",
")",
"cols",
".",
"push",
"(",
"`",
"${",
"method",
"[",
"\"description\"",
"]",
"}",
"`",
")",
"rows",
".",
"push",
"(",
"cols",
")",
";",
"}",
")",
"return",
"rows",
".",
"length",
">",
"0",
"?",
"{",
"table",
":",
"{",
"headers",
",",
"rows",
"}",
"}",
":",
"null",
"}"
] |
auxiliary function to create table from `methods` property of json2md object
|
[
"auxilary",
"function",
"to",
"create",
"table",
"from",
"methods",
"property",
"of",
"json2md",
"object"
] |
5ddcce5ccda8fee41235483760332ad5e63c5455
|
https://github.com/rei/rei-cedar/blob/5ddcce5ccda8fee41235483760332ad5e63c5455/build/vue-docs.js#L240-L260
|
train
|
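A usage sketch for tableFromMethods with a hypothetical docgen-style methods array; each param needs name, type.name, and description because the inner forEach reads all three:

const sampleMethods = [
  {
    name: 'open',
    params: [{ name: 'delay', type: { name: 'number' }, description: 'ms before opening' }],
    description: 'Opens the modal'
  }
]
const methodsTable = tableFromMethods(sampleMethods)
// rows[0] => ['open', 'delay: number - ms before opening\n', 'Opens the modal']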
rei/rei-cedar
|
build/vue-docs.js
|
tableFromEvents
|
function tableFromEvents(eventsObj) {
const headers = ["Event Name", "Type", "Description"]
let rows = []
for(const evt in eventsObj) {
let cols = []
cols.push(`${evt}`) // event name
let typeList = ''
eventsObj[evt]["type"]["names"].forEach((type, idx, arr) => {
typeList += `${type}${arr[idx+1] ? `|` : ''}`
})
cols.push(typeList) // list of event types
cols.push(`${eventsObj[evt]["description"]}`) // description of the event
rows.push(cols);
}
return rows.length > 0 ? {table: {headers, rows}} : null
}
|
javascript
|
function tableFromEvents(eventsObj) {
const headers = ["Event Name", "Type", "Description"]
let rows = []
for(const evt in eventsObj) {
let cols = []
cols.push(`${evt}`) // event name
let typeList = ''
eventsObj[evt]["type"]["names"].forEach((type, idx, arr) => {
typeList += `${type}${arr[idx+1] ? `|` : ''}`
})
cols.push(typeList) // list of event types
cols.push(`${eventsObj[evt]["description"]}`) // description of the event
rows.push(cols);
}
return rows.length > 0 ? {table: {headers, rows}} : null
}
|
[
"function",
"tableFromEvents",
"(",
"eventsObj",
")",
"{",
"const",
"headers",
"=",
"[",
"\"Event Name\"",
",",
"\"Type\"",
",",
"\"Description\"",
"]",
"let",
"rows",
"=",
"[",
"]",
"for",
"(",
"const",
"evt",
"in",
"eventsObj",
")",
"{",
"let",
"cols",
"=",
"[",
"]",
"cols",
".",
"push",
"(",
"`",
"${",
"evt",
"}",
"`",
")",
"let",
"typeList",
"=",
"''",
"eventsObj",
"[",
"evt",
"]",
"[",
"\"type\"",
"]",
"[",
"\"names\"",
"]",
".",
"forEach",
"(",
"(",
"type",
",",
"idx",
",",
"arr",
")",
"=>",
"{",
"typeList",
"+=",
"`",
"${",
"type",
"}",
"${",
"arr",
"[",
"idx",
"+",
"1",
"]",
"?",
"`",
"`",
":",
"''",
"}",
"`",
"}",
")",
"cols",
".",
"push",
"(",
"typeList",
")",
"cols",
".",
"push",
"(",
"`",
"${",
"eventsObj",
"[",
"evt",
"]",
"[",
"\"description\"",
"]",
"}",
"`",
")",
"rows",
".",
"push",
"(",
"cols",
")",
";",
"}",
"return",
"rows",
".",
"length",
">",
"0",
"?",
"{",
"table",
":",
"{",
"headers",
",",
"rows",
"}",
"}",
":",
"null",
"}"
] |
auxiliary function to create table from `events` property of json2md object
|
[
"auxilary",
"function",
"to",
"create",
"table",
"from",
"events",
"property",
"of",
"json2md",
"object"
] |
5ddcce5ccda8fee41235483760332ad5e63c5455
|
https://github.com/rei/rei-cedar/blob/5ddcce5ccda8fee41235483760332ad5e63c5455/build/vue-docs.js#L263-L283
|
train
|
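A usage sketch for tableFromEvents with a hypothetical events object; type.names entries are joined with a literal | between them, mirroring the arr[idx+1] check in the code:

const sampleEvents = {
  click: {
    type: { names: ['Event', 'MouseEvent'] },
    description: 'Emitted when the trigger is clicked'
  }
}
const eventsTable = tableFromEvents(sampleEvents)
// rows[0] => ['click', 'Event|MouseEvent', 'Emitted when the trigger is clicked']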
rei/rei-cedar
|
build/vue-docs.js
|
tableFromSlots
|
function tableFromSlots(slotsObj) {
const headers = ["Slot", "Description"]
let rows = []
for(const slot in slotsObj) {
let cols = []
cols.push(`${slot}`) // name of the slot
cols.push(`${slotsObj[slot]["description"] || ''}`) // description of the slot
rows.push(cols)
}
return rows.length > 0 ? {table: {headers, rows}} : null
}
|
javascript
|
function tableFromSlots(slotsObj) {
const headers = ["Slot", "Description"]
let rows = []
for(const slot in slotsObj) {
let cols = []
cols.push(`${slot}`) // name of the slot
cols.push(`${slotsObj[slot]["description"] || ''}`) // description of the slot
rows.push(cols)
}
return rows.length > 0 ? {table: {headers, rows}} : null
}
|
[
"function",
"tableFromSlots",
"(",
"slotsObj",
")",
"{",
"const",
"headers",
"=",
"[",
"\"Slot\"",
",",
"\"Description\"",
"]",
"let",
"rows",
"=",
"[",
"]",
"for",
"(",
"const",
"slot",
"in",
"slotsObj",
")",
"{",
"let",
"cols",
"=",
"[",
"]",
"cols",
".",
"push",
"(",
"`",
"${",
"slot",
"}",
"`",
")",
"cols",
".",
"push",
"(",
"`",
"${",
"slotsObj",
"[",
"slot",
"]",
"[",
"\"description\"",
"]",
"||",
"''",
"}",
"`",
")",
"rows",
".",
"push",
"(",
"cols",
")",
"}",
"return",
"rows",
".",
"length",
">",
"0",
"?",
"{",
"table",
":",
"{",
"headers",
",",
"rows",
"}",
"}",
":",
"null",
"}"
] |
auxiliary function to create table from `slots` property of json2md object
|
[
"auxilary",
"function",
"to",
"create",
"table",
"from",
"slots",
"property",
"of",
"json2md",
"object"
] |
5ddcce5ccda8fee41235483760332ad5e63c5455
|
https://github.com/rei/rei-cedar/blob/5ddcce5ccda8fee41235483760332ad5e63c5455/build/vue-docs.js#L286-L299
|
train
|
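A usage sketch for tableFromSlots with hypothetical slot entries; a missing description falls back to an empty string via the || '' guard:

const sampleSlots = {
  default: { description: 'Content of the button' },
  icon: {}                                  // no description -> ''
}
const slotsTable = tableFromSlots(sampleSlots)
// rows => [['default', 'Content of the button'], ['icon', '']]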
rei/rei-cedar
|
build/component-docs-build.js
|
buildAPIs
|
function buildAPIs(vueObj) {
const funcArray = [propsAPIObject, methodsAPIObject, eventsAPIObject, slotsAPIObject]
const compAPIObj = funcArray.reduce((apiObj, curFn) => {
const obj = curFn(vueObj)
if (obj !== null) {
Object.assign(apiObj, obj)
}
return apiObj
}, {})
return compAPIObj
}
|
javascript
|
function buildAPIs(vueObj) {
const funcArray = [propsAPIObject, methodsAPIObject, eventsAPIObject, slotsAPIObject]
const compAPIObj = funcArray.reduce((apiObj, curFn) => {
const obj = curFn(vueObj)
if (obj !== null) {
Object.assign(apiObj, obj)
}
return apiObj
}, {})
return compAPIObj
}
|
[
"function",
"buildAPIs",
"(",
"vueObj",
")",
"{",
"const",
"funcArray",
"=",
"[",
"propsAPIObject",
",",
"methodsAPIObject",
",",
"eventsAPIObject",
",",
"slotsAPIObject",
"]",
"const",
"compAPIObj",
"=",
"funcArray",
".",
"reduce",
"(",
"(",
"apiObj",
",",
"curFn",
")",
"=>",
"{",
"const",
"obj",
"=",
"curFn",
"(",
"vueObj",
")",
"if",
"(",
"obj",
"!==",
"null",
")",
"{",
"Object",
".",
"assign",
"(",
"apiObj",
",",
"obj",
")",
"}",
"return",
"apiObj",
"}",
",",
"{",
"}",
")",
"return",
"compAPIObj",
"}"
] |
build data objects for Vue props, methods, events, and slots
@param {Object} vueObj -- JSON object returned by vue-docgen-api library
@returns {Object} -- object representing different parts of component API
|
[
"build",
"data",
"objects",
"for",
"Vue",
"props",
"methods",
"events",
"and",
"slots"
] |
5ddcce5ccda8fee41235483760332ad5e63c5455
|
https://github.com/rei/rei-cedar/blob/5ddcce5ccda8fee41235483760332ad5e63c5455/build/component-docs-build.js#L83-L96
|
train
|
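A usage sketch for buildAPIs with a minimal hypothetical vue-docgen-api object. slotsAPIObject is defined elsewhere in the same file, so this assumes it accepts the same vueObj shape and returns null when there are no slots:

const vueObj = {
  props: {
    open: { tags: {}, type: { name: 'boolean' }, defaultValue: { value: 'false' },
            description: 'Whether the modal is open' }
  },
  methods: [],
  events: {},
  slots: {}
}
const api = buildAPIs(vueObj)
// => { props: [ { name: 'open', type: 'boolean', default: 'false', description: 'Whether the modal is open' } ] }
//    (the methods/events builders return null for empty input, so the reduce skips them)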
rei/rei-cedar
|
build/component-docs-build.js
|
propsAPIObject
|
function propsAPIObject(vueObj) {
const propsObj = vueObj["props"] || {}
let props = []
// construct array of objects for props
for (const prop in propsObj) {
if (propsObj.hasOwnProperty(prop)) {
// Don't document properties with `@ignore` tag
if (propsObj[prop].tags.ignore) {
continue
}
// object representing a single prop
const ele = {
"name": `${prop}`,
"type": propsObj[prop]["type"] ? propsObj[prop]["type"]["name"] : 'unknown',
"default": (propsObj[prop]["defaultValue"] && propsObj[prop]["defaultValue"] !== " ") ?
propsObj[prop]["defaultValue"]["value"] : 'n/a',
"description": `${propsObj[prop]["description"] || 'MISSING DESCRIPTION'}`
}
props.push(ele)
}
}
return props.length > 0 ? {props} : null
}
|
javascript
|
function propsAPIObject(vueObj) {
const propsObj = vueObj["props"] || {}
let props = []
// construct array of objects for props
for (const prop in propsObj) {
if (propsObj.hasOwnProperty(prop)) {
// Don't document properties with `@ignore` tag
if (propsObj[prop].tags.ignore) {
continue
}
// object representing a single prop
const ele = {
"name": `${prop}`,
"type": propsObj[prop]["type"] ? propsObj[prop]["type"]["name"] : 'unknown',
"default": (propsObj[prop]["defaultValue"] && propsObj[prop]["defaultValue"] !== " ") ?
propsObj[prop]["defaultValue"]["value"] : 'n/a',
"description": `${propsObj[prop]["description"] || 'MISSING DESCRIPTION'}`
}
props.push(ele)
}
}
return props.length > 0 ? {props} : null
}
|
[
"function",
"propsAPIObject",
"(",
"vueObj",
")",
"{",
"const",
"propsObj",
"=",
"vueObj",
"[",
"\"props\"",
"]",
"||",
"{",
"}",
"let",
"props",
"=",
"[",
"]",
"for",
"(",
"const",
"prop",
"in",
"propsObj",
")",
"{",
"if",
"(",
"propsObj",
".",
"hasOwnProperty",
"(",
"prop",
")",
")",
"{",
"if",
"(",
"propsObj",
"[",
"prop",
"]",
".",
"tags",
".",
"ignore",
")",
"{",
"continue",
"}",
"const",
"ele",
"=",
"{",
"\"name\"",
":",
"`",
"${",
"prop",
"}",
"`",
",",
"\"type\"",
":",
"propsObj",
"[",
"prop",
"]",
"[",
"\"type\"",
"]",
"?",
"propsObj",
"[",
"prop",
"]",
"[",
"\"type\"",
"]",
"[",
"\"name\"",
"]",
":",
"'unknown'",
",",
"\"default\"",
":",
"(",
"propsObj",
"[",
"prop",
"]",
"[",
"\"defaultValue\"",
"]",
"&&",
"propsObj",
"[",
"prop",
"]",
"[",
"\"defaultValue\"",
"]",
"!==",
"\" \"",
")",
"?",
"propsObj",
"[",
"prop",
"]",
"[",
"\"defaultValue\"",
"]",
"[",
"\"value\"",
"]",
":",
"'n/a'",
",",
"\"description\"",
":",
"`",
"${",
"propsObj",
"[",
"prop",
"]",
"[",
"\"description\"",
"]",
"||",
"'MISSING DESCRIPTION'",
"}",
"`",
"}",
"props",
".",
"push",
"(",
"ele",
")",
"}",
"}",
"return",
"props",
".",
"length",
">",
"0",
"?",
"{",
"props",
"}",
":",
"null",
"}"
] |
Create object representing component props
@param {Object} -- JSON object from vue-docgen-api library
@returns {Object} -- Object for component props that goes into Cedar Data Object
|
[
"Create",
"object",
"representing",
"component",
"props"
] |
5ddcce5ccda8fee41235483760332ad5e63c5455
|
https://github.com/rei/rei-cedar/blob/5ddcce5ccda8fee41235483760332ad5e63c5455/build/component-docs-build.js#L103-L128
|
train
|
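A usage sketch for propsAPIObject with hypothetical props; a prop tagged @ignore is skipped, and a missing description falls back to 'MISSING DESCRIPTION':

const api = propsAPIObject({
  props: {
    id:    { tags: { ignore: true }, type: { name: 'string' } },                 // skipped
    label: { tags: {}, type: { name: 'string' }, defaultValue: { value: "''" } } // no description
  }
})
// => { props: [ { name: 'label', type: 'string', default: "''", description: 'MISSING DESCRIPTION' } ] }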
rei/rei-cedar
|
build/component-docs-build.js
|
methodsAPIObject
|
function methodsAPIObject(vueObj) {
const methodsArr = vueObj["methods"] || []
let methods = []
// construct array of objects for public methods
methodsArr.forEach((method) => {
const ele = {
"name": `${method["name"]}`,
"parameters": `${method["params"].reduce((paramList, param) => {
paramList += `${param["name"]}: ${param["type"]["name"]} - ${param["description"]}\n`
return paramList
}, '')}`,
"description": `${method["description"] || 'MISSING DESCRIPTION'}`
}
methods.push(ele)
})
return methods.length > 0 ? {methods} : null
}
|
javascript
|
function methodsAPIObject(vueObj) {
const methodsArr = vueObj["methods"] || []
let methods = []
// construct array of objects for public methods
methodsArr.forEach((method) => {
const ele = {
"name": `${method["name"]}`,
"parameters": `${method["params"].reduce((paramList, param) => {
paramList += `${param["name"]}: ${param["type"]["name"]} - ${param["description"]}\n`
return paramList
}, '')}`,
"description": `${method["description"] || 'MISSING DESCRIPTION'}`
}
methods.push(ele)
})
return methods.length > 0 ? {methods} : null
}
|
[
"function",
"methodsAPIObject",
"(",
"vueObj",
")",
"{",
"const",
"methodsArr",
"=",
"vueObj",
"[",
"\"methods\"",
"]",
"||",
"[",
"]",
"let",
"methods",
"=",
"[",
"]",
"methodsArr",
".",
"forEach",
"(",
"(",
"method",
")",
"=>",
"{",
"const",
"ele",
"=",
"{",
"\"name\"",
":",
"`",
"${",
"method",
"[",
"\"name\"",
"]",
"}",
"`",
",",
"\"parameters\"",
":",
"`",
"${",
"method",
"[",
"\"params\"",
"]",
".",
"reduce",
"(",
"(",
"paramList",
",",
"param",
")",
"=>",
"{",
"paramList",
"+=",
"`",
"${",
"param",
"[",
"\"name\"",
"]",
"}",
"${",
"param",
"[",
"\"type\"",
"]",
"[",
"\"name\"",
"]",
"}",
"${",
"param",
"[",
"\"description\"",
"]",
"}",
"\\n",
"`",
"return",
"paramList",
"}",
",",
"''",
")",
"}",
"`",
",",
"\"description\"",
":",
"`",
"${",
"method",
"[",
"\"description\"",
"]",
"||",
"'MISSING DESCRIPTION'",
"}",
"`",
"}",
"methods",
".",
"push",
"(",
"ele",
")",
"}",
")",
"return",
"methods",
".",
"length",
">",
"0",
"?",
"{",
"methods",
"}",
":",
"null",
"}"
] |
Create object representing component public methods
@param {Object} -- JSON object from vue-docgen-api library
@returns {Object} -- Object for component methods that goes into Cedar Data Object
|
[
"Create",
"object",
"representing",
"component",
"public",
"methods"
] |
5ddcce5ccda8fee41235483760332ad5e63c5455
|
https://github.com/rei/rei-cedar/blob/5ddcce5ccda8fee41235483760332ad5e63c5455/build/component-docs-build.js#L135-L152
|
train
|
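A usage sketch for methodsAPIObject with a hypothetical methods array; the parameters string is built by the reduce over method.params:

const api = methodsAPIObject({
  methods: [{
    name: 'focus',
    params: [{ name: 'opts', type: { name: 'object' }, description: 'focus options' }],
    description: 'Moves focus to the input'
  }]
})
// => { methods: [ { name: 'focus', parameters: 'opts: object - focus options\n',
//                   description: 'Moves focus to the input' } ] }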
rei/rei-cedar
|
build/component-docs-build.js
|
eventsAPIObject
|
function eventsAPIObject(vueObj) {
const eventsObj = vueObj["events"] || {}
let events = []
for (const evt in eventsObj) {
if (eventsObj.hasOwnProperty(evt)) {
const ele = {
"name": `${evt}`,
"type": `${eventsObj[evt]["type"]["names"].reduce((typeList, type, idx, arr) => {
typeList += `${type}${arr[idx+1] ? `|` : ''}`
return typeList
}, '')}`,
"description": `${eventsObj[evt]["description"] || 'MISSING DESCRIPTION'}`
}
events.push(ele)
}
}
return events.length > 0 ? {events} : null
}
|
javascript
|
function eventsAPIObject(vueObj) {
const eventsObj = vueObj["events"] || {}
let events = []
for (const evt in eventsObj) {
if (eventsObj.hasOwnProperty(evt)) {
const ele = {
"name": `${evt}`,
"type": `${eventsObj[evt]["type"]["names"].reduce((typeList, type, idx, arr) => {
typeList += `${type}${arr[idx+1] ? `|` : ''}`
return typeList
}, '')}`,
"description": `${eventsObj[evt]["description"] || 'MISSING DESCRIPTION'}`
}
events.push(ele)
}
}
return events.length > 0 ? {events} : null
}
|
[
"function",
"eventsAPIObject",
"(",
"vueObj",
")",
"{",
"const",
"eventsObj",
"=",
"vueObj",
"[",
"\"events\"",
"]",
"||",
"{",
"}",
"let",
"events",
"=",
"[",
"]",
"for",
"(",
"const",
"evt",
"in",
"eventsObj",
")",
"{",
"if",
"(",
"eventsObj",
".",
"hasOwnProperty",
"(",
"evt",
")",
")",
"{",
"const",
"ele",
"=",
"{",
"\"name\"",
":",
"`",
"${",
"evt",
"}",
"`",
",",
"\"type\"",
":",
"`",
"${",
"eventsObj",
"[",
"evt",
"]",
"[",
"\"type\"",
"]",
"[",
"\"names\"",
"]",
".",
"reduce",
"(",
"(",
"typeList",
",",
"type",
",",
"idx",
",",
"arr",
")",
"=>",
"{",
"typeList",
"+=",
"`",
"${",
"type",
"}",
"${",
"arr",
"[",
"idx",
"+",
"1",
"]",
"?",
"`",
"`",
":",
"''",
"}",
"`",
"return",
"typeList",
"}",
",",
"''",
")",
"}",
"`",
",",
"\"description\"",
":",
"`",
"${",
"eventsObj",
"[",
"evt",
"]",
"[",
"\"description\"",
"]",
"||",
"'MISSING DESCRIPTION'",
"}",
"`",
"}",
"events",
".",
"push",
"(",
"ele",
")",
"}",
"}",
"return",
"events",
".",
"length",
">",
"0",
"?",
"{",
"events",
"}",
":",
"null",
"}"
] |
Create object representing component events
@param {Object} -- JSON object from vue-docgen-api library
@returns {Object} -- Object for component events that goes into Cedar Data Object
|
[
"Create",
"object",
"representing",
"component",
"events"
] |
5ddcce5ccda8fee41235483760332ad5e63c5455
|
https://github.com/rei/rei-cedar/blob/5ddcce5ccda8fee41235483760332ad5e63c5455/build/component-docs-build.js#L159-L177
|
train
|
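A usage sketch for eventsAPIObject with a hypothetical events object; as in tableFromEvents, multiple type names would be joined with |:

const api = eventsAPIObject({
  events: {
    change: { type: { names: ['string'] }, description: 'Emitted with the new value' }
  }
})
// => { events: [ { name: 'change', type: 'string', description: 'Emitted with the new value' } ] }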