_id (string, 2-6 chars) | title (string, 0-58 chars) | partition (string, 3 classes) | text (string, 52-373k chars) | language (string, 1 class) | meta_information (dict) |
---|---|---|---|---|---|
q2400
|
resourceId
|
train
|
function resourceId(resourceFile, relativeTo, resourceCollection = '') {
const pathKeys = relativePathArray(resourceFile.path, relativeTo).map(
keyname
);
const resourceBits = [];
if (resourceCollection && resourceCollection.length !== 0) {
resourceBits.push(resourceCollection);
}
return resourceBits
.concat(pathKeys)
.concat([keyname(resourceFile.path)])
.join('.');
}
|
javascript
|
{
"resource": ""
}
|
q2401
|
isPathChild
|
train
|
function isPathChild(pathA, pathB) {
const relPath = relative(normalizePath(pathA), normalizePath(pathB));
return relPath === '..';
}
|
javascript
|
{
"resource": ""
}
|
q2402
|
ZReadable
|
train
|
function ZReadable(options) {
if(options) {
if(options.readableObjectMode) {
options.objectMode = true;
}
//Add support for iojs simplified stream constructor
if(typeof options.read === 'function') {
this._read = options.read;
}
}
Readable.call(this, options);
streamMixins.call(this, Readable.prototype, options);
readableMixins.call(this, options);
}
|
javascript
|
{
"resource": ""
}
|
q2403
|
paginateList
|
train
|
function paginateList(formSubmissionModel, params, callback) {
logger.debug("paginateList", params);
var query = params.query || {};
var paginate = params.paginate || {};
var fieldModel = params.fieldModel;
//Sorting can be defined by the user
var sortBy = params.sortBy || {
submissionCompletedTimestamp: -1
};
formSubmissionModel.paginate(query, {
page: paginate.page,
limit: paginate.limit,
select: CONSTANTS.SUBMISSION_SUMMARY_FIELD_SELECTION,
populate: {"path": "formFields.fieldId", "model": fieldModel, "select": "_id type name"},
sortBy: sortBy,
lean: true
}, function(err, submissionsResult) {
//Returning pagination metadata. Useful for displaying tables etc.
var paginationResult = _.extend({
pages: submissionsResult.pages,
total: submissionsResult.total
}, params);
handleListResult(err, paginationResult, submissionsResult.docs, callback);
});
}
|
javascript
|
{
"resource": ""
}
|
q2404
|
nonPaginateList
|
train
|
function nonPaginateList(formSubmissionModel, params, callback) {
logger.debug("nonPaginateList", params);
var submissionQuery = formSubmissionModel.find(params.query || {});
var fieldModel = params.fieldModel;
//Sorting can be defined by the user
var sortBy = params.sortBy || {
submissionCompletedTimestamp: -1
};
//If the full submission is not required, then limit the response payload size.
if (!params.includeFullSubmission) {
submissionQuery.select({"formSubmittedAgainst.name": 1, "_id": 1, "formId": 1, "appId": 1, "appEnvironment": 1, "formFields": 1});
}
submissionQuery.sort(sortBy)
.populate({"path": "formFields.fieldId", "model": fieldModel, "select": "_id type name"})
//Assigning a lean query to not parse the response as a mongoose document. This is to improve query performance.
.lean()
.exec(function(err, foundSubmissions) {
handleListResult(err, params, foundSubmissions, callback);
});
}
|
javascript
|
{
"resource": ""
}
|
q2405
|
updateMetadata
|
train
|
function updateMetadata(metadata, form) {
metadata.files[form.id] = {
name: form.name,
path: path.join(ZIP_SUBFOLDER_NAME, form.id + '.json')
};
}
|
javascript
|
{
"resource": ""
}
|
q2406
|
writeFormsToZip
|
train
|
function writeFormsToZip(forms, callback) {
var zip = archiver('zip')
, metadata = {};
metadata.exportCreated = new Date();
metadata.files = {};
function processForms() {
// Process all forms
_.each(forms, function(form) {
// Update metadata on the fly
updateMetadata(metadata, form);
zip.append(JSON.stringify(form), {
name: path.join(ZIP_SUBFOLDER_NAME, form.id + '.json')
});
});
// Last step: write the metadata file in the root folder
zip.append(JSON.stringify(metadata), {
name: METADATA_FILE_NAME
});
zip.finalize();
}
process.nextTick(processForms);
callback(null, zip);
}
|
javascript
|
{
"resource": ""
}
|
q2407
|
registerPartials
|
train
|
function registerPartials(src, options, prefix = '') {
return readFiles(src.glob, options).then(partialFiles => {
partialFiles.forEach(partialFile => {
const partialKey = resourceId(partialFile, src.basedir, prefix);
if (options.handlebars.partials.hasOwnProperty(partialKey)) {
DrizzleError.error(
new DrizzleError(
`Partial key '${partialKey}' already
registered on Handlebars instance: is this intentional?`,
DrizzleError.LEVELS.WARN
),
options
);
}
options.handlebars.registerPartial(partialKey, partialFile.contents);
});
});
}
|
javascript
|
{
"resource": ""
}
|
q2408
|
preparePartials
|
train
|
function preparePartials(options) {
return Promise.all([
registerPartials(options.src.templates, options), // Partials as partials
registerPartials(options.src.patterns, options, 'patterns') // Patterns
]).then(() => options, error => DrizzleError.error(error, options.debug));
}
|
javascript
|
{
"resource": ""
}
|
q2409
|
submissionToPDF
|
train
|
function submissionToPDF(params, cb) {
logger.debug("renderPDF submissionToPDF", params);
params = params || {};
var maxConcurrentPhantomPerWorker = params.maxConcurrentPhantomPerWorker || config.get().maxConcurrentPhantomPerWorker;
if (!params.submission || !params.submission.formSubmittedAgainst || !params.options || !params.options.location) {
return cb("Invalid Submission Data. Expected a submission and location parameter.");
}
pdfGenerationQueue = pdfGenerationQueue || createPDFGenerationQueue(maxConcurrentPhantomPerWorker);
//Adding the export task to the queue
pdfGenerationQueue.push(params, cb);
}
|
javascript
|
{
"resource": ""
}
|
q2410
|
write
|
train
|
function write(filepath, contents) {
return mkdirp(path.dirname(filepath)).then(() => {
return writeFile(filepath, contents);
});
}
|
javascript
|
{
"resource": ""
}
|
q2411
|
writePage
|
train
|
function writePage(resourceId, resourceObj, pathPrefix) {
const outputPath = resourcePath(resourceId, pathPrefix);
resourceObj.outputPath = outputPath;
return write(outputPath, resourceObj.contents);
}
|
javascript
|
{
"resource": ""
}
|
q2412
|
Adapter
|
train
|
function Adapter() {
expect(this).to.be.an(
'object',
'The Adapter\'s constructor can only be invoked from specialized ' +
'classes\' constructors'
);
expect(this.constructor).to.be.a(
'function',
'The Adapter\'s constructor can only be invoked from specialized ' +
'classes\' constructors'
);
expect(this.constructor).to.not.equal(
Adapter,
'The Adapter is an abstract class and cannot be directly initialized'
);
expect(this).to.be.instanceof(
Adapter,
'The Adapter\'s constructor can only be invoked from specialized ' +
'classes\' constructors'
);
}
|
javascript
|
{
"resource": ""
}
|
q2413
|
validateDuplicateName
|
train
|
function validateDuplicateName(cb) {
var themeId = themeData._id;
var themeName = themeData.name;
if (!themeName) {
return cb(new Error("No theme name passed."));
}
var query = {};
//If there is a theme id, then the query to the theme model must exclude the current theme id that is being updated.
if (themeId) {
query.name = themeName;
//Excluding the themeId that is being updated.
query["_id"] = {"$nin": [themeId]};
} else { //Just checking that the theme name exists as a theme is being created
query.name = themeName;
}
themeModel.count(query, function(err, count) {
if (err) {
return cb(err);
}
//If the number of found theme is > 0, then there is another theme with the same name. Do not save the theme.
if (count > 0) {
return cb(new Error("Theme with name " + themeName + " already exists."));
} else {//No duplicates, can proceed with saving the theme.
return cb();
}
});
}
|
javascript
|
{
"resource": ""
}
|
q2414
|
MethodDictionary
|
train
|
function MethodDictionary(methods) {
expect(arguments).to.have.length.below(
2,
'Invalid arguments length when creating a new MethodDictionary (it has ' +
'to be passed less than 2 arguments)'
);
if (methods) {
expect(methods).to.be.an(
'object',
'Invalid argument type when creating a new MethodDictionary (it has to ' +
'be an object)'
);
for (var method in methods) {
_addMethod(this, methods[method], method);
}
}
Object.preventExtensions(this);
Object.seal(this);
}
|
javascript
|
{
"resource": ""
}
|
q2415
|
_addMethod
|
train
|
function _addMethod(methodDictionary, func, name) {
expect(func).to.be.a(
'function',
'Invalid argument "func" when adding a method called "' + name + '" in a ' +
'MethodDictionary (it has to be a function)'
);
Object.defineProperty(methodDictionary, name, {
value: func,
enumerable: true,
writable: false,
configurable: false
});
}
|
javascript
|
{
"resource": ""
}
|
q2416
|
concat
|
train
|
function concat(methodDictionary, func, name) {
expect(arguments).to.have.length(
3,
'Invalid arguments length when concatenating a MethodDictionary (it has ' +
'to be passed 3 arguments)'
);
expect(methodDictionary).to.be.instanceof(
MethodDictionary,
'Invalid argument "methodDictionary" when concatenating a ' +
'MethodDictionary (it has to be a MethodDictionary)'
);
expect(func).to.be.a(
'function',
'Invalid argument "func" when concatenating a MethodDictionary ' +
'(it has to be a function)'
);
expect(name).to.be.a(
'string',
'Invalid argument "name" when concatenating a MethodDictionary ' +
'(it has to be a string)'
);
expect(methodDictionary).to.not.have.ownProperty(
name,
'Duplicated method name "' + name + '"'
);
var currentMethods = {};
for (var currentMethod in methodDictionary) {
currentMethods[currentMethod] = methodDictionary[currentMethod];
}
currentMethods[name] = func;
return new MethodDictionary(currentMethods);
}
|
javascript
|
{
"resource": ""
}
|
q2417
|
renderCollection
|
train
|
function renderCollection(patterns, drizzleData, collectionKey) {
const layoutKey = drizzleData.options.layouts.collection;
let layoutObj;
try {
// DeepObj will throw if it fails, which is good and fine...
layoutObj = deepObj(idKeys(layoutKey), drizzleData.templates, false);
} catch (e) {
// But Make this error more friendly and specific
DrizzleError.error(
new DrizzleError(
`Could not find partial for default collection layout
'${layoutKey}'. Check 'options.layouts.collection' and/or
'options.src.templates' values to make sure they are OK`,
DrizzleError.LEVELS.ERROR
),
drizzleData.options.debug
);
}
patterns.collection.contents = applyTemplate(
layoutObj.contents,
resourceContext(patterns.collection, drizzleData),
drizzleData.options
);
return patterns;
}
|
javascript
|
{
"resource": ""
}
|
q2418
|
getDbName
|
train
|
function getDbName(uri) {
var parsedMongooseUri = mongoUriParser.parse(uri);
var dbName = parsedMongooseUri.database;
return dbName;
}
|
javascript
|
{
"resource": ""
}
|
q2419
|
train
|
function(message, signaling_key) {
if (signaling_key.byteLength != 52) {
throw new Error("Got invalid length signaling_key");
}
if (message.byteLength < 1 + 16 + 10) {
throw new Error("Got invalid length message");
}
if (message[0] != 1) {
throw new Error("Got bad version number: " + message[0]);
}
var aes_key = signaling_key.slice(0, 32);
var mac_key = signaling_key.slice(32, 32 + 20);
var iv = message.slice(1, 17);
var ciphertext = message.slice(1 + 16, message.byteLength - 10);
var ivAndCiphertext = message.slice(0, message.byteLength - 10);
var mac = message.slice(message.byteLength - 10, message.byteLength);
libsignal.crypto.verifyMAC(ivAndCiphertext, mac_key, mac, 10);
return libsignal.crypto.decrypt(aes_key, ciphertext, iv);
}
|
javascript
|
{
"resource": ""
}
|
|
q2420
|
isglob
|
train
|
function isglob(str, { strict = true } = {}) {
if (str === '') return false;
let match, rgx = strict ? STRICT : RELAXED;
while ((match = rgx.exec(str))) {
if (match[2]) return true;
let idx = match.index + match[0].length;
// if an open bracket/brace/paren is escaped,
// set the index to the next closing character
let open = match[1];
let close = open ? CHARS[open] : null;
if (open && close) {
let n = str.indexOf(close, idx);
if (n !== -1) idx = n + 1;
}
str = str.slice(idx);
}
return false;
}
|
javascript
|
{
"resource": ""
}
|
q2421
|
parent
|
train
|
function parent(str, { strict = false } = {}) {
str = path.normalize(str).replace(/\/|\\/, '/');
// special case for strings ending in enclosure containing path separator
if (/[\{\[].*[\/]*.*[\}\]]$/.test(str)) str += '/';
// preserves full path in case of trailing path separator
str += 'a';
do {str = path.dirname(str)}
while (isglob(str, {strict}) || /(^|[^\\])([\{\[]|\([^\)]+$)/.test(str));
// remove escape chars and return result
return str.replace(/\\([\*\?\|\[\]\(\)\{\}])/g, '$1');
}
|
javascript
|
{
"resource": ""
}
|
q2422
|
ClassicWritable
|
train
|
function ClassicWritable(stream, options) {
var self = this;
PassThrough.call(this, options);
classicMixins.call(this, stream, options);
stream.on('error', function(error) {
self.emit('error', error);
});
self._isClosed = false;
stream.on('close', function() {
self._isClosed = true;
});
if (!stream.end) stream.end = () => { /* do nothing */ };
this._zSuperObj.pipe.call(this, stream);
}
|
javascript
|
{
"resource": ""
}
|
q2423
|
buildSingleFilterQueryObject
|
train
|
function buildSingleFilterQueryObject(paginationFilter) {
return function(submissionQueryField) {
var fieldQuery = {
};
//formId and _id fields are ObjectIds. They must be valid in order to search for them.
if (submissionQueryField === 'formId' || submissionQueryField === '_id') {
if (mongoose.Types.ObjectId.isValid(paginationFilter)) {
//Cannot use $regex when searching for ObjectIDs
fieldQuery[submissionQueryField] = new mongoose.Types.ObjectId(paginationFilter);
} else {
//not a valid objectId, don't want to search for it.
return null;
}
} else {
fieldQuery[submissionQueryField] = {$regex: paginationFilter, $options: 'i'};
}
return fieldQuery;
};
}
|
javascript
|
{
"resource": ""
}
|
q2424
|
validateParams
|
train
|
function validateParams(dataSource, cb) {
var dataSourceValidator = validate(dataSource);
//The data source parameter should have an ID property.
dataSourceValidator.has(CONSTANTS.DATA_SOURCE_ID, function(err) {
if (err) {
return cb(buildErrorResponse({error: new Error("An ID Parameter Is Required To Update A Data Source"), code: ERROR_CODES.FH_FORMS_INVALID_PARAMETERS}));
}
//Data Source Cache Should Not Be Updated With This Function
dataSourceValidator.hasno(CONSTANTS.DATA_SOURCE_DATA, CONSTANTS.DATA_SOURCE_CACHE, function(err) {
if (err) {
return cb(buildErrorResponse({error: new Error("Updating A Data Source Should Not Include Cache Data"), code: ERROR_CODES.FH_FORMS_INVALID_PARAMETERS}));
}
cb(undefined, dataSource);
});
});
}
|
javascript
|
{
"resource": ""
}
|
q2425
|
findDataSource
|
train
|
function findDataSource(connections, dataSource, cb) {
var query = {
};
//Searching By ID.
query[CONSTANTS.DATA_SOURCE_ID] = dataSource[CONSTANTS.DATA_SOURCE_ID];
//Looking up a full data source document as we are updating
lookUpDataSources(connections, {
query: query,
lean: false
}, function(err, dataSources) {
if (err) {
return cb(buildErrorResponse({
error: err,
userDetail: "Unexpected Error When Searching For A Data Source",
code: ERROR_CODES.FH_FORMS_UNEXPECTED_ERROR
}));
}
//Should only be one data source
if (dataSources.length !== 1) {
return cb(buildErrorResponse({
error: new Error("Data Source Not Found"),
systemDetail: "Requested ID: " + dataSource[CONSTANTS.DATA_SOURCE_ID],
code: ERROR_CODES.FH_FORMS_NOT_FOUND
}));
}
return cb(undefined, dataSource, dataSources[0]);
});
}
|
javascript
|
{
"resource": ""
}
|
q2426
|
shiftLinesLeft
|
train
|
function shiftLinesLeft(text) {
// Determine type of linebreak
let linebreak = determineLinebreaks(text);
if (linebreak === "") return text;
let lines = [];
lines = text.split(linebreak);
// Find amount to shift lines
let commonPrefix = null;
for (let i = 0; i < lines.length; i++) {
if (!lines[i].length) continue;
let whitespace = lines[i].match(/^\s*/);
if (whitespace) whitespace = whitespace[0];
else whitespace = "";
if (commonPrefix === null || commonPrefix.startsWith(whitespace)) commonPrefix = whitespace;
}
// Shift lines and return result
text = "";
let shift = commonPrefix.length;
for (let i = 0; i < lines.length; i++) {
if (lines[i].length) {
lines[i] = lines[i].slice(shift, lines[i].length);
}
text += lines[i] + linebreak;
}
return text;
}
|
javascript
|
{
"resource": ""
}
|
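The line helpers in this group (shiftLinesLeft above and the removeLeadingLinebreaks, removeTrailingLinebreaks and trimLines entries that follow) all call a determineLinebreaks helper that is not part of the snippets. A minimal sketch of the behaviour they appear to assume, plus an illustrative call:

// Assumed behaviour of the missing helper: report the linebreak style in use,
// or "" for single-line text.
function determineLinebreaks(text) {
  if (text.indexOf("\r\n") >= 0) return "\r\n";
  if (text.indexOf("\n") >= 0) return "\n";
  if (text.indexOf("\r") >= 0) return "\r";
  return "";
}
// With that in place:
// shiftLinesLeft("    foo\n      bar\n") === "foo\n  bar\n\n"
// (the four spaces common to every non-empty line are stripped; a trailing
// linebreak is appended to each line, including the final empty one)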
q2427
|
removeLeadingLinebreaks
|
train
|
function removeLeadingLinebreaks(text) {
let linebreak = determineLinebreaks(text);
if (linebreak === "") return text;
while (text.startsWith(linebreak)) {
text = text.slice(linebreak.length);
}
return text;
}
|
javascript
|
{
"resource": ""
}
|
q2428
|
removeTrailingLinebreaks
|
train
|
function removeTrailingLinebreaks(text) {
let linebreak = determineLinebreaks(text);
if (linebreak === "") return text;
while (text.endsWith(linebreak)) {
text = text.slice(0, 0 - linebreak.length);
}
return text;
}
|
javascript
|
{
"resource": ""
}
|
q2429
|
trimLines
|
train
|
function trimLines(text) {
let linebreak = determineLinebreaks(text);
if (linebreak === "") return text;
let lines = [];
lines = text.split(linebreak);
text = "";
for (let i = 0; i < lines.length; i++) {
let line = lines[i];
while (true) {
let lastChar = line.slice(line.length-1, line.length);
let repeat = false;
switch (lastChar) {
case " ":
case "\t":
line = line.slice(0, -1)
repeat = true;
}
if (!repeat) break;
}
text += line + linebreak;
}
return text;
}
|
javascript
|
{
"resource": ""
}
|
q2430
|
promiseWrap
|
train
|
function promiseWrap(x) {
if(x.then && typeof x.then === "function") {
return x;
}
var deferred = q.defer();
deferred.resolve(x);
return deferred.promise;
}
|
javascript
|
{
"resource": ""
}
|
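A brief usage sketch: plain values are wrapped in a resolved promise, while anything already exposing a then function is returned as-is. Only the q API used by the snippet itself is assumed.

var q = require('q');
promiseWrap(42).then(function(value) {
  console.log(value); // 42 -- the plain value was wrapped in a resolved promise
});
var deferred = q.defer();
deferred.resolve('done');
console.log(promiseWrap(deferred.promise) === deferred.promise); // true -- thenables pass through untouched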
q2431
|
cacheSubmissionFile
|
train
|
function cacheSubmissionFile(params, fileToStreamTo, cb) {
//If the files are remote, use the mbaas client
logger.debug("cacheFiles: cacheSubmissionFile submission", {params: params, fileToStreamTo: fileToStreamTo});
if (params.options.filesAreRemote) {
return downloadFileFromMbaas(params, fileToStreamTo, cb);
}
//File is in the local database, load it there
getSubmissionFile(params.connections, params.options, {
_id: params.fileId
}, function(err, fileDetails) {
if (err) {
logger.error("cacheFiles: cacheSubmissionFile getSubmissionFile", {error: err});
return cb(err);
}
fileDetails.stream.on('error', function(err) {
logger.error("cacheFiles: cacheSubmissionFile Error Streaming File With Id: " + params.fileId);
return cb(err);
});
fileDetails.stream.on('end', function() {
logger.debug("cacheFiles: cacheSubmissionFile Stream Complete For File With Id: " + params.fileId);
return cb(undefined, fileDetails);
});
//Streaming The File
fileDetails.stream.pipe(fileToStreamTo);
fileDetails.stream.resume();
});
}
|
javascript
|
{
"resource": ""
}
|
q2432
|
mergeSubmissionFiles
|
train
|
function mergeSubmissionFiles(params, cb) {
logger.debug("cacheFiles: mergeSubmissionFiles", params);
var submission = params.submission;
//No submission, cannot continue.
if (!_.isObject(submission)) {
return cb(buildErrorResponse({
err: new Error("Expected Submission Object To Cache Files For"),
msg: "Error Exporting Submission As PDF",
httpCode: 500
}));
}
var form = submission.formSubmittedAgainst;
var fileFieldDetails = _.filter(submission.formFields, function(field) {
return isFileFieldType(field.fieldId.type);
});
fileFieldDetails = _.flatten(fileFieldDetails);
var fieldTypes = _.groupBy(fileFieldDetails, function(field) {
return field.fieldId.type;
});
//Files That Have To Be Loaded From Mbaas
var mbaasTypes = _.union(fieldTypes.photo, fieldTypes.signature);
//Only Interested In The File Ids To Download
var filesToDownload = _.map(mbaasTypes, function(field) {
return _.map(field.fieldValues, function(fieldValue) {
return fieldValue.groupId;
});
});
filesToDownload = _.flatten(filesToDownload);
logger.debug("cacheFiles: mergeSubmissionFiles filesToDownload", filesToDownload);
//Now download all of the files..
async.mapSeries(filesToDownload, function(fileIdOrUrl, cb) {
//Only want the callback to be called once.
cb = _.once(cb);
var fileUri = path.join(params.options.pdfExportDir, 'image_binary_' + fileIdOrUrl);
var localFile = fs.createWriteStream(fileUri);
//Loading The Submission File From The Database / MbaaS
cacheSubmissionFile({
fileId: fileIdOrUrl,
connections: params.connections,
options: params.options,
submission: submission
}, localFile, function(err, fileDetails) {
if (err) {
logger.error("cacheFiles: cacheSubmissionFile", {error: err});
}
fileDetails = fileDetails || {};
fileDetails.url = "file://" + fileUri;
fileDetails.fileId = fileIdOrUrl;
logger.debug("cacheFiles: mergeSubmissionFiles fileDetails", fileDetails);
return cb(err, _.omit(fileDetails, "stream"));
});
}, function(err, cachedFiles) {
if (err) {
return cb(buildErrorResponse({
error: err,
msg: "Error Cacheing Files For Submission Export",
code: constants.ERROR_CODES.FH_FORMS_ERR_CODE_PDF_GENERATION,
httpCode: 500
}));
}
//No Errors, all files are now cached for the submission
submission.formSubmittedAgainst = populateSubmissionFileData({
form: form,
submissionFiles: cachedFiles,
downloadUrl: params.options.downloadUrl,
fileUriPath: params.options.fileUriPath,
submission: submission
});
logger.debug("cacheFiles: mergeSubmissionFiles submission", submission);
cb(undefined, submission);
});
}
|
javascript
|
{
"resource": ""
}
|
q2433
|
removeCachedFiles
|
train
|
function removeCachedFiles(params, cb) {
//Cleaning Up Any Files Cached To Render The Submission.
logger.debug("cacheFiles: mergeSubmissionFiles Removing Cached Files For PDF Generation: ", params.submissionFiles);
async.eachSeries(params.submissionFiles || [], function(submissionFileDetails, cb) {
logger.debug("Removing Cached File: ", submissionFileDetails);
fs.unlink(submissionFileDetails.url, function(err) {
if (err) {
logger.error('Error Removing File At ' + submissionFileDetails.url);
}
return cb();
});
}, cb);
}
|
javascript
|
{
"resource": ""
}
|
q2434
|
walkPages
|
train
|
function walkPages(pages, drizzleData, writePromises = []) {
if (isPage(pages)) {
return writePage(
pages.id,
pages,
drizzleData.options.dest.pages,
drizzleData.options.keys.pages.plural
);
}
for (var pageKey in pages) {
writePromises = writePromises.concat(
walkPages(pages[pageKey], drizzleData, writePromises)
);
}
return writePromises;
}
|
javascript
|
{
"resource": ""
}
|
q2435
|
writePages
|
train
|
function writePages(drizzleData) {
return Promise.all(walkPages(drizzleData.pages, drizzleData)).then(
() => drizzleData,
error => DrizzleError.error(error, drizzleData.options.debug)
);
}
|
javascript
|
{
"resource": ""
}
|
q2436
|
deleteAppForms
|
train
|
function deleteAppForms(cb) {
var appId = params.appId;
async.series([
function(cb) { // first remove the form itself
var appFormsModel = models.get(connections.mongooseConnection, models.MODELNAMES.APP_FORMS);
appFormsModel.find({appId: appId}).remove().exec(cb);
},
function(cb) { // remove the deleted form from any groups
groups.removeAppFromAllGroups(connections, appId, cb);
},
function deleteThemeRefrences(cb) {
var appThemeModel = models.get(connections.mongooseConnection, models.MODELNAMES.APP_THEMES);
appThemeModel.find({"appId":appId}).remove().exec(cb);
},
function deleteAppConfigReferences(cb) {
var appConfigModel = models.get(connections.mongooseConnection, models.MODELNAMES.APP_CONFIG);
appConfigModel.find({"appId":appId}).remove().exec(cb);
}
], function(err) {
if (err) {
return cb(err);
}
return cb(null);
});
}
|
javascript
|
{
"resource": ""
}
|
q2437
|
renderPDF
|
train
|
function renderPDF(params, cb) {
var template = params.template;
var session = params.session;
var form = params.form;
var submission = params.submission;
submission.formName = form.name;
var studioLocation = params.location;
var generationTimestamp = params.generationTimestamp || Date.now();
var page;
session.createPage()
.then(function(_page) {
logger.debug('page created');
page = _page;
// A4 aspect ratio
page.property('paperSize', {format: 'A4'})
.then(function() {
logger.debug('page aspect ratio set');
//Can't load css files over https. Needs to be http.
if (typeof(studioLocation) === "string") {
studioLocation = studioLocation.replace("https://", "http://");
}
form = processForm(form, submission);
var html = template({
form: form.form,
location: studioLocation,
subexport: form.sub,
js: params.js,
css: params.css
});
// inject html as we don't have a server to serve html to phantom
page.setContent(html, null)
.then(function(status) {
logger.debug('content set. status', status);
var file = path.join(params.pdfExportDir, form.sub.formSubmittedAgainst._id + '_' + form.sub._id + '_' + generationTimestamp + '.pdf');
page.render(file)
.then(function() {
logger.info('Rendered pdf:', {file: file});
page.close();
page = null;
destroyPhantomSession(session, function() {
return cb(null, file);
});
});
});
});
})
.catch(function(e1) {
logger.error('Exception rendering pdf', {exception: e1});
try {
if (page) {
page.close();
}
} catch (e2) {
// silent
logger.warn('Error closing page after phantom exception', {exception: e2});
}
return cb('Exception rendering pdf:' + e1.toString());
});
}
|
javascript
|
{
"resource": ""
}
|
q2438
|
ZDuplex
|
train
|
function ZDuplex(options) {
if(options) {
if(options.objectMode) {
options.readableObjectMode = true;
options.writableObjectMode = true;
}
if(options.readableObjectMode && options.writableObjectMode) {
options.objectMode = true;
}
//Add support for iojs simplified stream constructor
if(typeof options.read === 'function') {
this._read = options.read;
}
if(typeof options.write === 'function') {
this._write = options.write;
}
if(typeof options.flush === 'function') {
this._flush = options.flush;
}
}
Duplex.call(this, options);
// Register listeners for finish (v0.10) and prefinish (v0.12) to run _duplexPrefinish
this._duplexFinished = false;
this.once('finish', this._duplexPrefinish.bind(this));
this.once('prefinish', this._duplexPrefinish.bind(this));
// note: exclamation marks are used to convert to booleans
if(options && !options.objectMode && (!options.readableObjectMode) !== (!options.writableObjectMode)) {
this._writableState.objectMode = !!options.writableObjectMode;
this._readableState.objectMode = !!options.readableObjectMode;
}
if(options && options.readableObjectMode) {
this._readableState.highWaterMark = 16;
}
if(options && options.writableObjectMode) {
this._writableState.highWaterMark = 16;
}
streamMixins.call(this, Duplex.prototype, options);
readableMixins.call(this, options);
writableMixins.call(this, options);
}
|
javascript
|
{
"resource": ""
}
|
q2439
|
normalizePaths
|
train
|
function normalizePaths(opts) {
for (var srcKey in opts.src) {
if (!path.isAbsolute(opts.src[srcKey].glob)) {
opts.src[srcKey].glob = path.resolve(opts.src[srcKey].glob);
}
if (!path.isAbsolute(opts.src[srcKey].basedir)) {
opts.src[srcKey].basedir = path.resolve(opts.src[srcKey].basedir);
}
}
}
|
javascript
|
{
"resource": ""
}
|
q2440
|
init
|
train
|
function init(options = {}, handlebars) {
const opts = deepExtend({}, defaults, options);
normalizePaths(opts);
opts.handlebars = handlebars || Handlebars.create();
return Promise.resolve(opts);
}
|
javascript
|
{
"resource": ""
}
|
q2441
|
buildSubmissionReceivedMessage
|
train
|
function buildSubmissionReceivedMessage(subscribers, formName, formSubmission) {
var msg = {};
msg.subscribers = subscribers;
msg.formId = formSubmission.formId;
msg.appId = formSubmission.appId;
msg.attachmentUrl = getAttachmentUrl(formSubmission);
msg.formName = formName || "UNKNOWN FORM NAME";
msg.submissionStatus = formSubmission.status;
msg.appEnvironment = formSubmission.appEnvironment;
msg.submissionStarted = formSubmission.submissionStartedTimestamp;
msg.submissionCompleted = formSubmission.submissionCompletedTimestamp;
msg.submissionId = formSubmission._id;
msg.deviceIPAddress = formSubmission.deviceIPAddress;
msg.deviceId = formSubmission.deviceId;
msg.submittedFields = [];
var form = formSubmission.formSubmittedAgainst;
// build helper structures
var fieldPageMap = {};
var fieldSectionMap = {};
var sectionsInPage = {};
form.pages.forEach(function(page) {
var currentSectionId = 'initial';
sectionsInPage[page._id] = [currentSectionId];
page.fields.forEach(function(field) {
fieldPageMap[field._id] = page._id;
if (field.type === 'sectionBreak') {
currentSectionId = field._id;
sectionsInPage[page._id].push(currentSectionId);
} else {
fieldSectionMap[field._id] = currentSectionId;
}
});
});
// get structured form fields
var pages = getStructuredFields(formSubmission, fieldPageMap, fieldSectionMap);
// construct message
form.pages.forEach(function(page) {
//is this page in submission?
if (pages[page._id]) {
var sections = sectionsInPage[page._id];
sections.forEach(function(section) {
var repSections = pages[page._id][section];
if (repSections) {
if (repSections.length === 1) {
repSections[0].forEach(function(formField) {
msg.submittedFields.push(getFieldMsg(formField, formSubmission));
});
} else {
repSections.forEach(function(repSection, index) {
msg.submittedFields.push(getField(section, formSubmission).name + ' - ' + (index + 1) + ':');
repSection.forEach(function(formField) {
msg.submittedFields.push(getFieldMsg(formField, formSubmission));
});
});
}
}
});
}
});
return msg;
}
|
javascript
|
{
"resource": ""
}
|
q2442
|
EntityNotFoundError
|
train
|
function EntityNotFoundError(entity, innerError) {
/**
* The name of the entity that was not found.
* @type {?string}
*/
this.entity = entity;
/**
* The inner error that generated the current error.
* @type {?Error}
*/
this.innerError = innerError;
expect(arguments).to.have.length.below(
3,
'Invalid arguments length when creating a new EntityNotFoundError (it ' +
'has to be passed less than 3 arguments)'
);
this.name = 'EntityNotFoundError';
this.message = 'Cannot find Entity';
if (entity) {
expect(entity).to.be.a(
'string',
'Invalid argument "entity" when creating a new EntityNotFoundError ' +
'(it has to be a string)'
);
this.message += ' "' + entity + '"';
}
this.stack = (new Error(this.message)).stack;
if (innerError) {
expect(innerError).to.be.an.instanceof(
Error,
'Invalid argument "innerError" when creating a new EntityNotFoundError ' +
'(it has to be an Error)'
);
this.stack += '\n\n' + innerError.stack;
}
}
|
javascript
|
{
"resource": ""
}
|
q2443
|
AttributeTypeNotFoundError
|
train
|
function AttributeTypeNotFoundError(type, innerError) {
/**
* The attribute type that was not found.
* @type {?string}
*/
this.type = type;
/**
* The inner error that generated the current error.
* @type {?Error}
*/
this.innerError = innerError;
expect(arguments).to.have.length.below(
3,
'Invalid arguments length when creating a new ' +
'AttributeTypeNotFoundError (it has to be passed less than 3 arguments)'
);
this.name = 'AttributeTypeNotFoundError';
this.message = 'Cannot find Attribute type';
if (type) {
expect(type).to.be.a(
'string',
'Invalid argument "type" when creating a new ' +
'AttributeTypeNotFoundError (it has to be a string)'
);
this.message += ' "' + type + '"';
}
this.stack = (new Error(this.message)).stack;
if (innerError) {
expect(innerError).to.be.an.instanceof(
Error,
'Invalid argument "innerError" when creating a new ' +
'AttributeTypeNotFoundError (it has to be an Error)'
);
this.stack += '\n\n' + innerError.stack;
}
}
|
javascript
|
{
"resource": ""
}
|
q2444
|
ValidationError
|
train
|
function ValidationError(
validationMessage,
entity,
attribute,
position,
innerError
) {
/**
* The validation message to be included in the error.
* @type {?string}
*/
this.validationMessage = validationMessage;
/**
* The name of the entity that was not validated.
* @type {?string}
*/
this.entity = entity;
/**
* The name of the attribute that was not validated.
* @type {?string}
*/
this.attribute = attribute;
/**
* The position of the item in the attribute that was not validated.
* @type {?(string|number)}
*/
this.position = position;
/**
* The inner error that generated the current error.
* @type {?Error}
*/
this.innerError = innerError;
expect(arguments).to.have.length.below(
6,
'Invalid arguments length when creating a new ' +
'ValidationError (it has to be passed less than 6 arguments)'
);
this.name = 'ValidationError';
this.message = 'Error when validating an attribute';
if (attribute) {
expect(attribute).to.be.a(
'string',
'Invalid argument "attribute" when creating a new ValidationError (it ' +
'has to be a string)'
);
this.message += ' called "' + attribute + '"';
}
this.message += ' of an entity';
if (entity) {
expect(entity).to.be.a(
'string',
'Invalid argument "entity" when creating a new ValidationError (it has ' +
'to be a string)'
);
this.message += ' called "' + entity + '"';
}
if (position) {
expect(['string', 'number']).to.include(
typeof position,
'Invalid argument "position" when creating a new ValidationError (it ' +
'has to be a string or a number)'
);
this.message += ' in position ' + position;
}
if (validationMessage) {
expect(validationMessage).to.be.a(
'string',
'Invalid argument "validationMessage" when creating a new ' +
'ValidationError (it has to be a string)'
);
this.message += ': ' + validationMessage;
}
this.stack = (new Error(this.message)).stack;
if (innerError) {
expect(innerError).to.be.an.instanceof(
Error,
'Invalid argument "innerError" when creating a new ' +
'ValidationError (it has to be an Error)'
);
this.stack += '\n\n' + innerError.stack;
}
}
|
javascript
|
{
"resource": ""
}
|
q2445
|
AdapterNotFoundError
|
train
|
function AdapterNotFoundError(adapterName, innerError) {
/**
* The name of the adapter that was not found.
* @type {?string}
*/
this.adapterName = adapterName;
/**
* The inner error that generated the current error.
* @type {?Error}
*/
this.innerError = innerError;
expect(arguments).to.have.length.below(
3,
'Invalid arguments length when creating a new ' +
'AdapterNotFoundError (it has to be passed less than 3 arguments)'
);
this.name = 'AdapterNotFoundError';
this.message = 'Cannot find Adapter';
if (adapterName) {
expect(adapterName).to.be.a(
'string',
'Invalid argument "adapterName" when creating a new ' +
'AdapterNotFoundError (it has to be a string)'
);
this.message += ' "' + adapterName + '"';
}
this.stack = (new Error(this.message)).stack;
if (innerError) {
expect(innerError).to.be.an.instanceof(
Error,
'Invalid argument "innerError" when creating a new ' +
'AdapterNotFoundError (it has to be an Error)'
);
this.stack += '\n\n' + innerError.stack;
}
}
|
javascript
|
{
"resource": ""
}
|
q2446
|
AssociationAttribute
|
train
|
function AssociationAttribute() {
/**
* It is a readonly property with the Entity that is associated with the
* current AssociationAttribute.
* @name
* module:back4app-entity/models/attributes/types.AssociationAttribute#Entity
* @type {!Class}
* @readonly
* @throws {module:back4app-entity/models/errors.EntityNotFoundError}
* @example
* var associationAttribute = new AssociationAttribute(
* 'associationAttribute',
* MyEntity
* );
* console.log(associationAttribute.Entity == MyEntity) // Logs "true"
*/
this.Entity = null;
var _Entity = null;
Object.defineProperty(this, 'Entity', {
get: function () {
if (typeof _Entity === 'string') {
_Entity = models.Entity.getSpecialization(_Entity);
}
return _Entity;
},
set: function () {
throw new Error(
'Entity property of an AssociationAttribute instance cannot be changed'
);
},
enumerable: true,
configurable: false
});
var argumentsArray = Array.prototype.slice.call(arguments);
expect(argumentsArray).to.have.length.within(
1,
5,
'Invalid arguments length when creating an AssociationAttribute (it has ' +
'to be passed from 1 to 5 arguments)'
);
if (argumentsArray.length === 1) {
var associationAttribute = argumentsArray[0];
expect(associationAttribute).to.be.an(
'object',
'Invalid argument type when creating an Attribute (it has to be an ' +
'object)'
);
associationAttribute = objects.copy(associationAttribute);
_Entity = associationAttribute.entity;
if (_Entity) {
delete associationAttribute.entity;
} else {
expect(associationAttribute).to.have.ownProperty(
'Entity',
'Property "entity" or "Entity" is required when creating an ' +
'AssociationAttribute'
);
_Entity = associationAttribute.Entity;
delete associationAttribute.Entity;
}
argumentsArray[0] = associationAttribute;
} else {
_Entity = argumentsArray.splice(1, 1)[0];
}
if (typeof _Entity !== 'string') {
expect(_Entity).to.be.a(
'function',
'Invalid argument "entity" when creating an AssociationAttribute (it ' +
'has to be a Class)'
);
expect(classes.isGeneral(models.Entity, _Entity)).to.equal(
true,
'Invalid argument "entity" when creating an AssociationAttribute (it ' +
'has to be a subclass of Entity)'
);
}
Attribute.apply(this, argumentsArray);
}
|
javascript
|
{
"resource": ""
}
|
q2447
|
IntersperseStream
|
train
|
function IntersperseStream(seperator, options) {
if(typeof seperator === 'object') {
options = seperator;
seperator = null;
}
options = !options ? {} : options;
Transform.call(this, options);
this._intersperseBuffer = null;
this._intersperseSeperator = (seperator === null || seperator === undefined) ? '\n' : seperator;
}
|
javascript
|
{
"resource": ""
}
|
q2448
|
parseAll
|
train
|
function parseAll(options) {
return Promise.all([
parseData(options),
parsePages(options),
parsePatterns(options),
parseTemplates(options)
]).then(
allData => parseTree(allData, options),
error => DrizzleError.error(error, options.debug)
);
}
|
javascript
|
{
"resource": ""
}
|
q2449
|
registerAccount
|
train
|
async function registerAccount(options) {
options = options || {};
const atlasClient = options.atlasClient || await AtlasClient.factory();
const name = options.name || defaultName;
const registrationId = libsignal.keyhelper.generateRegistrationId();
const password = generatePassword();
const signalingKey = generateSignalingKey();
const response = await atlasClient.fetch('/v1/provision/account', {
method: 'PUT',
json: {
signalingKey: signalingKey.toString('base64'),
supportsSms: false,
fetchesMessages: true,
registrationId,
name,
password
}
});
const addr = response.userId;
const username = `${addr}.${response.deviceId}`;
const identity = libsignal.keyhelper.generateIdentityKeyPair();
await storage.clearSessionStore();
await storage.removeOurIdentity();
await storage.removeIdentity(addr);
await storage.saveIdentity(addr, identity.pubKey);
await storage.saveOurIdentity(identity);
await storage.putState('addr', addr);
await storage.putState('serverUrl', response.serverUrl);
await storage.putState('deviceId', response.deviceId);
await storage.putState('name', name);
await storage.putState('username', username);
await storage.putState('password', password);
await storage.putState('registrationId', registrationId);
await storage.putState('signalingKey', signalingKey);
const sc = new SignalClient(username, password, response.serverUrl);
await sc.registerKeys(await sc.generateKeys());
}
|
javascript
|
{
"resource": ""
}
|
q2450
|
checkValueSubmitted
|
train
|
function checkValueSubmitted(submittedField, fieldDefinition, visible, cb) {
if (!fieldDefinition.required) {
return cb(undefined, null);
}
var valueSubmitted = submittedField && submittedField.fieldValues && (submittedField.fieldValues.length > 0);
//No value submitted is only an error if the field is visible.
//If the field value has been marked as not required, then don't fail a no-value submission
var valueRequired = requiredFieldMap[fieldDefinition._id] && requiredFieldMap[fieldDefinition._id].valueRequired;
if (!valueSubmitted && visible && valueRequired) {
return cb(undefined, "No value submitted for field " + fieldDefinition.name);
}
return cb(undefined, null);
}
|
javascript
|
{
"resource": ""
}
|
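checkValueSubmitted reads requiredFieldMap from an enclosing scope that the snippet does not show. A hedged sketch of the shape it appears to expect (the id and flag below are illustrative):

var requiredFieldMap = {
  fieldId123: { valueRequired: true }  // keyed by fieldDefinition._id (hypothetical id)
};
// With that entry, a visible required field submitted with an empty fieldValues array
// yields "No value submitted for field <name>"; every other case calls cb(undefined, null).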
q2451
|
isSafeString
|
train
|
function isSafeString(str) {
var escape = ['&', '<', '>', '"', ''', '`'];
if (typeof str !== "string" || (escape.some(function(specialChar) {
return str.indexOf(specialChar) >= 0;
}))) {
return true;
}
}
|
javascript
|
{
"resource": ""
}
|
q2452
|
validatorDropDown
|
train
|
function validatorDropDown(fieldValue, fieldDefinition, previousFieldValues, cb) {
if (typeof(fieldValue) !== "string") {
return cb(new Error("Expected submission to be string but got " + typeof(fieldValue)));
}
fieldDefinition.fieldOptions = fieldDefinition.fieldOptions || {};
fieldDefinition.fieldOptions.definition = fieldDefinition.fieldOptions.definition || {};
//Check values exists in the field definition
if (!fieldDefinition.fieldOptions.definition.options) {
return cb(new Error("No options exist for field " + fieldDefinition.name));
}
//Finding the selected option
var found = _.find(fieldDefinition.fieldOptions.definition.options, function(dropdownOption) {
//check if fieldValue and the label need to be escaped
isSafeString(fieldValue) ? null : fieldValue = _.escape(fieldValue);
isSafeString(dropdownOption.label) ? null : dropdownOption.label = _.escape(dropdownOption.label);
return dropdownOption.label === fieldValue;
});
//Valid option, can return
if (found) {
return cb();
}
//If the option is empty and the field is required, then the blank option is being submitted
//The blank option is not valid for a required field.
if (found === "" && fieldDefinition.required && fieldDefinition.fieldOptions.definition.include_blank_option) {
return cb(new Error("The Blank Option is not valid. Please select a value."));
} else {
//Otherwise, it is an invalid option
return cb(new Error("Invalid option specified: " + fieldValue));
}
}
|
javascript
|
{
"resource": ""
}
|
q2453
|
validatorRadio
|
train
|
function validatorRadio(fieldValue, fieldDefinition, previousFieldValues, cb) {
if (typeof(fieldValue) !== "string") {
return cb(new Error("Expected submission to be string but got " + typeof(fieldValue)));
}
//Check value exists in the field definition
if (!fieldDefinition.fieldOptions.definition.options) {
return cb(new Error("No options exist for field " + fieldDefinition.name));
}
async.some(fieldDefinition.fieldOptions.definition.options, function(dropdownOption, cb) {
//check if fieldValue and the label need to be escaped
isSafeString(fieldValue) ? null : fieldValue = _.escape(fieldValue);
isSafeString(dropdownOption.label) ? null : dropdownOption.label = _.escape(dropdownOption.label);
return cb(dropdownOption.label === fieldValue);
}, function(found) {
if (!found) {
return cb(new Error("Invalid option specified: " + fieldValue));
} else {
return cb();
}
});
}
|
javascript
|
{
"resource": ""
}
|
q2454
|
validatorBarcode
|
train
|
function validatorBarcode(fieldValue, fieldDefinition, previousFieldValues, cb) {
if (typeof(fieldValue) !== "object" || fieldValue === null) {
return cb(new Error("Expected object but got " + typeof(fieldValue)));
}
if (typeof(fieldValue.text) !== "string" || fieldValue.text.length === 0) {
return cb(new Error("Expected text parameter."));
}
if (typeof(fieldValue.format) !== "string" || fieldValue.format.length === 0) {
return cb(new Error("Expected format parameter."));
}
return cb();
}
|
javascript
|
{
"resource": ""
}
|
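A quick usage sketch for the barcode validator; it only checks that fieldValue is an object with non-empty text and format strings, so the other arguments can be placeholders (the format string below is illustrative):

validatorBarcode({ text: '0123456789012', format: 'EAN_13' }, {}, null, function(err) {
  // err is undefined: both text and format are non-empty strings
});
validatorBarcode({ text: '' }, {}, null, function(err) {
  // err is an Error: "Expected text parameter."
});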
q2455
|
convertDSCacheToFieldOptions
|
train
|
function convertDSCacheToFieldOptions(fieldType, cacheEntries) {
//Radio and Dropdown only allow the first option to be selected
//Checkboxes can have multiple options selected.
var alreadySelected = false;
return _.map(cacheEntries, function(cacheEntry, index) {
var valToReturn = {
key: cacheEntry.key || index,
label: cacheEntry.value,
checked: cacheEntry.selected && (!alreadySelected || fieldType === CONSTANTS.FORM_CONSTANTS.FIELD_TYPE_CHECKBOXES)
};
if (valToReturn.checked) {
alreadySelected = true;
}
return valToReturn;
});
}
|
javascript
|
{
"resource": ""
}
|
q2456
|
isGlob
|
train
|
function isGlob(candidate) {
if (typeof candidate === 'string' && candidate.length > 0) {
return true;
}
if (Array.isArray(candidate) && candidate.length > 0) {
return candidate.every(candidateEl => typeof candidateEl === 'string');
}
return false;
}
|
javascript
|
{
"resource": ""
}
|
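Despite its name, this isGlob only validates shape: a non-empty string, or a non-empty array made entirely of strings. A short usage sketch:

isGlob('**/*.js');           // true  -- any non-empty string passes
isGlob(['a/*.js', 'b/**']);  // true  -- non-empty array of strings
isGlob([]);                  // false -- empty array
isGlob(['a/*.js', 42]);      // false -- a non-string element fails the check
isGlob(undefined);           // false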
q2457
|
parseField
|
train
|
function parseField(fieldKey, fieldData, options) {
let parseFn = contents => ({ contents: contents });
let contents = fieldData;
// Check to see if options.fieldParsers contains this key
if (options.fieldParsers.hasOwnProperty(fieldKey)) {
const parserKey = options.fieldParsers[fieldKey];
parseFn = options.parsers[parserKey].parseFn;
contents = typeof fieldData === 'string' ? fieldData : fieldData.contents;
}
// Check to see if there is a manually-added parser in the data
if (typeof fieldData === 'object' && fieldData.hasOwnProperty('parser')) {
if (options.parsers.hasOwnProperty(fieldData.parser)) {
parseFn = options.parsers[fieldData.parser].parseFn;
} else {
DrizzleError.error(
new DrizzleError(
`parser '${fieldData.parser}' set on field '${fieldKey}' not defined`,
DrizzleError.LEVELS.WARN
),
options.debug
);
}
contents = fieldData.contents;
if (!fieldData.hasOwnProperty('contents')) {
// TODO again
}
}
return parseFn(contents);
}
|
javascript
|
{
"resource": ""
}
|
q2458
|
readFiles
|
train
|
function readFiles(
glob,
{ parsers = {}, encoding = 'utf-8', globOpts = {} } = {}
) {
return getFiles(glob, globOpts).then(paths => {
return Promise.all(
paths.map(filepath => {
return readFile(filepath, encoding).then(fileData => {
const parser = matchParser(filepath, parsers);
fileData = parser(fileData, filepath);
if (typeof fileData === 'string') {
fileData = { contents: fileData };
}
return Object.assign(fileData, { path: filepath });
});
})
);
});
}
|
javascript
|
{
"resource": ""
}
|
q2459
|
importTheme
|
train
|
function importTheme(req, res, next) {
req.appformsResultPayload = req.appformsResultPayload || {};
var themeData = (req.appformsResultPayload.data && req.appformsResultPayload.type === constants.resultTypes.themeTemplate) ? req.appformsResultPayload.data : undefined ;
var importThemeParams = {
theme: themeData,
name: req.body.name,
description: req.body.description,
userEmail: req.user.email
};
forms.cloneTheme(_.extend(req.connectionOptions, importThemeParams), resultHandler(constants.resultTypes.themes, req, next));
}
|
javascript
|
{
"resource": ""
}
|
q2460
|
exportThemes
|
train
|
function exportThemes(req, res, next) {
var options = req.connectionOptions;
forms.exportThemes(options, resultHandler(constants.resultTypes.themes, req, next));
}
|
javascript
|
{
"resource": ""
}
|
q2461
|
importThemes
|
train
|
function importThemes(req, res, next) {
var options = req.connectionOptions;
var themesToImport = req.body || [];
if (!_.isArray(req.body)) {
return next("Expected An Array Of Themes");
}
forms.importThemes(options, themesToImport, resultHandler(constants.resultTypes.themes, req, next));
}
|
javascript
|
{
"resource": ""
}
|
q2462
|
getDepInfo
|
train
|
function getDepInfo (dep, currFile, customDepResolve) {
let depResolve
if (!customDepResolve) {
depResolve = defaultDepResolve
} else {
depResolve = (dep, currFile) => {
// pass defaultDepResolve as the third param
return customDepResolve(dep, currFile, defaultDepResolve)
}
}
let currType = getParseType(currFile)
let type = getParseType(dep) || currType
let info = {
parent: currFile, // parent file path
type: type, // current file type (js/css)
raw: dep, // raw dependency name (require('./xxx') => './xxx')
name: null, // formatted dependency name ('~@alife/xxx' => '@alife/xxx')
module: null, // module name (only external module)
file: null // resolved file name (only relative file)
}
info.name = depResolve(dep, currFile)
if (!info.name.startsWith('.')) {
if (info.name.startsWith('@')) {
info.module = info.name.split('/', 2).join('/')
} else {
info.module = info.name.split('/', 1)[0]
}
} else {
info.file = fileResolve(info.name, currFile)
if (!info.file) {
throw new ResolveError(info.name, currFile)
}
}
return info
}
|
javascript
|
{
"resource": ""
}
|
q2463
|
analyzeFile
|
train
|
function analyzeFile (file, content, filter, depResolve, depth, result) {
depth = typeof depth === 'number' ? depth : Infinity
result = result || {}
if (depth < 1) {
return
}
if (file in result) {
return
}
debug('analyze file: file = %s, depth = %s', file, depth)
let deps = parseFile(file, content) || []
let item = result[file] = {
deps: [],
relatives: [],
modules: []
}
// filter deps
if (filter) {
deps = deps.filter(dep => filter(dep, file))
}
// convert
deps.forEach(dep => {
let info = getDepInfo(dep, file, depResolve)
item.deps.push(info)
if (info.module && item.modules.indexOf(info.module) < 0) {
item.modules.push(info.module)
} else if (info.file && item.relatives.indexOf(info.file) < 0) {
item.relatives.push(info.file)
// deep first traversing
analyzeFile(info.file, fs.readFileSync(info.file, 'utf8'), filter, depResolve, depth - 1, result)
}
})
return result
}
|
javascript
|
{
"resource": ""
}
|
q2464
|
analyze
|
train
|
function analyze (entry, options) {
options = options || {}
let result = {}
// support array
if (!Array.isArray(entry)) {
entry = [entry]
}
entry.forEach(file => {
let content
// normalize file
if (typeof file === 'string') {
content = fs.readFileSync(file, 'utf8')
} else {
content = file.content
file = file.file
}
let filter = options.filter
let depResolve = options.depResolve
let depth = options.depth
analyzeFile(file, content, filter, depResolve, depth, result)
})
return result
}
|
javascript
|
{
"resource": ""
}
|
q2465
|
getParseType
|
train
|
function getParseType (file) {
let ext = path.extname(file)
for (let type in FILE_TYPE_MAP) {
let exts = FILE_TYPE_MAP[type]
if (exts.indexOf(ext) >= 0) {
return type
}
}
}
|
javascript
|
{
"resource": ""
}
|
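FILE_TYPE_MAP is not included in the snippet; a minimal sketch of the shape it appears to assume, mapping a parse type to the extensions it covers (the entries here are illustrative):

var FILE_TYPE_MAP = {
  js: ['.js', '.jsx'],
  css: ['.css', '.less']
};
// With the map above: getParseType('lib/util.js') -> 'js',
// getParseType('theme.less') -> 'css', getParseType('README.md') -> undefined.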
q2466
|
parseData
|
train
|
function parseData(options) {
return readFileTree(options.src.data, options.keys.data, options);
}
|
javascript
|
{
"resource": ""
}
|
q2467
|
train
|
function(firstName, lang) {
if (lang && male[firstName] && male[firstName][lang] && female[firstName] && female[firstName][lang]) {
return 'unisex';
} else if (lang && male[firstName] && male[firstName][lang]) {
return 'male';
} else if (lang && female[firstName] && female[firstName][lang]) {
return 'female';
} else if (male[firstName] && female[firstName]) {
return 'unisex';
} else if (male[firstName]) {
return 'male';
} else if (female[firstName]) {
return 'female';
}
return 'unknown';
}
|
javascript
|
{
"resource": ""
}
|
|
q2468
|
prepareData
|
train
|
function prepareData(items, with_timestamps, with_ns) {
var data = {
request: 'sender data',
data: items
};
if (with_timestamps) {
var ts = Date.now() / 1000;
data.clock = ts | 0;
if (with_ns) {
data.ns = (ts % 1) * 1000 * 1000000 | 0;
}
}
var payload = new Buffer(JSON.stringify(data), 'utf8'),
header = new Buffer(5 + 4); // ZBXD\1 + packed payload.length
header.write('ZBXD\x01');
header.writeInt32LE(payload.length, 5);
return Buffer.concat([header, new Buffer('\x00\x00\x00\x00'), payload]);
}
|
javascript
|
{
"resource": ""
}
|
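An inspection sketch for the packet produced above. The layout follows the Zabbix sender protocol: the 'ZBXD\x01' signature, an 8-byte little-endian length (written here as a 4-byte length plus four zero bytes), then the JSON payload. The item fields are illustrative:

var packet = prepareData([{ host: 'web01', key: 'agent.ping', value: '1' }], true, false);
packet.slice(0, 5).toString();                          // 'ZBXD\x01'
packet.readUInt32LE(5);                                 // low 32 bits of the payload length
// bytes 9-12 are the zero-filled high 32 bits of the length field
JSON.parse(packet.slice(13).toString('utf8')).request;  // 'sender data'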
q2469
|
train
|
function(module, memory) {
// @str
module.str = function(address) {
let view = memory.F32.subarray(address >> 2, (address >> 2) + 16);
let out = "";
for (let ii = 0; ii < 16; ++ii) {
if (ii + 1 < 16) out += view[ii] + ", ";
else out += view[ii];
}
return "mat4(" + out + ")";
};
// @view
module.view = function(address) {
let view = memory.F32.subarray(address >> 2, (address >> 2) + 16);
//view.address = address;
return view;
};
// @exactEquals
let _exactEquals = module.exactEquals;
module.exactEquals = function(a, b) {
return !!_exactEquals(a, b);
};
// @equals
let _equals = module.equals;
module.equals = function(a, b) {
return !!_equals(a, b);
};
}
|
javascript
|
{
"resource": ""
}
|
|
q2470
|
isSqlQuery
|
train
|
function isSqlQuery(literal) {
if (!literal) {
return false;
}
try {
parser.parse(literal);
} catch (error) {
return false;
}
return true;
}
|
javascript
|
{
"resource": ""
}
|
q2471
|
validate
|
train
|
function validate(node, context) {
if (!node) {
return;
}
if (node.type === 'TaggedTemplateExpression' && node.tag.name !== 'sql') {
node = node.quasi;
}
if (node.type === 'TemplateLiteral' && node.expressions.length) {
const literal = node.quasis.map(quasi => quasi.value.raw).join('x');
if (isSqlQuery(literal)) {
context.report(node, 'Use the `sql` tagged template literal for raw queries');
}
}
}
|
javascript
|
{
"resource": ""
}
|
q2472
|
yep
|
train
|
function yep(data) {
var err;
if (data instanceof Error) err = data;
if (!err && data) backup = collection.content = data;
else collection.content = backup;
self.logger.debug(
'Received a'+ (err ? 'n error' : ' processed')
+ ' response from the '+ layer.id +' plugin '
+ (collection.length ? 'for a '+ collection.extension +' file' : '')
);
if (err) err.stack.split('\n').forEach(function print(line) {
self.logger.error(line);
});
// Clean up before we continue
layer.destroy();
capture.dispose();
processed = collection;
done(err, processed);
}
|
javascript
|
{
"resource": ""
}
|
q2473
|
find
|
train
|
function find(deleted, file) {
var meta = bundle[file].meta
, match = files.some(function some (file) {
return file === meta.location || ~meta.location.indexOf(file)
|| (meta.compiler && ~meta.compiler.imported.join(',').indexOf(file));
});
if (!match) return;
// If we are checking for deleted files, don't read the content.
if (!deleted) {
self.hold(meta.location); // Temporary freeze of the event loop.
self.package.bundle[file].meta.content = fs.readFileSync(meta.location, 'utf8');
}
// Add the file extension to the extensions list so we can create
// a dedicated rebuild.
extensions.push(meta.output);
changes.push(file);
}
|
javascript
|
{
"resource": ""
}
|
q2474
|
insert
|
train
|
function insert(match, commenttype, statement, file) {
var location = path.resolve(reference, file)
, data = '';
//
// If it's not an absolute path, try to require.resolve the file.
//
if (!fs.existsSync(location)) try {
location = require.resolve(file);
} catch (e) { }
if (!fs.existsSync(location)) {
return self.critical(
'// [square] @%s statement %s in %s does not exist'
, statement
, file.red
, reference.red
);
}
if (~seen.indexOf(location)) {
return self.critical('recursive [square] import statement detected %s', match);
}
// We processed the file, mark it as seen to protect us against recursive
// includes.
seen.push(location);
data += self.commentWrap('[square] Directive: ' + location, extension);
data += fs.readFileSync(location, 'utf8').trim();
// Pass the contents back in to the directive again so we can also process
// the directives inside the directive.
return self.directive(data, extension, path.dirname(location), seen);
}
|
javascript
|
{
"resource": ""
}
|
q2475
|
prev
|
train
|
function prev(index, lines) {
while (index--) {
var line = lines[index].trim();
if (line && !/^(\/\/|\/\*)/.test(line)) {
return { line: line, index: index };
}
}
return {};
}
|
javascript
|
{
"resource": ""
}
|
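A usage sketch: starting from an index, prev walks backwards to the nearest preceding line that is neither blank nor a // or /* comment:

var lines = [
  'var a = 1;',
  '// a comment',
  '',
  'var b = 2;'
];
prev(3, lines); // { line: 'var a = 1;', index: 0 }
prev(0, lines); // {} -- nothing precedes the first line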
q2476
|
writer
|
train
|
function writer(err, collections) {
if (err) return done(err);
// @TODO make sure that self.write only accepts 2 arguments and reads the
// type from the supplied collection object.
self.logger.debug('Writing files');
async.map(collections, self.write.bind(self), done);
}
|
javascript
|
{
"resource": ""
}
|
q2477
|
done
|
train
|
function done(err, files) {
// On processing errors notify the watcher (if listening) that square is idle.
if (err && self.writable) {
self.logger.error(err);
return fn(err);
}
// Stop our cache, it will be activated again when we need it
self.cache.stop();
// Merge the results array to a key=>value object
files = (files || []).reduce(function reduceFiles(memo, collection) {
memo[collection.basename] = collection;
return memo;
}, {});
// Building is done, call callback if available.
self.logger.info('Successfully generated %s', Object.keys(files).join(', ').green);
if (err && !args.function) self.critical(err);
if (args.function) args.function.call(self, err, files, extensions);
}
|
javascript
|
{
"resource": ""
}
|
q2478
|
getObjectByKey
|
train
|
function getObjectByKey(data, prop) {
if (!prop || !~prop.indexOf('.')) return data[prop];
var result = prop
, structure = data;
for (var paths = prop.split('.'), i = 0, length = paths.length; i < length; i++) {
result = structure[+paths[i] || paths[i]];
structure = result;
}
return result || data[prop];
}
|
javascript
|
{
"resource": ""
}
|
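A usage sketch: keys without a dot are plain property lookups, while dotted keys are walked one segment at a time, with numeric segments indexing into arrays:

var data = { user: { roles: ['admin', 'editor'] }, title: 'Home' };
getObjectByKey(data, 'title');        // 'Home'
getObjectByKey(data, 'user.roles.1'); // 'editor'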
q2479
|
finished
|
train
|
function finished(err) {
if (err) self.critical('Failed to store %s. %s.', base, err.message);
if (fn) fn(err, collection);
}
|
javascript
|
{
"resource": ""
}
|
q2480
|
spinner
|
train
|
function spinner(silent) {
var interval = 100
, frames = spinner.frames
, len = frames.length
, i = 0;
if (silent) return;
spinner.interval = setInterval(function tick() {
process.stdout.write(
'\r'
+ frames[i++ % len]
+ 'Waiting for file changes'.white
);
}, interval);
}
|
javascript
|
{
"resource": ""
}
|
q2481
|
full
|
train
|
function full(source) {
for (var i = 0, item; i < source.length; i++) {
item = source.splice(i, 1)[0];
seen.push(item);
if (source.length === 0) permutations.push(seen.slice());
full(source);
source.splice(i, 0, item);
seen.pop();
}
return permutations;
}
|
javascript
|
{
"resource": ""
}
|
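A sketch of how `full` appears to be driven; `permutations` and `seen` are closure variables in the original, so they are recreated here as assumptions:

var permutations = []
  , seen = [];

full(['a', 'b', 'c']);
// permutations -> all 6 orderings, e.g. ['a','b','c'], ['a','c','b'], ['b','a','c'], ...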
q2482
|
fast
|
train
|
function fast(source) {
var i = source.length;
while (i--) {
combinations.push(source.slice(0, i + 1));
if (i === 1) fast(source.slice(i));
}
return combinations;
}
|
javascript
|
{
"resource": ""
}
|
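A matching sketch for `fast`; `combinations` is likewise a closure variable in the original, recreated here as an assumption. Note that it collects contiguous slices rather than every subset:

var combinations = [];

fast(['a', 'b', 'c']);
// combinations -> [['a','b','c'], ['a','b'], ['b','c'], ['c'], ['b'], ['a']]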
q2483
|
Watcher
|
train
|
function Watcher(square, port, silent) {
var self = this;
this.square = square;
this.silent = silent || false;
this.socket = this.live.call(this, port);
this.config = path.resolve(process.env.PWD, this.square.package.location);
// Initialize the live reload, also trigger watching and process the file list.
this.init = function init() {
self.watch.apply(self, arguments[1]);
};
// Require fs.notify and findit, trigger the watch.
async.parallel([canihaz['fs.notify'], canihaz.findit], this.init);
}
|
javascript
|
{
"resource": ""
}
|
q2484
|
getPath
|
train
|
function getPath(path, callback) {
fs.lstat(path, function(err, stats) {
if(err) return callback(err);
// Check if it's a link
if(stats.isSymbolicLink()) return fs.readlink(path, callback);
callback(null, path);
});
}
|
javascript
|
{
"resource": ""
}
|
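A hypothetical call to `getPath`: the path is stat'ed first and, when it turns out to be a symbolic link, the link target is handed to the callback instead:

getPath('./node_modules/.bin/square', function (err, resolved) {
  if (err) return console.error(err);
  console.log('using', resolved);
});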
q2485
|
filter
|
train
|
function filter(location) {
var file = path.basename(location)
, vim = file.charAt(file.length - 1) === '~'
, extension = path.extname(location).slice(1);
// filter out the duplicates
if (~changes.indexOf(location) || vim) return;
changes.push(location);
process.nextTick(limited);
}
|
javascript
|
{
"resource": ""
}
|
q2486
|
done
|
train
|
function done(err, content) {
if (err) return cb(err);
var code = JSON.parse(self.square.package.source)
, current = bundle.version || self.version(bundle.meta.content)
, source;
code.bundle[key].version = version;
bundle.version = version;
bundle.content = content;
// now that we have updated the shizzle, we can write a new file
// also update the old source with the new version
source = JSON.stringify(code, null, 2);
self.square.package.source = source;
try {
async.parallel([
async.apply(fs.writeFile, self.square.package.location, source)
, async.apply(fs.writeFile, bundle.meta.location, content)
], function (err, results) {
self.square.logger.notice(
'successfully updated %s from version %s to %s'
, key
, current.grey
, version.green
);
cb(err);
});
} catch (e) { cb(e); }
}
|
javascript
|
{
"resource": ""
}
|
q2487
|
compile
|
train
|
function compile (extension, content, options, fn) {
// allow optional options argument
if (_.isFunction(options)) {
fn = options;
options = {};
}
var config = _.clone(compile.configuration)
, args = flags.slice(0)
, buffer = ''
, errors = ''
, compressor;
if (compile.configuration.type) {
config.type = extension;
}
// generate the --key value options; both the key and the value should be added
// separately to the `args` array or the child_process will choke.
Object.keys(config).filter(function filter (option) {
return config[option];
}).forEach(function format (option) {
var bool = _.isBoolean(config[option]);
if (!bool || config[option]) {
args.push('--' + option);
if (!bool) args.push(config[option]);
}
});
// apply the configuration
_.extend(config, options);
// spawn the shit and set the correct encoding
compressor = spawn(type, args);
compressor.stdout.setEncoding('utf8');
compressor.stderr.setEncoding('utf8');
/**
* Buffer up the results so we can concat them once the compression is
* finished.
*
* @param {Buffer} chunk
* @api private
*/
compressor.stdout.on('data', function data (chunk) {
buffer += chunk;
});
compressor.stderr.on('data', function data (err) {
errors += err;
});
/**
* The compressor has finished can we now process the data and see if it was
* a success.
*
* @param {Number} code
* @api private
*/
compressor.on('close', function close (code) {
// invalid states
if (errors.length) return fn(new Error(errors));
if (code !== 0) return fn(new Error('process exited with code ' + code));
if (!buffer.length) return fn(new Error('no data returned ' + type + args));
// correctly processed the data
fn(null, buffer);
});
// write out the content that needs to be minified
compressor.stdin.end(content);
}
|
javascript
|
{
"resource": ""
}
|
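A sketch of invoking `compile`; `type` and `flags` are closure values in the original module (the compressor binary and its default CLI arguments), so the actual output depends on how that module was configured:

compile('js', 'var answer = 42;', function (err, minified) {
  if (err) return console.error('compression failed:', err.message);
  console.log(minified); // the compressor's buffered stdout
});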
q2488
|
message
|
train
|
function message(worker, task) {
var callback = worker.queue[task.id]
, err;
// Rebuild the Error object so we can pass it to our callbacks
if (task.err) {
err = new Error(task.err.message);
err.stack = task.err.stack;
}
// Kill the whole fucking system, we are in a fucked up state and should die
// badly, so just throw something and have the process.uncaughtException
// handle it.
if (!callback) {
if (err) console.error(err);
console.error(task);
throw new Error('Unable to process message from worker, can\'t locate the callback!');
}
callback(err, task);
delete worker.queue[task.id];
}
|
javascript
|
{
"resource": ""
}
|
q2489
|
Plugin
|
train
|
function Plugin(square, collection) {
if (!(this instanceof Plugin)) return new Plugin(square, collection);
if (!square) throw new Error('Missing square instance');
if (!collection) throw new Error('Missing collection');
var self = this;
this.square = square; // Reference to the current square instance.
this.async = async; // Handle async operation.
this._ = _; // Utilities.
this.logger = {}; // Our logging utility.
this.collection = collection; // Reference to the original collection.
// Provide a default namespace to the logging method, we are going to prefix
// it with the plugin's name which will help with the debug ability of this
// module.
Object.keys(square.logger.levels).forEach(function generate(level) {
self.logger[level] = square.logger[level].bind(
square.logger
, '[plugin::'+ self.id +']'
);
});
// Merge the given collection with the plugin, but don't override the default
// values.
Object.keys(collection).forEach(function each(key) {
self[key] = collection[key];
});
// Force an async nature of the plugin interface, this also allows us to
// attach or listen to methods after we have constructed the plugin.
process.nextTick(this.configure.bind(this));
}
|
javascript
|
{
"resource": ""
}
|
q2490
|
configure
|
train
|
function configure() {
var pkg = this.square.package
, configuration = pkg.configuration
, type = this.type || Plugin.modifier
, self = this
, load = [];
// Check for the distribution and if it should accept the given extension,
// extend self with the context of the plugin.
if (!~type.indexOf('once') && (!this.distributable() || !this.accepted())) {
this.logger.debug(
'disregarding this plugin for extension: '+ this.extension
+', distribution: '+ this.distribution
);
return this.emit('disregard');
}
// Check if there are any configuration options in the package.
if (_.isObject(pkg.plugins) && this.id in pkg.plugins) {
this.merge(this, pkg.plugins[this.id]);
}
// Merge in the plugin configuration.
if (
configuration
&& _.isObject(configuration.plugins)
&& this.id in configuration.plugins
) {
this.merge(this, configuration.plugins[this.id]);
}
// Check if the bundle it self also had specific configurations for this
// plugin.
if (this.id in this && _.isObject(this[this.id])) {
this.merge(this, this[this.id]);
}
// Ensure that our requires is an array, before we continue
if (!Array.isArray(this.requires)) this.requires = [this.requires];
// Check if we need to lazy load any dependencies
if (this.requires && this.requires.length) {
load = this.requires.map(function (file) {
if (typeof file !== 'object') return file;
if (!('extension' in file)) return file.name || file;
if (file.extension === self.extension) return file.name || file;
return undefined;
}).filter(Boolean); // Only get existing files
// Only fetch shizzle when we actually have shizzle to fetch here
if (load.length) return canihaz.apply(canihaz, load.concat(function canihaz(err) {
if (err) return self.emit('error', err);
// Add all the libraries to the context, the `canihaz#all` returns an
// error first, and then all libraries it installed or required in the
// order as given to it, which is in our case the `this.requires` order.
Array.prototype.slice.call(arguments, 1).forEach(function (lib, index) {
self[load[index]] = lib;
});
// We are now fully initialized.
if (self.initialize) self.initialize();
}));
}
// We are now fully initialized.
if (self.initialize) self.initialize();
}
|
javascript
|
{
"resource": ""
}
|
q2491
|
parser
|
train
|
function parser (content, options, fn) {
var jshintrc = path.join(process.env.HOME || process.env.USERPROFILE, '.jshintrc')
, jshintninja = configurator(jshintrc)
, config = options.jshint;
// extend all the things
config = _.extend(config, jshintninja);
canihaz.jshint(function lazyload (err, jshint) {
if (err) return fn(err);
var validates = jshint.JSHINT(content, config)
, errors;
if (!validates) errors = formatters.js(jshint.JSHINT.errors);
fn(null, errors);
});
}
|
javascript
|
{
"resource": ""
}
|
q2492
|
formatter
|
train
|
function formatter (fail) {
return fail.map(function oops (err) {
return {
line: err.line
, column: err.character
, message: err.reason
, ref: err
};
});
}
|
javascript
|
{
"resource": ""
}
|
q2493
|

|
train
|
function (file, errors, options) {
var reports = []
, content = file.content.split('\n');
errors.forEach(function error (err) {
// some linters don't return the location -_-
if (!err.line) return reports.push(err.message.grey, '');
var start = err.line > 3 ? err.line - 3 : 0
, stop = err.line + 2
, range = content.slice(start, stop)
, numbers = _.range(start + 1, stop + 1)
, len = stop.toString().length;
reports.push('Lint error: ' + err.line + ' col ' + err.column);
range.map(function reformat (line) {
var lineno = numbers.shift()
, offender = lineno === err.line
, inline = /\'[^\']+?\'/
, slice;
// this is the actual line with the error, so we should start finding
// what the error is and how we could highlight it in the output
if (offender) {
if (line.length < err.column) {
// we are missing something at the end of the line.. so add a red
// square
line += ' '.inverse.red;
} else {
// we have a direct match on a statement
if (inline.test(err.message)) {
slice = err.message.match(inline)[0].replace(/\'/g, '');
} else {
// it's happening in the center of things, so we can start
// coloring inside the shizzle
slice = line.slice(err.column - 1);
}
line = line.replace(slice, slice.inverse.red);
}
}
reports.push(' ' + pad(lineno, len) + ' | ' + line);
});
reports.push('');
reports.push(err.message.grey);
reports.push('');
});
// output the shizzle
reports.forEach(function output (line) {
this.logger.error(line);
}.bind(this));
}
|
javascript
|
{
"resource": ""
}
|
q2494
|
configurator
|
train
|
function configurator (location) {
return !(location && fs.existsSync(location))
? {}
: JSON.parse(
fs.readFileSync(location, 'UTF-8')
.replace(/\/\*[\s\S]*(?:\*\/)/g, '') // removes /* comments */
.replace(/\/\/[^\n\r]*/g, '') // removes // comments
);
}
|
javascript
|
{
"resource": ""
}
|
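An illustrative run of `configurator` against a hypothetical ~/.jshintrc that contains comments, which plain JSON.parse would reject; both comment styles are stripped before parsing and a missing file yields {}:

// ~/.jshintrc (hypothetical):
//   {
//     /* prefer strict comparisons */
//     "eqeqeq": true, // no sloppy ==
//     "node": true
//   }

var config = configurator(path.join(process.env.HOME || process.env.USERPROFILE, '.jshintrc'));
// -> { eqeqeq: true, node: true }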
q2495
|
createBoundary
|
train
|
function createBoundary(data) {
while (true) {
var boundary = `----IPFSMini${Math.random() * 100000}.${Math.random() * 100000}`;
if (data.indexOf(boundary) === -1) {
return boundary;
}
}
}
|
javascript
|
{
"resource": ""
}
|
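A hypothetical multipart body built around the collision-free boundary returned by `createBoundary`:

const body = 'hello ipfs';
const boundary = createBoundary(body);

const payload =
  `--${boundary}\r\n` +
  'Content-Disposition: form-data; name="file"\r\n\r\n' +
  `${body}\r\n` +
  `--${boundary}--`;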
q2496
|
returnFile
|
train
|
function returnFile(cb) {
fs.readFile(fileToLoad, function (err, fileBuf) {
if (err || ! fileBuf) {
createFile(function (err) {
if (err) return cb(err);
returnFile(cb);
});
return;
}
cb(null, fileBuf, fileToLoad);
});
}
|
javascript
|
{
"resource": ""
}
|
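A sketch of calling `returnFile`; `fileToLoad` and `createFile` live in the enclosing scope of the original, so this only assumes the file is created on demand and then read back:

returnFile(function (err, buffer, location) {
  if (err) return console.error('could not load or create the file', err);
  console.log('loaded %d bytes from %s', buffer.length, location);
});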
q2497
|
createOverlay
|
train
|
function createOverlay(config) {
var id = MODAL_OVERLAY_ID;
var overlayText = config.text || MODAL_OVERLAY_TXT;
var overlayClass = config.prefixClass + MODAL_OVERLAY_CLASS_SUFFIX;
var overlayBackgroundEnabled = config.backgroundEnabled === 'disabled' ? 'disabled' : 'enabled';
return '<span\n id="' + id + '"\n class="' + overlayClass + '"\n ' + MODAL_OVERLAY_BG_ENABLED_ATTR + '="' + overlayBackgroundEnabled + '"\n title="' + overlayText + '"\n >\n <span class="' + VISUALLY_HIDDEN_CLASS + '">' + overlayText + '</span>\n </span>';
}
|
javascript
|
{
"resource": ""
}
|
q2498
|
createModal
|
train
|
function createModal(config) {
var id = MODAL_JS_ID;
var modalClassName = config.modalPrefixClass + MODAL_CLASS_SUFFIX;
var modalClassWrapper = config.modalPrefixClass + MODAL_WRAPPER_CLASS_SUFFIX;
var buttonCloseClassName = config.modalPrefixClass + MODAL_BUTTON_CLASS_SUFFIX;
var buttonCloseInner = config.modalCloseImgPath ? '<img src="' + config.modalCloseImgPath + '" alt="' + config.modalCloseText + '" class="' + config.modalPrefixClass + MODAL_CLOSE_IMG_CLASS_SUFFIX + '" />' : '<span class="' + config.modalPrefixClass + MODAL_CLOSE_TEXT_CLASS_SUFFIX + '">\n ' + config.modalCloseText + '\n </span>';
var contentClassName = config.modalPrefixClass + MODAL_CONTENT_CLASS_SUFFIX;
var titleClassName = config.modalPrefixClass + MODAL_TITLE_CLASS_SUFFIX;
var title = config.modalTitle !== '' ? '<h1 id="' + MODAL_TITLE_ID + '" class="' + titleClassName + '">\n ' + config.modalTitle + '\n </h1>' : '';
var button_close = '<button type="button" class="' + MODAL_BUTTON_JS_CLASS + ' ' + buttonCloseClassName + '" id="' + MODAL_BUTTON_JS_ID + '" title="' + config.modalCloseTitle + '" ' + MODAL_BUTTON_CONTENT_BACK_ID + '="' + config.modalContentId + '" ' + MODAL_BUTTON_FOCUS_BACK_ID + '="' + config.modalFocusBackId + '">\n ' + buttonCloseInner + '\n </button>';
var content = config.modalText;
var describedById = config.modalDescribedById !== '' ? ATTR_DESCRIBEDBY + '="' + config.modalDescribedById + '"' : '';
// If there is no content but an id we try to fetch content id
if (content === '' && config.modalContentId) {
var contentFromId = findById(config.modalContentId);
if (contentFromId) {
content = '<div id="' + MODAL_CONTENT_JS_ID + '">\n ' + contentFromId.innerHTML + '\n </div>';
// we remove content from its source to avoid id duplicates, etc.
contentFromId.innerHTML = '';
}
}
return '<dialog id="' + id + '" class="' + modalClassName + '" ' + ATTR_ROLE + '="' + MODAL_ROLE + '" ' + describedById + ' ' + ATTR_OPEN + ' ' + ATTR_LABELLEDBY + '="' + MODAL_TITLE_ID + '">\n <div role="document" class="' + modalClassWrapper + '">\n ' + button_close + '\n <div class="' + contentClassName + '">\n ' + title + '\n ' + content + '\n </div>\n </div>\n </dialog>';
}
|
javascript
|
{
"resource": ""
}
|
q2499
|
$listModals
|
train
|
function $listModals() {
var node = arguments.length <= 0 || arguments[0] === undefined ? doc : arguments[0];
return [].slice.call(node.querySelectorAll('.' + MODAL_JS_CLASS));
}
|
javascript
|
{
"resource": ""
}
|