_id
stringlengths 2
6
| title
stringlengths 0
58
| partition
stringclasses 3
values | text
stringlengths 52
373k
| language
stringclasses 1
value | meta_information
dict |
---|---|---|---|---|---|
q2200
|
getDataName
|
train
|
// Resolves the data name of this Attribute, optionally scoped to an adapter.
// When an adapterName is given and this.dataName is an adapter->name map that
// contains it, the mapped name wins. Otherwise a plain string dataName is
// used, falling back to the Attribute's own name.
function getDataName(adapterName) {
  expect(arguments).to.have.length.below(
    2,
    'Invalid arguments length when getting the data name of an Attribute ' +
    '(it has to be passed less than 2 arguments)');

  if (adapterName) {
    expect(adapterName).to.be.a(
      'string',
      'Invalid argument "adapterName" when getting the data name of an ' +
      'Attribute (it has to be a string)'
    );
    var isAdapterMap = this.dataName && typeof this.dataName === 'object';
    if (isAdapterMap && this.dataName.hasOwnProperty(adapterName)) {
      return this.dataName[adapterName];
    }
  }

  // Plain string dataName applies to every adapter; otherwise use the name.
  var hasStringDataName = this.dataName && typeof this.dataName === 'string';
  return hasStringDataName ? this.dataName : this.name;
}
|
javascript
|
{
"resource": ""
}
|
q2201
|
generateCSVSingleValue
|
train
|
/**
 * Builds the CSV cell(s) for a single value of a single field.
 *
 * Most fields produce one cell; file fields produce two (name + download
 * URL) and barcode fields produce two (format + text). When the value is
 * missing, the same number of empty cells is emitted so columns stay
 * aligned. Every cell is terminated with a trailing comma.
 */
function generateCSVSingleValue(field, val, downloadUrl, submissionId) {
  var isFile = fieldTypeUtils.isFileType(field.type);
  var isBarcode = fieldTypeUtils.isBarcodeType(field.type);
  var noValue = typeof (val) === 'undefined' || val === null;

  if (noValue) {
    //No value, spacers have to be added.
    //File and barcode fields occupy two columns, so they get two spacers.
    return (isFile || isBarcode) ? ',,' : ',';
  }

  //First cell: the value itself (or a type-specific stand-in).
  var firstCell = val;
  if (field.type === 'checkboxes') {
    firstCell = val.selections;
  } else if (isFile) {
    //File types have two fields, a name and url to be added
    firstCell = val.fileName ? val.fileName : '<not uploaded>';
  } else if (isBarcode) {
    firstCell = val.format ? val.format : "<not set>";
  }

  var line = csvStr(firstCell) + ',';

  //Second cell for the two-column field types.
  if (isFile) {
    var fileUrl = '<not uploaded>';
    if (val.groupId) {
      fileUrl = downloadUrl.replace(":id", submissionId).replace(":fileId", val.groupId);
    }
    line += csvStr(fileUrl) + ',';
  } else if (isBarcode) {
    line += csvStr(val.text ? val.text : "<not set>") + ',';
  }

  return line;
}
|
javascript
|
{
"resource": ""
}
|
q2202
|
generateCSVFieldValues
|
train
|
/**
 * Builds the CSV cells for all values of one field in one submission.
 * Repeating fields emit one cell group per possible repetition
 * (fieldOptions.definition.maxRepeat); other fields emit exactly one group.
 */
function generateCSVFieldValues(baseField, ff, downloadUrl, sub) {
  var values = ff ? misc.filterOutNullData(ff.fieldValues) : [];

  if (!(baseField && baseField.repeating)) {
    return generateCSVSingleValue(baseField, values[0], downloadUrl, sub._id);
  }

  var line = '';
  var maxRepeat = baseField.fieldOptions.definition.maxRepeat;
  for (var idx = 0; idx < maxRepeat; idx++) {
    //Missing repetitions pass undefined, which renders as spacer cells.
    line += generateCSVSingleValue(baseField, values[idx], downloadUrl, sub._id);
  }
  return line;
}
|
javascript
|
{
"resource": ""
}
|
q2203
|
checkWorkingDir
|
train
|
/**
 * Verifies that workingDir exists and is a directory.
 * Calls back with a descriptive error message string on failure,
 * or with no argument on success.
 */
function checkWorkingDir(workingDir, cb) {
  fs.stat(workingDir, function(err, stats) {
    if (err) {
      var missingMessage = "The directory " + workingDir + " does not exist.";
      logger.error(missingMessage);
      return cb(missingMessage);
    }
    //Checking that it is a directory
    if (!stats.isDirectory()) {
      var notDirMessage = "Expected " + workingDir + " to be a directory";
      logger.error(notDirMessage);
      return cb(notDirMessage);
    }
    return cb();
  });
}
|
javascript
|
{
"resource": ""
}
|
q2204
|
checkZipFile
|
train
|
/**
 * Verifies that the file at zipFilePath is a ZIP archive by sniffing its
 * MIME type. Calls back with the detection error, with an error message
 * string for a non-ZIP file, or with undefined on success.
 */
function checkZipFile(zipFilePath, cb) {
  //Checking that it is a ZIP file
  mimeInspector.detectFile(zipFilePath, function(err, fileMimetype) {
    if (err) {
      logger.error("Error detecting ZIP file", err);
      return cb(err);
    }
    var errMessage;
    if (fileMimetype !== 'application/zip') {
      errMessage = "Expected the file MIME type to be application/zip but was " + fileMimetype;
      logger.error(errMessage);
    }
    //errMessage stays undefined for a valid ZIP, so this doubles as success.
    return cb(errMessage);
  });
}
|
javascript
|
{
"resource": ""
}
|
q2205
|
importForms
|
train
|
/**
 * Imports form definitions from a ZIP archive.
 *
 * Flow: validate params -> check that the working dir and ZIP file exist ->
 * create a uniquely-named extraction directory -> unzip -> validate the
 * extracted content -> import the forms. The extraction directory and the
 * source archive are always cleaned up, on success or failure.
 *
 * @param connections - database connections, passed through to importFromDir
 * @param params - expects params.zipFilePath and params.workingDir
 * @param callback - cb(err, importedForms)
 */
function importForms(connections, params, callback) {
  params = params || {};
  logger.debug("Importing Forms ", params);
  //Validating presence of the required parameters. `failed` is a map of
  //missing-parameter messages when validation fails (per the validator's API).
  var paramsValidator = validate(params);
  var failed = paramsValidator.has(ZIP_FILE_PATH, WORKING_DIR);
  if (failed) {
    return callback("Validation Failed " + (failed[ZIP_FILE_PATH] || failed[WORKING_DIR]));
  }
  //Random directory name. An ObjectId is unique enough to keep concurrent
  //imports from clashing inside the same working directory.
  var newDirectoryName = (new mongoose.Types.ObjectId()).toString();
  var unzipDirectoryPath = path.join(params.workingDir, "/", newDirectoryName);
  async.waterfall([
    function checkFiles(cb) {
      //Both preconditions are independent, so check them in parallel.
      async.parallel([
        async.apply(checkWorkingDir, params.workingDir),
        async.apply(checkZipFile, params.zipFilePath)
      ], function(err) {
        //Not interested in passing any of the results from the aync.parallel to the waterfall callback
        cb(err);
      });
    },
    function createUniqueDirToUnzipTo(cb) {
      //Need to create a new directory
      mkdirp(unzipDirectoryPath, function(err) {
        return cb(err);
      });
    },
    async.apply(unzipFile, {
      zipFilePath: params.zipFilePath,
      workingDir: unzipDirectoryPath,
      queueConcurrency: 5
    }),
    function validateInput(cb) {
      //Validates the structure of the extracted directory before importing.
      inputValidator(unzipDirectoryPath, true, cb);
    },
    async.apply(importFromDir, connections, unzipDirectoryPath)
  ], function(err, importedForms) {
    if (err) {
      logger.error("Error Importing Forms ", err);
    }
    //we always need to cleanup, whether the import succeeded or not
    cleanupFiles(unzipDirectoryPath, params.zipFilePath);
    return callback(err, importedForms);
  });
}
|
javascript
|
{
"resource": ""
}
|
q2206
|
makeExportDirectory
|
train
|
/**
 * Creates the on-disk directory for a ZIP directory entry inside the
 * working dir, then asks the zipfile for its next entry.
 */
function makeExportDirectory(params, callback) {
  var targetDir = params.workingDir + "/" + params.entry.fileName;
  mkdirp(targetDir, function(err) {
    if (err) {
      logger.debug("Error making directory " + targetDir, err);
      return callback(err);
    }
    //Directory created; move the zipfile cursor on to the next entry.
    params.zipfile.readEntry();
    return callback(err);
  });
}
|
javascript
|
{
"resource": ""
}
|
q2207
|
streamFileEntry
|
train
|
/**
 * Extracts a single file entry from an open zipfile into the working
 * directory, creating any missing parent directories first.
 *
 * Calls back exactly once: after the file has been fully flushed to disk,
 * or with the first error from either stream. Advances the zipfile to its
 * next entry on success.
 *
 * @param params - {zipfile, entry, workingDir}
 * @param callback - cb(err)
 */
function streamFileEntry(params, callback) {
  params.zipfile.openReadStream(params.entry, function(err, readStream) {
    if (err) {
      return callback(err);
    }
    var newFilePath = params.workingDir + "/" + params.entry.fileName;
    // ensure parent directory exists
    mkdirp(path.dirname(newFilePath), function(err) {
      if (err) {
        logger.debug("Error making directory " + newFilePath, err);
        return callback(err);
      }
      // Guard: 'error' can fire on either stream after completion, so make
      // sure the callback is only ever invoked once.
      var finished = false;
      function done(streamErr) {
        if (finished) {
          return;
        }
        finished = true;
        callback(streamErr);
      }
      var writeStream = fs.createWriteStream(newFilePath);
      readStream.pipe(writeStream);
      // Wait until the data has been flushed to disk ('finish' on the write
      // side) rather than merely read from the zip ('end' on the read side),
      // so callers never observe a partially-written file.
      writeStream.on('finish', function() {
        params.zipfile.readEntry();
        done();
      });
      // Previously write errors (disk full, permissions) were unhandled and
      // would crash the process; route them to the callback instead.
      writeStream.on('error', done);
      readStream.on('error', done);
    });
  });
}
|
javascript
|
{
"resource": ""
}
|
q2208
|
unzipWorker
|
train
|
/**
 * async.queue worker for ZIP extraction: dispatches a single entry either
 * to directory creation or to file extraction.
 */
function unzipWorker(unzipTask, workerCb) {
  // directory file names end with '/'
  var isDirectoryEntry = /\/$/.test(unzipTask.entry.fileName);
  var handler = isDirectoryEntry ? makeExportDirectory : streamFileEntry;
  handler({
    zipfile: unzipTask.zipfile,
    entry: unzipTask.entry,
    workingDir: unzipTask.workingDir
  }, workerCb);
}
|
javascript
|
{
"resource": ""
}
|
q2209
|
unzipToWorkingDir
|
train
|
/**
 * Unzips the archive at params.zipFilePath into params.workingDir.
 *
 * Entries are read lazily and extracted through an async queue
 * (params.queueConcurrency workers, default 5). The callback fires once
 * the zip file has closed, carrying any error encountered while reading
 * or extracting entries.
 *
 * @param params - {zipFilePath, workingDir, queueConcurrency}
 * @param callback - cb(err)
 */
function unzipToWorkingDir(params, callback) {
  var unzipError;
  var queue = async.queue(unzipWorker, params.queueConcurrency || 5);
  //Pushing a single file unzip to the queue.
  function getQueueEntry(zipfile) {
    return function queueEntry(entry) {
      queue.push({
        zipfile: zipfile,
        workingDir: params.workingDir,
        entry: entry
      }, function(err) {
        if (err) {
          // Fix: interpolate the path (previously the literal text
          // "params.zipFilePath" was logged).
          logger.debug("Error unzipping file " + params.zipFilePath, err);
          // Fix: remember the failure so the 'close' handler reports it.
          // Previously the error was swallowed and the unzip reported success.
          unzipError = unzipError || err;
          //If one of the files has failed to unzip correctly. No point in continuing to unzip. Close the zip file.
          zipfile.close();
        }
      });
    };
  }
  unzip.open(params.zipFilePath, {lazyEntries: true}, function(err, zipfile) {
    if (err) {
      return callback(err);
    }
    zipfile.on("entry", getQueueEntry(zipfile));
    zipfile.readEntry();
    zipfile.on('error', function(err) {
      logger.error("Error unzipping Zip File " + params.zipFilePath, err);
      //Keep the first error encountered.
      unzipError = unzipError || err;
    });
    zipfile.on('close', function() {
      //When the queue is empty and the zip file has finished scanning files, then the unzip is finished
      logger.debug("Zip File " + params.zipFilePath + " Unzipped");
      callback(unzipError);
    });
  });
}
|
javascript
|
{
"resource": ""
}
|
q2210
|
getQueueEntry
|
train
|
/**
 * Returns the 'entry' event handler for an open zipfile: pushes each ZIP
 * entry onto the shared unzip queue (captured from the enclosing scope)
 * together with the zipfile and the target working directory.
 */
function getQueueEntry(zipfile) {
  return function queueEntry(entry) {
    queue.push({
      zipfile: zipfile,
      workingDir: params.workingDir,
      entry: entry
    }, function(err) {
      if (err) {
        // Fix: interpolate the path — previously the literal text
        // "params.zipFilePath" was logged instead of its value.
        logger.debug("Error unzipping file " + params.zipFilePath, err);
        //If one of the files has failed to unzip correctly. No point in continuing to unzip. Close the zip file.
        zipfile.close();
      }
    });
  };
}
|
javascript
|
{
"resource": ""
}
|
q2211
|
filterOutNullData
|
train
|
/**
 * Removes null-ish entries from a field value list while keeping the
 * legitimate falsy values `false` and `0`.
 */
function filterOutNullData(fieldValues) {
  var values = fieldValues || [];
  return values.filter(function(val) {
    return val === false || val === 0 || Boolean(val);
  });
}
|
javascript
|
{
"resource": ""
}
|
q2212
|
addAdminFieldToSubmission
|
train
|
/**
 * Appends an admin field to the enclosing submission with an empty value
 * list. The full populated field object is stored as fieldId so callers
 * see the same shape as the already-populated existing fields.
 */
function addAdminFieldToSubmission(field) {
  submission.formFields.push({
    fieldId: field,
    fieldValues: []
  });
}
|
javascript
|
{
"resource": ""
}
|
q2213
|
convertAllObjectIdsToString
|
train
|
/**
 * Normalises a form document in place so that the form, page and field
 * `_id`s are plain strings rather than ObjectIds. Missing ids are left
 * untouched.
 */
function convertAllObjectIdsToString(form) {
  var idToString = function(id) {
    return id ? id.toString() : id;
  };
  form._id = idToString(form._id);
  form.pages = _.map(form.pages, function(page) {
    page._id = idToString(page._id);
    page.fields = _.map(page.fields, function(field) {
      field._id = idToString(field._id);
      return field;
    });
    return page;
  });
  return form;
}
|
javascript
|
{
"resource": ""
}
|
q2214
|
getMostRecentRefresh
|
train
|
/**
 * Returns whichever of the two timestamps is more recent, preferring the
 * form's timestamp when no data timestamp is available or on a tie.
 */
function getMostRecentRefresh(formLastUpdated, dataLastUpdated) {
  if (!dataLastUpdated) {
    return formLastUpdated;
  }
  var formMs = new Date(formLastUpdated).getTime();
  var dataMs = new Date(dataLastUpdated).getTime();
  return dataMs > formMs ? dataLastUpdated : formLastUpdated;
}
|
javascript
|
{
"resource": ""
}
|
q2215
|
checkId
|
train
|
/**
 * True when the given value stringifies to a 24-character string — the
 * length of a hex-encoded Mongo ObjectId.
 */
function checkId(id) {
  var candidate = (id || "").toString();
  return _.isString(candidate) && candidate.length === 24;
}
|
javascript
|
{
"resource": ""
}
|
q2216
|
buildErrorResponse
|
train
|
/**
 * Normalises an arbitrary error (plus optional overrides in params) into
 * the standard forms error response: {userDetail, systemDetail, code}.
 *
 * Errors that already carry a userDetail are returned untouched. Messages
 * mentioning "validation" and Mongoose validation errors are mapped to the
 * invalid-parameters error code.
 */
function buildErrorResponse(params) {
  params = params || {};
  params.error = params.error || {};
  var ERROR_CODES = models.CONSTANTS.ERROR_CODES;

  //Already a fully-formed error response — pass it through untouched.
  if (params.error.userDetail) {
    return params.error;
  }

  //If the message is about validation, the return a validation http response
  var message = params.error.message || "";
  if (message.indexOf("validation") > -1) {
    params.code = params.code || ERROR_CODES.FH_FORMS_INVALID_PARAMETERS;
  }

  //Mongoose Validation Failed
  if (params.error.errors) {
    var firstFieldKey = _.keys(params.error.errors)[0];
    params.userDetail = params.userDetail || params.error.errors[firstFieldKey].message;
    params.code = params.code || ERROR_CODES.FH_FORMS_INVALID_PARAMETERS;
  }

  return {
    userDetail: params.userDetail || params.error.userDetail || params.error.message || "An Unexpected Error Occurred",
    systemDetail: params.systemDetail || params.error.systemDetail || params.error.stack || "",
    code: params.code || ERROR_CODES.FH_FORMS_UNEXPECTED_ERROR
  };
}
|
javascript
|
{
"resource": ""
}
|
q2217
|
pruneIds
|
train
|
/**
 * Returns a copy of a form definition with every Mongo `_id` removed from
 * the form itself, its pages, and their fields.
 *
 * Fix: the previous implementation took a shallow `_.clone(form)` and then
 * assigned `page.fields = ...` on the SHARED page objects, mutating the
 * caller's form. Each page is now copied (via `_.omit`) before its fields
 * are replaced, so the input is never modified.
 */
function pruneIds(form) {
  var testForm = _.omit(form, '_id');
  testForm.pages = _.map(form.pages, function(page) {
    // copy the page before touching `fields` so the caller's page objects
    // are left untouched
    var prunedPage = _.omit(page, '_id');
    prunedPage.fields = _.map(page.fields, function(field) {
      return _.omit(field, '_id');
    });
    return prunedPage;
  });
  return testForm;
}
|
javascript
|
{
"resource": ""
}
|
q2218
|
buildFormFileSizes
|
train
|
/**
 * Totals the uploaded file sizes per form.
 *
 * Groups submissions by formId and, for each form, sums the `fileSize` of
 * every non-null field value across all of that form's submissions.
 *
 * @param submissions - submission documents with formId and formFields
 * @returns map of formId -> total bytes across that form's submissions
 */
function buildFormFileSizes(submissions) {
  //Grouping By Form Id
  var byForm = _.groupBy(submissions, 'formId');
  return _.mapObject(byForm, function(formSubs) {
    //Per-submission totals for this form.
    var perSubmissionTotals = _.map(formSubs, function(submission) {
      //Collect the file size of every non-null value in every field.
      var sizes = _.map(submission.formFields, function(formField) {
        return _.map(_.compact(formField.fieldValues), function(fieldValue) {
          return fieldValue.fileSize;
        });
      });
      //Flatten the field/value nesting, drop entries with no fileSize, sum.
      return _.reduce(_.compact(_.flatten(sizes)), function(memo, fileSize) {
        return memo + fileSize;
      }, 0);
    });
    //Adding all file sizes for all submissions of this form.
    return _.reduce(_.flatten(perSubmissionTotals), function(memo, fileSize) {
      return memo + fileSize;
    }, 0);
  });
}
|
javascript
|
{
"resource": ""
}
|
q2219
|
getDataSourceIds
|
train
|
/**
 * Collects the unique set of Data Source ids referenced by a list of forms.
 */
function getDataSourceIds(forms) {
  var idsPerForm = _.map(forms, function(form) {
    return _.map(form.dataSources.formDataSources, function(dataSourceMeta) {
      return dataSourceMeta._id.toString();
    });
  });
  //Only want unique data source Ids as multiple forms may use the same data source.
  return _.uniq(_.flatten(idsPerForm));
}
|
javascript
|
{
"resource": ""
}
|
q2220
|
populateFieldDataFromDataSources
|
train
|
/**
 * Replaces the field options of data-source-backed fields with the cached
 * Data Source data.
 *
 * Skips all work when options.expectDataSourceCache is false or when none
 * of the forms reference a data source. Otherwise loads every referenced
 * Data Source in one query, validates the result set, and rewrites
 * fieldOptions.definition.options for each field whose dataSourceType is
 * DATA_SOURCE_TYPE_DATA_SOURCE.
 *
 * NOTE(review): reads dataSource.cache[0] — assumes each Data Source
 * document carries exactly one cache entry; verify against the schema.
 *
 * @param populatedForms - form documents to merge cache data into
 * @param cb - cb(err, populatedForms)
 */
function populateFieldDataFromDataSources(populatedForms, cb) {
  logger.debug("populateFieldDataFromDataSources", populatedForms);
  var DataSource = models.get(connections.mongooseConnection, models.MODELNAMES.DATA_SOURCE);
  //If no data source cache data is expected, then there is no need to load the Data Source Cache Data.
  if (!options.expectDataSourceCache) {
    return cb(undefined, populatedForms);
  }
  var dataSourceIds = getDataSourceIds(populatedForms);
  logger.debug("populateFieldDataFromDataSources", {dataSourceIds: dataSourceIds});
  //If none of the forms refer to any data sources, then no need to search
  if (dataSourceIds.length === 0) {
    return cb(undefined, populatedForms);
  }
  var query = {
    _id: {
      "$in": dataSourceIds
    }
  };
  //One query to populate all data sources
  DataSource.find(query).exec(function(err, dataSources) {
    if (err) {
      logger.error("Error Finding Data Sources", {error: err, dataSourceIds:dataSourceIds});
      return cb(err);
    }
    logger.debug("populateFieldDataFromDataSources", {dataSources: dataSources});
    //Every referenced data source must have been found for the merge to be valid.
    var validatonError = _validateReturnedDataSources(dataSourceIds, dataSources);
    if (validatonError) {
      logger.error("Error Getting Form With Data Sources", {error: validatonError});
      return cb(validatonError);
    }
    var cacheEntries = {};
    //Assigning a lookup for cache entries
    _.each(dataSources, function(dataSource) {
      cacheEntries[dataSource._id] = dataSource.cache[0].data;
    });
    //Overriding field options for a field with data source data if the field is defined as being sourced from a data source.
    populatedForms = _.map(populatedForms, function(populatedForm) {
      populatedForm.pages = _.map(populatedForm.pages, function(page) {
        page.fields = _.map(page.fields, function(field) {
          //If it is a data source type field, then return the data source data
          if (field.dataSourceType === models.FORM_CONSTANTS.DATA_SOURCE_TYPE_DATA_SOURCE) {
            //No guarantee these are set
            field.fieldOptions = field.fieldOptions || {};
            field.fieldOptions.definition = field.fieldOptions.definition || {};
            //Setting the data source data
            field.fieldOptions.definition.options = models.convertDSCacheToFieldOptions(field.type, cacheEntries[field.dataSource]);
          }
          return field;
        });
        return page;
      });
      return populatedForm;
    });
    logger.debug("populateFieldDataFromDataSources", {populatedForms: JSON.stringify(populatedForms)});
    //Finished, return the merged forms
    return cb(undefined, populatedForms);
  });
}
|
javascript
|
{
"resource": ""
}
|
q2221
|
pruneDataSourceInfo
|
train
|
/**
 * Strips Data Source metadata from a form definition unless the caller
 * explicitly asked for it (options.includeDataSources).
 * Note: mutates the passed form in place.
 */
function pruneDataSourceInfo(form) {
  if (options.includeDataSources) {
    return form;
  }
  var stripField = function(field) {
    delete field.dataSource;
    delete field.dataSourceType;
    return field;
  };
  delete form.dataSources;
  form.pages = _.map(form.pages, function(page) {
    page.fields = _.map(page.fields, stripField);
    return page;
  });
  return form;
}
|
javascript
|
{
"resource": ""
}
|
q2222
|
train
|
function (state, ev) {
if(state.dirtyTs + 200 < ev.ts && state.queue) {
state.cleanTs = ev.ts
state.writing = true
state.dirty = false
return {state: state, effects: {type: 'write'}}
}
return state
}
|
javascript
|
{
"resource": ""
}
|
|
q2223
|
train
|
function(field) {
var converted = field.fieldId;
converted.values = field.fieldValues;
converted.sectionIndex = field.sectionIndex;
return converted;
}
|
javascript
|
{
"resource": ""
}
|
|
q2224
|
train
|
// Builds the render data for one (possibly repeating) section of a page.
// Submitted fields belonging to the section are grouped by sectionIndex;
// each repetition is headed by a cloned section break carrying a 1-based
// `idx`, followed by that repetition's fields in form format. Returns the
// flattened render data plus the ids of the fields in the section.
// NOTE(review): renderData[sectionIndex] slots are only initialised from the
// FIRST field's submitted values — assumes every field in the section is
// present for the same set of section indexes; verify with repeating data.
function(section, pageFields, submittedFields) {
  var thisSection = section;
  var fieldsInSection = sectionUtils.getFieldsInSection(section._id, pageFields);
  var renderData = [];
  var addedSectionBreaks = false;
  var idsOfFieldsInTheSection = [];
  _.each(fieldsInSection, function(field) {
    idsOfFieldsInTheSection.push(field._id);
    // All submitted values for this particular field definition.
    var thisFieldInSection = _.filter(submittedFields, function(subField) {
      return subField.fieldId._id === field._id;
    });
    if (!addedSectionBreaks) {
      // One section break per repetition, placed at the head of each slot.
      _.each(thisFieldInSection, function(field, index) {
        var sectionForIndex = _.clone(thisSection);
        sectionForIndex.idx = index + 1;
        field.sectionIndex = field.sectionIndex || 0;
        renderData[field.sectionIndex] = [sectionForIndex];
      });
      addedSectionBreaks = true;
    }
    // Append each submitted value to its repetition's slot.
    _.each(thisFieldInSection, function(field) {
      field.sectionIndex = field.sectionIndex || 0;
      renderData[field.sectionIndex].push(convertFieldToFormFormat(field));
    });
  });
  renderData = _.flatten(renderData);
  return {renderData: renderData, fieldsInSection: idsOfFieldsInTheSection};
}
|
javascript
|
{
"resource": ""
}
|
|
q2225
|
RequestStream
|
train
|
/**
 * Duplex wrapper around an HTTP request stream.
 *
 * Re-emits the underlying 'response' event and treats any status code
 * outside options.allowedStatusCodes (default: the 2xx success codes) as
 * an error. GET requests carry no body, so the writable side is ended
 * immediately.
 */
function RequestStream(requestStream, options) {
  var self = this;
  if(!options) options = {};
  if(options.allowedStatusCodes === undefined) {
    // default to the standard HTTP success status codes
    options.allowedStatusCodes = [200, 201, 202, 203, 204, 205, 206];
  }
  if(options.readErrorResponse === undefined) {
    options.readErrorResponse = true;
  }
  ClassicDuplex.call(this, requestStream, options);
  this._readErrorResponse = !!options.readErrorResponse;
  this._allowedStatusCodes = options.allowedStatusCodes;
  requestStream.on('response', function(response) {
    self._currentResponse = response;
    self.emit('response', response);
    if(!Array.isArray(self._allowedStatusCodes)) {
      return;
    }
    // compare as strings so numeric and string configuration both match
    var statusCode = '' + response.statusCode;
    var statusCodeIsAllowed = self._allowedStatusCodes.some(function(allowedCode) {
      return '' + allowedCode === statusCode;
    });
    if(!statusCodeIsAllowed) {
      self._handleErrorInResponse(response, new Error('Received error status code: ' + statusCode));
    }
  });
  // GET requests have no request body; close the writable side right away.
  if(requestStream.method === 'GET') {
    this._compoundWritable.end();
  }
}
|
javascript
|
{
"resource": ""
}
|
q2226
|
filterAncestor
|
train
|
// Ancestor filter: keeps only element nodes (nodeType 1), excluding the
// document-level <body> and <html> containers.
function filterAncestor(item) {
  if (item.nodeType !== 1) {
    return false;
  }
  var tag = item.tagName.toLowerCase();
  return tag !== 'body' && tag !== 'html';
}
|
javascript
|
{
"resource": ""
}
|
q2227
|
ClassicDuplex
|
train
|
/**
 * Duplex stream adapting a classic (streams1) stream into a CompoundDuplex.
 *
 * Two PassThrough streams form the public write/read endpoints; writes are
 * piped into a ClassicWritable wrapper around the underlying stream and
 * reads are fed from a ClassicReadable wrapper. Errors from either wrapper
 * are routed through _duplexHandleInternalError.
 *
 * @param stream - the classic stream being wrapped
 * @param options - stream options, forwarded to all wrappers
 */
function ClassicDuplex(stream, options) {
  var readable, writable, classicReadable, classicWritable, self = this;
  readable = new PassThrough();
  writable = new PassThrough();
  CompoundDuplex.call(self, writable, readable, options);
  classicMixins.call(this, stream, options);
  // readable side: classic stream -> ClassicReadable -> public readable
  classicReadable = this._internalReadable = new ClassicReadable(stream, options);
  classicReadable.on('error', this._duplexHandleInternalError.bind(this));
  // writable side: public writable -> ClassicWritable -> classic stream
  classicWritable = this._internalWritable = new ClassicWritable(stream, options);
  classicWritable.on('error', this._duplexHandleInternalError.bind(this));
  writable.pipe(classicWritable);
  classicReadable.pipe(readable);
}
|
javascript
|
{
"resource": ""
}
|
q2228
|
_loadEntity
|
train
|
// Wires the accumulated attributes and methods into the current Entity via
// its adapter. Skipped entirely for the abstract base Entity itself.
function _loadEntity() {
  if (!_Entity || _Entity === models.Entity) {
    return;
  }
  if (_nameValidation) {
    _Entity.adapter.loadEntity(_Entity);
    for (var attributeName in _attributes) {
      _loadEntityAttribute(_attributes[attributeName]);
    }
  }
  for (var methodName in _methods) {
    _loadEntityMethod(_methods[methodName], methodName);
  }
}
|
javascript
|
{
"resource": ""
}
|
q2229
|
_loadEntityAttribute
|
train
|
/**
 * Loads a single Attribute into the current Entity via its adapter.
 *
 * Before delegating to the adapter, asserts that the attribute name does
 * not collide with: a method on the current Entity, an attribute or method
 * inherited from the parent Entity (General), or an attribute/method on any
 * existing specialization (child) of the Entity.
 *
 * @param attribute - the Attribute instance being loaded
 * @throws AssertionError when a name collision is detected
 */
function _loadEntityAttribute(attribute) {
  // Collision with a method on the current Entity.
  expect(_methods).to.not.have.ownProperty(
    attribute.name,
    'failed to load entity attribute "' + attribute.name + '" because ' +
    'there is a method with same name in the current Entity and it cannot ' +
    'be overloaded'
  );
  // Collisions with the parent Entity (when one exists).
  if (_Entity.General) {
    expect(_Entity.General.attributes).to.not.have.ownProperty(
      attribute.name,
      'failed to load entity attribute "' + attribute.name + '" because ' +
      'there is an attribute with same name in a parent of current Entity ' +
      'and it cannot be overriden'
    );
    expect(_Entity.General.methods).to.not.respondTo(
      attribute.name,
      'failed to load entity attribute "' + attribute.name + '" because ' +
      'there is a method with same name in a parent of current Entity ' +
      'and it cannot be overriden'
    );
  }
  // Collisions with every existing specialization (child) of the Entity.
  var entitySpecializations = _Entity.specializations;
  for (var specialization in entitySpecializations) {
    expect(entitySpecializations[specialization].specification.attributes)
      .to.not.have.ownProperty(
        attribute.name,
        'failed to load entity attribute "' + attribute.name + '" because ' +
        'there is an attribute with same name in a child of current Entity'
      );
    expect(entitySpecializations[specialization].specification.methods)
      .to.not.have.ownProperty(
        attribute.name,
        'failed to load entity attribute "' + attribute.name + '" because ' +
        'there is a method with same name in a child of current Entity'
      );
  }
  // All checks passed; hand the attribute to the adapter.
  _Entity.adapter.loadEntityAttribute(_Entity, attribute);
}
|
javascript
|
{
"resource": ""
}
|
q2230
|
_loadEntityMethod
|
train
|
/**
 * Loads a single method onto the current Entity's prototype.
 *
 * Asserts that the method name does not collide with an attribute on the
 * current Entity, on its parent (General), or on any of its
 * specializations before assigning the function to the prototype.
 *
 * @param func - the method implementation
 * @param name - the method name
 * @throws AssertionError when a name collision is detected
 */
function _loadEntityMethod(func, name) {
  // Collision with an attribute on the current Entity.
  expect(_attributes).to.not.have.ownProperty(
    name,
    'failed to load entity method "' + name + '" because there is an ' +
    'attribute with same name in the current Entity and it cannot be ' +
    'overloaded'
  );
  // Collision with an attribute inherited from the parent Entity.
  if (_Entity.General) {
    expect(_Entity.General.attributes).to.not.have.ownProperty(
      name,
      'failed to load entity method "' + name + '" because there is an ' +
      'attribute with same name in a parent of current Entity and it ' +
      'cannot be overriden'
    );
  }
  // Collisions with attributes in every specialization (child).
  var entitySpecializations = _Entity.specializations;
  for (var specialization in entitySpecializations) {
    expect(entitySpecializations[specialization].specification.attributes)
      .to.not.have.ownProperty(
        name,
        'failed to load entity method "' + name + '" because there is an ' +
        'attribute with same name in a child of current Entity'
      );
  }
  // All checks passed; expose the method on the prototype.
  _Entity.prototype[name] = func;
}
|
javascript
|
{
"resource": ""
}
|
q2231
|
addAttribute
|
train
|
/**
 * Adds an Attribute to the specification.
 *
 * Accepts either a single ready-made attributes.Attribute instance or the
 * raw arguments understood by attributes.Attribute.resolve. The new
 * dictionary is built first and only committed after the attribute has
 * been loaded into the Entity (when one is attached), so a failed load
 * leaves the current specification untouched.
 */
function addAttribute() {
  // Single-Attribute call vs raw resolve-style arguments.
  var attribute =
    arguments.length === 1 && arguments[0] instanceof attributes.Attribute ?
      arguments[0] :
      attributes.Attribute.resolve.apply(
        null,
        Array.prototype.slice.call(arguments)
      );
  var newAttributes = attributes.AttributeDictionary.concat(
    _attributes,
    attribute
  );
  // Load first: if this throws, _attributes is not replaced below.
  if (_Entity) {
    _loadEntityAttribute(attribute);
  }
  _attributes = newAttributes;
}
|
javascript
|
{
"resource": ""
}
|
q2232
|
addMethod
|
train
|
/**
 * Registers a new method on the specification.
 * The method dictionary is only replaced after the method has been loaded
 * into the Entity, so a failed load leaves the specification untouched.
 */
function addMethod(func, name) {
  var updatedMethods = methods.MethodDictionary.concat(
    _methods,
    func,
    name
  );
  if (_Entity) {
    _loadEntityMethod(func, name);
  }
  _methods = updatedMethods;
}
|
javascript
|
{
"resource": ""
}
|
q2233
|
_Stream
|
train
|
/**
 * Base mixin-style constructor shared by all Z streams.
 *
 * Tags the stream with a unique id and an initial rank, attaches it to a
 * fresh StreamChain, and installs a default 'error' listener that promotes
 * otherwise-unhandled stream errors to chain errors.
 *
 * @param superObj - the super/prototype object this stream wraps
 */
function _Stream(superObj) {
  var self = this;
  this._zSuperObj = superObj;
  this._isZStream = true;
  this._ignoreStreamError = false;
  // Monotonically increasing id shared across all Z streams in the process.
  this._zStreamId = streamIdCounter++;
  this._currentStreamChain = new StreamChain(true);
  this._currentStreamChain._addStream(this);
  this._zStreamRank = 0;
  this.on('error', function(error) {
    // If there are no other 'error' handlers on this stream, trigger a chainerror
    if(self.listeners('error').length <= 1) {
      self.triggerChainError(error);
    }
  });
}
|
javascript
|
{
"resource": ""
}
|
q2234
|
getFormFiles
|
train
|
/**
 * Reads the forms ZIP metadata file and returns the listed file paths.
 * Returns an empty array when the metadata has no `files` entry.
 */
function getFormFiles(metaDataFilePath) {
  var formsZipMetaData = require(metaDataFilePath);
  if (!(formsZipMetaData && formsZipMetaData.files)) {
    return [];
  }
  return _.map(formsZipMetaData.files, function(formDetails) {
    return formDetails.path;
  });
}
|
javascript
|
{
"resource": ""
}
|
q2235
|
getStatusUpdater
|
train
|
/**
 * Returns a status-update function for a CSV export.
 * Asynchronous exports persist progress via updateCSVExportStatus;
 * synchronous exports treat status updates as no-ops.
 */
function getStatusUpdater(connections, isAsync) {
  return function updateExportStatus(statusUpdate, cb) {
    cb = cb || _.noop;
    if (!isAsync) {
      return cb();
    }
    updateCSVExportStatus(connections, statusUpdate, cb);
  };
}
|
javascript
|
{
"resource": ""
}
|
q2236
|
generateQuery
|
train
|
/**
 * Builds the submission query: free-form searches go through the
 * searchSubmissions query builder, otherwise a standard query is assembled
 * directly from the search parameters.
 */
function generateQuery(searchParams, cb) {
  if (!searchParams.query) {
    return cb(undefined, buildQuery(searchParams));
  }
  searchSubmissions.queryBuilder(searchParams.query, cb);
}
|
javascript
|
{
"resource": ""
}
|
q2237
|
buildCompositeForm
|
train
|
/**
 * Builds a composite form definition from every historical version of the
 * form that matching submissions were made against.
 *
 * Runs a Mongo mapReduce over the submissions matching singleFormQuery: the
 * map step emits each formSubmittedAgainst keyed by its lastUpdated
 * timestamp (one key per historical version), the reduce step keeps a
 * single {_id, name, pages} snapshot per timestamp. The snapshots are then
 * merged into one field dictionary keyed by formId.
 *
 * Note: map/reduce execute inside MongoDB, not in this process — `emit` is
 * provided by the database runtime (hence the eslint no-undef suppression).
 *
 * @param cb - cb(err, mergedFields, formName)
 */
function buildCompositeForm(formSubmissionModel, formId, singleFormQuery, statusUpdaterFunction, cb) {
  var mergedFields = {};
  mergedFields[formId] = {};
  logger.debug("buildCompositeForm start");
  statusUpdaterFunction({
    message: "Creating form metadata for submissions with form ID: " + formId
  });
  var mapReduceOptions = {
    map: function() {
      //The only difference will be the "lastUpdated" timestamp.
      emit(this.formSubmittedAgainst.lastUpdated, this.formSubmittedAgainst); // eslint-disable-line no-undef
    },
    reduce: function(lastUpdatedTimestamp, formEntries) {
      //Only want one of each form definition for each different timestamp
      var formEntry = formEntries[0];
      //Only need the pages, _id and name
      if (formEntry && formEntry.pages) {
        return {
          _id: formEntry._id,
          name: formEntry.name,
          pages: formEntry.pages
        };
      } else {
        return null;
      }
    },
    query: singleFormQuery
  };
  formSubmissionModel.mapReduce(mapReduceOptions, function(err, subFormsSubmittedAgainst) {
    if (err) {
      logger.error("Error Using mapReduce ", err);
      return cb(err);
    }
    statusUpdaterFunction({
      message: "Finished Creating form metadata for submissions with form ID: " + formId
    });
    logger.debug("buildCompositeForm finish");
    var formName = "";
    //Merge every historical version into the composite field dictionary;
    //the first non-empty name encountered wins.
    _.each(subFormsSubmittedAgainst, function(subFormSubmittedAgainst) {
      formName = formName || subFormSubmittedAgainst.value.name;
      mergedFields[formId] = mergeFormFields(mergedFields[formId] || {}, subFormSubmittedAgainst.value);
    });
    return cb(null, mergedFields, formName);
  });
}
|
javascript
|
{
"resource": ""
}
|
q2238
|
buildCSVsForSingleMergedForm
|
train
|
/**
 * Streams every submission matching singleFormQuery into one CSV string for
 * a single (merged) form, reporting progress once a second.
 *
 * Resolves with {formId, fileName, csvString}. The callback is wrapped in
 * _.once because the query stream can emit 'error' and 'close'
 * independently and the callback must fire exactly once.
 *
 * @param params - {formId, formName, date, mergedFields, fieldHeader,
 *                  singleFormQuery, downloadUrl, exportCounter,
 *                  statusUpdaterFunction}
 * @param cb - cb(err, {formId, fileName, csvString})
 */
function buildCSVsForSingleMergedForm(formSubmissionModel, params, cb) {
  var formId = params.formId;
  var formName = params.formName;
  var date = params.date;
  var mergedFields = params.mergedFields;
  var fieldHeader = params.fieldHeader;
  var singleFormQuery = params.singleFormQuery;
  var downloadUrl = params.downloadUrl;
  var fullSubmissionCSVString = "";
  //Form Name might not be unique but the ID will always be.
  var fileName = date + "-" + formId + "-" + (formName.split(' ').join('_'));
  //Query the submissions for the formId
  //LEAN
  //Select only the metadata and formFields in the submission.
  // Stream response
  // Build CSV string for each entry.
  // Add to the zip file.
  cb = _.once(cb);
  params.statusUpdaterFunction({
    message: "Beginning export of submissions for form ID: " + formId
  });
  //Progress heartbeat; cleared on stream error or close.
  var exportProgressInterval = setInterval(function() {
    params.statusUpdaterFunction({
      message: "Exporting submission " + params.exportCounter.numSubsExported + " of " + params.exportCounter.numSubmissionsToExport
    });
  }, 1000);
  //First, generate headers.
  fullSubmissionCSVString = csvHeaders.generateCSVHeaders(_.keys(mergedFields[formId]), mergedFields[formId], fieldHeader);
  //Exclude the bulky form-definition subdocuments from each streamed doc.
  var submissionQueryStream = formSubmissionModel.find(singleFormQuery).select({
    "formSubmittedAgainst.pages": 0,
    "formSubmittedAgainst.pageRules": 0,
    "formSubmittedAgainst.fieldRules": 0
  }).lean().stream();
  submissionQueryStream.on('data', function addSubmissionToCSV(submissionJSON) {
    //Merge the form fields
    fullSubmissionCSVString += processSingleSubmission({
      mergedFields: mergedFields,
      submission: submissionJSON,
      date: date,
      fieldHeader: fieldHeader,
      downloadUrl: downloadUrl
    });
    params.exportCounter.numSubsExported ++;
  }).on('error', function(err) {
    logger.error("Error streaming submissions ", err);
    clearInterval(exportProgressInterval);
    return cb(err);
  }).on('close', function() {
    clearInterval(exportProgressInterval);
    return cb(undefined, {
      formId: formId,
      fileName: fileName,
      csvString: fullSubmissionCSVString
    });
  });
}
|
javascript
|
{
"resource": ""
}
|
q2239
|
buildCSVsForSingleForm
|
train
|
/**
 * Exports all submissions of one form to CSV.
 *
 * Generates the submission query for the form, builds the composite form
 * definition from every historical version submitted against, then streams
 * the submissions into a CSV string.
 */
function buildCSVsForSingleForm(formSubmissionModel, params, formId, callback) {
  logger.debug("buildCSVsForSingleForm", params, formId);
  var date = params.date;
  var searchParams = params.searchParams || {};
  var fieldHeader = searchParams.fieldHeader;
  var downloadUrl = searchParams.downloadUrl || "";
  formId = formId.toString();
  params.statusUpdaterFunction({
    message: "Starting export of submissions for form with ID:" + formId
  });
  //Always scope the search to this particular form.
  var scopedSearchParams = _.defaults({
    formId: formId
  }, searchParams);
  generateQuery(scopedSearchParams, function(err, singleFormQuery) {
    if (err) {
      return callback(err);
    }
    async.waterfall([
      async.apply(buildCompositeForm, formSubmissionModel, formId, singleFormQuery, params.statusUpdaterFunction),
      function buildSubmissionCSVForSingleForm(mergedFields, formName, cb) {
        buildCSVsForSingleMergedForm(formSubmissionModel, {
          date: date,
          fieldHeader: fieldHeader,
          downloadUrl: downloadUrl,
          mergedFields: mergedFields,
          formName: formName,
          formId: formId,
          exportCounter: params.exportCounter,
          singleFormQuery: singleFormQuery,
          statusUpdaterFunction: params.statusUpdaterFunction
        }, cb);
      }
    ], callback);
  });
}
|
javascript
|
{
"resource": ""
}
|
q2240
|
ZWritable
|
train
|
/**
 * Writable-style stream built on top of Transform.
 *
 * Honours writableObjectMode by switching the whole stream to object mode,
 * and supports the simplified constructor style where the `write`/`flush`
 * implementations are passed in via options.
 */
function ZWritable(options) {
  if(options) {
    if(options.writableObjectMode) {
      options.objectMode = true;
    }
    //Add support for iojs simplified stream constructor
    var simpleWrite = options.write;
    if(typeof simpleWrite === 'function') {
      this._write = simpleWrite;
    }
    var simpleFlush = options.flush;
    if(typeof simpleFlush === 'function') {
      this._flush = simpleFlush;
    }
  }
  Transform.call(this, options);
  streamMixins.call(this, Transform.prototype, options);
  writableMixins.call(this, options);
}
|
javascript
|
{
"resource": ""
}
|
q2241
|
needsAnUpdate
|
train
|
/**
 * Decides whether a Data Source is due a refresh at currentTime.
 *
 * The wait between refreshes is the larger of the configured refresh
 * interval and an error back-off (backOffIndex * minsPerBackOffIndex
 * minutes), with the back-off capped at dsMaxIntervalMs so failed sources
 * are never delayed beyond the configured maximum.
 */
function needsAnUpdate(dataSource, currentTime) {
  currentTime = new Date(currentTime);
  var conf = config.get();
  var defaults = config.defaults();
  //The last time the Data Source was refreshed
  var lastRefreshedMs = new Date(dataSource.cache[0].lastRefreshed).valueOf();
  //Minutes each back-off step adds between attempts.
  var minsPerBackOffIndex = conf.minsPerBackOffIndex || defaults.minsPerBackOffIndex;
  //Normal refresh interval in milliseconds.
  var refreshIntervalMs = dataSource.refreshInterval * MIN_MS;
  var backOffIndex = dataSource.cache[0].backOffIndex || 0;
  //Extra wait caused by previous failures, capped at the configured maximum.
  var backOffMs = Math.min(
    backOffIndex * (minsPerBackOffIndex * MIN_MS),
    conf.dsMaxIntervalMs || defaults.dsMaxIntervalMs
  );
  //Refresh once the longer of the two waits has elapsed.
  var nextRefreshMs = Math.max(refreshIntervalMs, backOffMs);
  return new Date(lastRefreshedMs + nextRefreshMs) <= currentTime;
}
|
javascript
|
{
"resource": ""
}
|
q2242
|
validateThemeNotInUseByApps
|
train
|
/**
 * Fails the callback when any app still references the theme being
 * deleted; succeeds (empty callback) otherwise.
 */
function validateThemeNotInUseByApps(cb) {
  var appThemeModel = models.get(connections.mongooseConnection, models.MODELNAMES.APP_THEMES);
  //Count the apps whose theme is the one being removed.
  appThemeModel.count({"theme" : options._id}, function(err, appCount) {
    if (err) {
      return cb(err);
    }
    if (appCount > 0) {
      return cb(new Error("Cannot delete theme in use by apps. Apps Using this theme" + appCount));
    }
    return cb();
  });
}
|
javascript
|
{
"resource": ""
}
|
q2243
|
registerPatternHelpers
|
train
|
/**
 * Registers the Drizzle `pattern` and `patternSource` Handlebars helpers.
 *
 * `pattern` renders a pattern partial with its correct local context so
 * patterns can be embedded anywhere; `patternSource` renders the same
 * partial but returns beautified, HTML-escaped markup suitable for display
 * inside `<pre>` tags. A (non-fatal) warning is raised when either helper
 * name is already registered.
 */
function registerPatternHelpers(options) {
  const Handlebars = options.handlebars;

  // Warn — without aborting — when a helper of this name already exists.
  const warnIfRegistered = name => {
    if (Handlebars.helpers[name]) {
      DrizzleError.error(
        new DrizzleError(
          '`' + name + '` helper already registered',
          DrizzleError.LEVELS.WARN
        ),
        options.debug
      );
    }
  };

  warnIfRegistered('pattern');
  /**
   * The `pattern` helper allows the embedding of patterns anywhere
   * and they can get their correct local context.
   */
  Handlebars.registerHelper('pattern', (id, rootContext, opts) =>
    renderPatternPartial(id, rootContext.drizzle, Handlebars)
  );

  warnIfRegistered('patternSource');
  /**
   * Similar to `pattern` but the returned string is HTML-escaped.
   * Can be used for rendering source in `<pre>` tags.
   */
  Handlebars.registerHelper('patternSource', (id, rootContext, opts) => {
    const renderedTemplate = renderPatternPartial(id, rootContext.drizzle, Handlebars);
    const sourceMarkup = beautify(renderedTemplate, options.beautifier);
    return Handlebars.Utils.escapeExpression(sourceMarkup);
  });

  return Handlebars;
}
|
javascript
|
{
"resource": ""
}
|
q2244
|
deploy
|
train
|
/**
 * Express middleware: deploy a data source definition.
 * Takes the data source from the request body, forces its `_id` from the
 * route parameter, and delegates to the forms API; the shared
 * `dataSourcesHandler` stores the result for downstream middleware.
 */
function deploy(req, res, next) {
  var dataSource = req.body;
  // The URL is authoritative for the ID, regardless of what the body says.
  dataSource._id = req.params.id;
  forms.dataSources.deploy(req.connectionOptions, dataSource, dataSourcesHandler(constants.resultTypes.dataSources, req, next));
}
|
javascript
|
{
"resource": ""
}
|
q2245
|
makeReflectTypes
|
train
|
/**
 * Build a nested reflection object from a flat list of GLSL uniforms.
 * Names such as "lights[0].pos" become nested objects/arrays; each leaf
 * holds either the uniform's index in `uniforms` (when `useIndex` is
 * truthy) or its GLSL type string.
 * @param {Array} uniforms - entries with `name` and `type` properties.
 * @param {Boolean} useIndex - store array indices instead of type names.
 * @returns {Object} reflection tree keyed by uniform path segments.
 */
function makeReflectTypes(uniforms, useIndex) {
  var reflect = {}
  uniforms.forEach(function(uniform, index) {
    var leafValue = useIndex ? index : uniform.type
    var parts = uniform.name.split(".")
    var node = reflect
    parts.forEach(function(part, partIndex) {
      var lastPart = partIndex === parts.length - 1
      var pieces = part.split("[")
      if (pieces.length > 1) {
        // Array-style segment, e.g. "lights[0]" or "m[1][2]".
        if (!(pieces[0] in node)) {
          node[pieces[0]] = []
        }
        node = node[pieces[0]]
        for (var k = 1; k < pieces.length; ++k) {
          var idx = parseInt(pieces[k])
          var lastPiece = k === pieces.length - 1
          if (lastPiece && lastPart) {
            // Final segment of the whole name: write the leaf value.
            node[idx] = leafValue
          } else {
            // Intermediate node: nested arrays for further "[" pieces,
            // plain objects when a "." segment follows.
            if (!(idx in node)) {
              node[idx] = lastPiece ? {} : []
            }
            node = node[idx]
          }
        }
      } else if (!lastPart) {
        // Plain struct segment with more segments to come.
        if (!(pieces[0] in node)) {
          node[pieces[0]] = {}
        }
        node = node[pieces[0]]
      } else {
        // Simple uniform (or final struct member): write the leaf value.
        node[pieces[0]] = leafValue
      }
    })
  })
  return reflect
}
|
javascript
|
{
"resource": ""
}
|
q2246
|
getRequestFileParameters
|
train
|
/**
 * Express middleware: extract the first uploaded file from a multipart
 * request and normalise its details into `req.appformsResultPayload` for
 * the submission-file handlers further down the chain.
 * Errors out if the request carries no files.
 */
function getRequestFileParameters(req, res, next) {
  //A valid getForms request must have an appId parameter set
  var submitFileParams = {};
  submitFileParams.fileDetails = {};
  //Get the content body for normal parameter
  var filesInRequest = req.files;
  if (_.size(filesInRequest) === 0) {
    logger.error("Middleware: getRequestFileParameters, Expected A File To Have Been Sent ", {params: req.params});
    return next(new Error("Expected A File To Have Been Submitted"));
  }
  // Only the first file in the request is processed; any others are ignored.
  var fileDetails = _.map(filesInRequest, function(fileValue) {
    return fileValue;
  });
  fileDetails = _.first(fileDetails);
  logger.debug("Middleware: getRequestFileParameters ", {fileDetails: fileDetails, body: req.body});
  submitFileParams.fileDetails = {
    fileStream: fileDetails.path,
    // `originalname` is the multer field name; `name` covers older parsers.
    fileName: fileDetails.originalname || fileDetails.name,
    fileType: fileDetails.mimetype,
    fileSize: fileDetails.size
  };
  req.appformsResultPayload = {
    data: submitFileParams,
    type: constants.resultTypes.submissions
  };
  logger.debug("Middleware: getRequestFileParameters ", {params: req.appformsResultPayload});
  return next();
}
|
javascript
|
{
"resource": ""
}
|
q2247
|
list
|
train
|
/**
 * Express middleware: list all submissions (no filter) and stash the result
 * on the request via the shared submissions result handler.
 */
function list(req, res, next) {
  logger.debug("Middleware Submissions List ", {connectionOptions: req.connectionOptions});
  forms.getSubmissions(req.connectionOptions, {}, _getSubmissionsResultHandler(req, next));
}
|
javascript
|
{
"resource": ""
}
|
q2248
|
listProjectSubmissions
|
train
|
/**
 * Express middleware: list submissions for a single project, optionally
 * filtered by form ID and/or a set of submission IDs taken from the body.
 * `subid` may arrive as a single string or an array; both are normalised
 * to an array before querying.
 */
function listProjectSubmissions(req, res, next) {
  var formId = req.body.formId;
  var subIds = req.body.subid;
  var params = {
    wantRestrictions: false,
    appId: req.params.projectid
  };
  //Assigning Form Search If Set
  if (_.isString(formId)) {
    params.formId = formId;
  }
  //Assigning Submission Search Params If Set
  if (_.isArray(subIds)) {
    params.subid = subIds;
  } else if (_.isString(subIds)) {
    params.subid = [subIds];
  }
  logger.debug("Middleware listProjectSubmissions ", {params: params});
  forms.getSubmissions(req.connectionOptions, params, submissionsHandler(constants.resultTypes.submissions, req, next));
}
|
javascript
|
{
"resource": ""
}
|
q2249
|
remove
|
train
|
/**
 * Express middleware: delete the submission identified by the `id` route
 * parameter.
 */
function remove(req, res, next) {
  var params = {"_id": req.params.id};
  logger.debug("Middleware Submissions Remove ", {params: params});
  forms.deleteSubmission(req.connectionOptions, params, submissionsHandler(constants.resultTypes.submissions, req, next));
}
|
javascript
|
{
"resource": ""
}
|
q2250
|
getSubmissionFile
|
train
|
/**
 * Express middleware: fetch a single file attached to a submission, keyed
 * by the `fileId` route parameter.
 */
function getSubmissionFile(req, res, next) {
  var params = {"_id": req.params.fileId};
  logger.debug("Middleware getSubmissionFile ", {params: params});
  forms.getSubmissionFile(req.connectionOptions, params, submissionsHandler(constants.resultTypes.submissions, req, next));
}
|
javascript
|
{
"resource": ""
}
|
q2251
|
updateSubmissionFile
|
train
|
/**
 * Express middleware: attach or replace a file on a submission field.
 * Expects the normalised file details in `req.appformsResultPayload.data`
 * (set by getRequestFileParameters). The meaning of the `fileId` route
 * parameter depends on the mode: when replacing it is the existing file
 * group ID, when adding (`req.addingNewSubmissionFile`) it is the client
 * supplied hash name.
 */
function updateSubmissionFile(req, res, next) {
  var fileUpdateOptions = req.appformsResultPayload.data;
  fileUpdateOptions.submission = {
    submissionId: req.params.id,
    fieldId: req.params.fieldId
  };
  //Remove the cached file when finished
  fileUpdateOptions.keepFile = false;
  //Adding A New File If Required
  fileUpdateOptions.addingNewSubmissionFile = req.addingNewSubmissionFile;
  //If not adding a new file, the fileId param is expected to be the file group id
  if (!fileUpdateOptions.addingNewSubmissionFile) {
    fileUpdateOptions.fileDetails.groupId = req.params.fileId;
  } else {
    fileUpdateOptions.fileDetails.hashName = req.params.fileId;
  }
  logger.debug("Middleware updateSubmissionFile ", {fileUpdateOptions: fileUpdateOptions});
  forms.updateSubmissionFile(_.extend(fileUpdateOptions, req.connectionOptions), submissionsHandler(constants.resultTypes.submissions, req, next));
}
|
javascript
|
{
"resource": ""
}
|
q2252
|
addSubmissionFile
|
train
|
/**
 * Express middleware: add a brand-new file to a submission field.
 * Thin wrapper that flags "add" mode and reuses updateSubmissionFile.
 */
function addSubmissionFile(req, res, next) {
  req.addingNewSubmissionFile = true;
  updateSubmissionFile(req, res, next);
}
|
javascript
|
{
"resource": ""
}
|
q2253
|
status
|
train
|
/**
 * Express middleware: look up the processing status of a submission
 * identified by the `id` route parameter.
 */
function status(req, res, next) {
  var params = {
    submission: {
      submissionId: req.params.id
    }
  };
  logger.debug("Middleware Submission status ", {params: params});
  forms.getSubmissionStatus(_.extend(params, req.connectionOptions), submissionsHandler(constants.resultTypes.submissions, req, next));
}
|
javascript
|
{
"resource": ""
}
|
q2254
|
search
|
train
|
/**
 * Express middleware: run a submission search using the raw query
 * parameters supplied in the request body.
 */
function search(req, res, next) {
  var queryParams = req.body;
  logger.debug("Middleware Submission Search ", {params: queryParams});
  forms.submissionSearch(req.connectionOptions, queryParams, _getSubmissionsResultHandler(req, next));
}
|
javascript
|
{
"resource": ""
}
|
q2255
|
exportSubmissions
|
train
|
/**
 * Express middleware: export submissions as CSV data and stream them back
 * to the client as a zip archive (one CSV per form).
 * Filtering options (project, form, submission IDs, free-text query) are
 * taken from the request body; the actual zip/streaming is delegated to
 * _processExportResponse.
 */
function exportSubmissions(req, res, next) {
  var params = {
    "appId" : req.body.projectId,
    "subid": req.body.subid,
    "formId": req.body.formId,
    "fieldHeader": req.body.fieldHeader,
    "downloadUrl": req.body.fileUrl,
    "filter": req.body.filter,
    "query": req.body.query,
    "wantRestrictions": false
  };
  logger.debug("Middleware exportSubmissions ", {req: req, body: req.body, params: params});
  forms.exportSubmissions(req.connectionOptions, params, function(err, submissionCsvValues) {
    if (err) {
      logger.error("Middleware Export Submissions ", {error: err});
      return next(err);
    }
    logger.debug("Middleware exportSubmissions submissionCsvValues", {submissionCsvValues: submissionCsvValues.length});
    // Hand off to the zip/stream response writer.
    _processExportResponse(submissionCsvValues, res, next);
  });
}
|
javascript
|
{
"resource": ""
}
|
q2256
|
generatePDF
|
train
|
/**
 * Express middleware: render a submission to PDF and stream it back as a
 * download, deleting the temporary PDF file afterwards.
 * If earlier middleware already placed a submission on
 * `req.appformsResultPayload.data`, that object is rendered instead of
 * re-fetching from the database.
 */
function generatePDF(req, res, next) {
  req.appformsResultPayload = req.appformsResultPayload || {};
  //If there is already a submission result, render this. This is useful for cases where the submission is fetched from another database and rendered elsewhere.
  var existingSubmission = req.appformsResultPayload.data;
  var params = {
    _id: req.params.id,
    pdfExportDir: req.pdfExportDir,
    downloadUrl: '' + req.protocol + '://' + req.hostname,
    existingSubmission: existingSubmission,
    environment: req.environment,
    mbaasConf: req.mbaasConf,
    domain: req.user.domain,
    filesAreRemote: req.filesAreRemote,
    fileUriPath: req.fileUriPath,
    location: req.coreLocation,
    pdfTemplateLoc: req.pdfTemplateLoc,
    maxConcurrentPhantomPerWorker: req.maxConcurrentPhantomPerWorker
  };
  logger.debug("Middleware generatePDF ", {params: params});
  forms.generateSubmissionPdf(_.extend(params, req.connectionOptions), function(err, submissionPdfLocation) {
    if (err) {
      logger.error("Middleware generatePDF", {error: err});
      return next(err);
    }
    logger.debug("Middleware generatePDF ", {submissionPdfLocation: submissionPdfLocation});
    //Streaming the file as an attachment
    res.download(submissionPdfLocation, '' + req.params.id + ".pdf", function(fileDownloadError) {
      //Download Complete, remove the cached file
      fs.unlink(submissionPdfLocation, function() {
        if (fileDownloadError) {
          logger.error("Middleware generatePDF ", {error: fileDownloadError});
          //If the headers have not been sent to the client, can use the error handler
          if (!res.headersSent) {
            return next(fileDownloadError);
          }
        }
      });
    });
  });
}
|
javascript
|
{
"resource": ""
}
|
q2257
|
_processExportResponse
|
train
|
/**
 * Stream a map of per-form CSV strings to the client as an in-memory zip
 * archive named submissions.zip.
 * The `respSent` flag guards against invoking the error handler twice,
 * since both the 'error' event and finalize's callback may report errors.
 * @param {Object} csvs - map of form name -> CSV string.
 * @param {Object} res - Express response to pipe the archive into.
 * @param {Function} next - Express error handler.
 */
function _processExportResponse(csvs, res, next) {
  var zip = archiver('zip');
  // convert csv entries to in-memory zip file and stream response
  res.setHeader('Content-type', 'application/zip');
  res.setHeader('Content-disposition', 'attachment; filename=submissions.zip');
  zip.pipe(res);
  for (var form in csvs) { // eslint-disable-line guard-for-in
    var csv = csvs[form];
    zip.append(csv, {name: form + '.csv'});
  }
  var respSent = false;
  zip.on('error', function(err) {
    logger.error("_processExportResponse ", {error: err});
    if (err) {
      if (!respSent) {
        respSent = true;
        return next(err);
      }
    }
  });
  zip.finalize(function(err) {
    if (err) {
      logger.error("_processExportResponse finalize", {error: err});
      if (!respSent) {
        respSent = true;
        return next(err);
      }
      logger.debug("_processExportResponse finalize headers sent");
    }
    logger.debug("_processExportResponse finalize finished");
  });
}
|
javascript
|
{
"resource": ""
}
|
q2258
|
processFileResponse
|
train
|
/**
 * Express middleware: stream a previously-fetched submission file (placed
 * on `req.appformsResultPayload.data` by an earlier handler) to the client
 * as an attachment. Errors out if no stream is present on the payload.
 */
function processFileResponse(req, res, next) {
  var fileDetails = req.appformsResultPayload.data;
  if (fileDetails.stream) {
    var headers = {};
    headers["Content-Type"] = fileDetails.type;//Setting the file content type. Mime types are set by the file handler.
    headers["Content-Disposition"] = "attachment; filename=" + fileDetails.name;
    res.writeHead(200, headers);
    fileDetails.stream.pipe(res);
    fileDetails.stream.resume(); //Unpausing the stream as it was paused by the file handler
  } else {
    return next('Error getting submitted file - result: ' + JSON.stringify(fileDetails));
  }
}
|
javascript
|
{
"resource": ""
}
|
q2259
|
reformatFormIdAndName
|
train
|
/**
 * Flatten a submission's form reference for API output: copy the form's
 * display name onto `submission.formName` (preferring the snapshot stored
 * in `formSubmittedAgainst` over the live, populated `formId` document)
 * and reduce `formId` to its string representation.
 * Mutates and returns the same submission object.
 * @param {Object} submission - submission with a populated `formId`.
 * @returns {Object} the mutated submission.
 */
function reformatFormIdAndName(submission) {
  var formName = "Unknown";
  var populatedForm = submission && submission.formId;
  if (populatedForm && populatedForm.name) {
    formName = populatedForm.name;
  }
  // The snapshot taken at submission time wins over the current form doc.
  var snapshot = submission && submission.formSubmittedAgainst;
  if (snapshot) {
    formName = snapshot.name;
  }
  submission.formName = formName;
  submission.formId = submission.formId.toString();
  return submission;
}
|
javascript
|
{
"resource": ""
}
|
q2260
|
restrictSubmissionForSummary
|
train
|
/**
 * Trim a submission down to summary size: keep only field types listed in
 * FIELD_TYPES_INCLUDED_IN_SUMMARY, capped at NUM_FIELDS_INCLUDED_IN_SUMMARY
 * entries. Mutates and returns the submission.
 */
function restrictSubmissionForSummary(submission) {
  submission.formFields = _.filter(submission.formFields, function(formField) {
    return CONSTANTS.FIELD_TYPES_INCLUDED_IN_SUMMARY.indexOf(formField.fieldId.type) >= 0;
  });
  submission.formFields = _.first(submission.formFields, CONSTANTS.NUM_FIELDS_INCLUDED_IN_SUMMARY);
  return submission;
}
|
javascript
|
{
"resource": ""
}
|
q2261
|
train
|
// Collect the storage group IDs of every uploaded file in a submission.
// Note: the first `.map` yields one array of file-bearing values per field,
// so in the second `.map` the `fieldValue` parameter is actually an array;
// `_.flatten` then collapses the nested arrays into a single flat ID list.
function(submission) {
  return _.flatten(submission.formFields.map(function(field) {
    return field.fieldValues.filter(hasGroupId);
  }).map(function(fieldValue) {
    return fieldValue.map(extractGroupId);
  }));
}
|
javascript
|
{
"resource": ""
}
|
|
q2262
|
spawnWatcher
|
train
|
/**
 * Re-launch this prebuild script in watcher mode as a fully detached child
 * process (no stdio, unref'd) so the parent build can exit while the
 * watcher keeps running in the background.
 */
function spawnWatcher() {
  var subprocess = spawn(process.argv[0], ['prebuild.js', '--watcher'], {detached: true, stdio: 'ignore'})
  subprocess.unref()
}
|
javascript
|
{
"resource": ""
}
|
q2263
|
createIndex
|
train
|
/**
 * Generate src/views/index.js with one `export { default as X }` line per
 * view directory that contains a matching `<dirname>/<dirname>.js` entry
 * file. The generated file starts with a do-not-edit banner. Any error is
 * logged rather than thrown, so a failed scan does not abort the build.
 */
function createIndex() {
  var directory = 'src/views'
  var directories = fs.readdirSync(directory)
  try {
    var lines = ['// This is a generated file, do not edit, or disable "prebuild" command in package.json if you want to take control']
    for (var i = 0; i < directories.length; i++) {
      var path = directory + '/' + directories[i];
      if (fs.existsSync(path) && fs.lstatSync(path).isDirectory()) {
        // Only directories with a same-named entry file are exported.
        var file = directories[i] + '.js'
        if (fs.existsSync(directory + '/' + directories[i] + '/' + file)) {
          lines.push('export { default as ' + directories[i] + ' } from \'./' + directories[i] + '/' + directories[i] + '\'')
        }
      }
    }
    fs.writeFileSync(directory + '/index.js', lines.join('\n') + '\n');
  } catch (err) {
    console.log(err)
  }
}
|
javascript
|
{
"resource": ""
}
|
q2264
|
generateFieldHeader
|
train
|
/**
 * Build the CSV header cell(s) for one field.
 * File fields expand to two columns (-name, -url) and barcode fields to two
 * columns (-format, -text); repeating fields get a 1-based "-<n>" suffix
 * taken from `fieldRepeatIndex`.
 * @param {Object} field - field definition (only `type` is read here).
 * @param {String} headerName - display name for the column.
 * @param {Number} [fieldRepeatIndex] - 0-based repeat index, or null/undefined
 *   for non-repeating fields.
 * @returns {String} one or two comma-separated, CSV-escaped header cells.
 */
function generateFieldHeader(field, headerName, fieldRepeatIndex) {
  var csv = '';
  //If the field is repeating, the structure of the header is different
  if (_.isNumber(fieldRepeatIndex)) {
    //If it is a file type field, need to add two fields for the file name and url
    if (fieldTypeUtils.isFileType(field.type)) {
      csv += csvStr(headerName + '-' + (fieldRepeatIndex + 1) + "-name") + ",";
      csv += csvStr(headerName + '-' + (fieldRepeatIndex + 1) + "-url");
    } else if (fieldTypeUtils.isBarcodeType(field.type)) {
      //If it is a barcode type field, need to add two fields for the format name and text
      csv += csvStr(headerName + '-' + (fieldRepeatIndex + 1) + "-format") +
        ",";
      csv += csvStr(headerName + '-' + (fieldRepeatIndex + 1) + "-text");
    } else {
      //Otherwise, just append the index.
      csv += csvStr(headerName + '-' + (fieldRepeatIndex + 1));
    }
  } else {
    //If it is a file type field, need to add two fields for the file name and url
    if (fieldTypeUtils.isFileType(field.type)) {
      csv += csvStr(headerName + "-name") + ",";
      csv += csvStr(headerName + "-url");
    } else if (fieldTypeUtils.isBarcodeType(field.type)) {
      //If it is a barcode type field, need to add two fields for the format name and text
      csv += csvStr(headerName + "-format") + ",";
      csv += csvStr(headerName + "-text");
    } else {
      csv += csvStr(headerName);
    }
  }
  return csv;
}
|
javascript
|
{
"resource": ""
}
|
q2265
|
generateCSVHeader
|
train
|
/**
 * Append one field's header cell(s) to a partially-built CSV header line,
 * inserting a separating comma when needed and collapsing any accidental
 * double commas produced by earlier appends.
 * @returns {String} the extended header line.
 */
function generateCSVHeader(csv, field, headerName, fieldRepeatIndex) {
  //If the previous csv value is set, then a ',' is needed to separate.
  if (csv) {
    csv += ',';
  }
  // Sanity check after the headers to ensure we don't have a double ,, appearing
  // The above if is necessary and cannot be removed
  if (endsWith(csv, ',,')) {
    csv = csv.slice(0, -1);
  }
  csv += generateFieldHeader(field, headerName, fieldRepeatIndex);
  return csv;
}
|
javascript
|
{
"resource": ""
}
|
q2266
|
generateCSVHeaders
|
train
|
/**
 * Build the full CSV header line (terminated with CRLF) for a form export.
 * Starts with the fixed metadata columns, then expands each field:
 * repeating sections duplicate their member fields once per allowed repeat
 * (prefixed "(section repeat: n)"), and repeating fields expand per
 * `maxRepeat`. Fields already emitted as part of a repeating section are
 * tracked in `fieldsProcessed` so they are not emitted twice.
 * @param {Array} fieldKeys - ordered field IDs to emit.
 * @param {Object} mergedFieldEntries - field ID -> field definition.
 * @param {String} fieldHeader - property to use as column name (falls back
 *   to the field's `name` when absent or not a string).
 * @returns {String} CSV header line ending in "\r\n".
 */
function generateCSVHeaders(fieldKeys, mergedFieldEntries, fieldHeader) {
  var csv = '';
  var fieldRepeatIndex = 0;
  // Here we need to add the metaDataHeaders
  _.each(metaDataHeaders, function(headerName) {
    csv += headerName + ',';
  });
  var fieldKeysProcessed = [];
  var fieldsProcessed = {};
  fieldKeys.forEach(function(fieldKey) {
    // check if its a repeating form (and extra headers required)
    var field = mergedFieldEntries[fieldKey];
    if (field.type === 'sectionBreak' && field.repeating) {
      var fieldsInThisSection = sectionUtils.getFieldsInSection(field._id, _.values(mergedFieldEntries));
      _.each(fieldsInThisSection, function(field) {
        fieldsProcessed[field._id] = true;
      });
      // Emit every section member once per allowed section repeat.
      for (var i = 0; i < field.fieldOptions.definition.maxRepeat; i++) {
        _.each(fieldsInThisSection, function(fieldInThisSection) {
          fieldKeysProcessed.push({key: fieldInThisSection._id, sectionIndex: i + 1, inRepeatingForm: true});
        });
      }
    } else if (!fieldsProcessed[fieldKey]) {
      fieldKeysProcessed.push({key: fieldKey});
    }
  });
  //for each form get each of the unique fields and add a header
  fieldKeysProcessed.forEach(function(processedField) {
    // check if its a repeating form (and extra headers required)
    var field = mergedFieldEntries[processedField.key];
    //Fields may not have a field code, if not just use the field name.
    var headerName = typeof (field[fieldHeader]) === "string" ? field[fieldHeader] : field.name;
    if (processedField.inRepeatingForm) {
      headerName = '(section repeat: ' + processedField.sectionIndex + ') ' + headerName;
    }
    if (field.repeating === true) {
      for (fieldRepeatIndex = 0; fieldRepeatIndex < field.fieldOptions.definition.maxRepeat; fieldRepeatIndex++) {
        csv = generateCSVHeader(csv, field, headerName, fieldRepeatIndex);
      }
    } else {
      csv = generateCSVHeader(csv, field, headerName, null);
    }
  });
  csv += '\r\n';
  return csv;
}
|
javascript
|
{
"resource": ""
}
|
q2267
|
ShaderAttribute
|
train
|
/**
 * Handle for a single vertex attribute of a shader program.
 * Caches everything needed to (re)bind the attribute or set a constant
 * value for it later on.
 * @constructor
 */
function ShaderAttribute(gl, program, location, dimension, name, constFunc, relink) {
  var fields = {
    _gl: gl,
    _program: program,
    _location: location,
    _dimension: dimension,
    _name: name,
    _constFunc: constFunc,
    _relink: relink
  }
  for (var key in fields) {
    this[key] = fields[key]
  }
}
|
javascript
|
{
"resource": ""
}
|
q2268
|
addVectorAttribute
|
train
|
/**
 * Define a named accessor on `obj` for one vector vertex attribute.
 * Getting the property returns the ShaderAttribute handle; setting it
 * disables the attribute array and uploads a constant value via a
 * generated `vertexAttrib{1..4}f[v]` dispatcher.
 * The dispatcher is built with the Function constructor so the correct
 * scalar-vs-array GL call is chosen without per-set branching overhead.
 */
function addVectorAttribute(gl, program, location, dimension, obj, name, doLink) {
  var constFuncArgs = [ 'gl', 'v' ]
  var varNames = []
  for(var i=0; i<dimension; ++i) {
    constFuncArgs.push('x'+i)
    varNames.push('x'+i)
  }
  // Generated body: scalar args -> vertexAttribNf, array arg -> vertexAttribNfv.
  constFuncArgs.push([
    'if(x0.length===void 0){return gl.vertexAttrib', dimension, 'f(v,', varNames.join(), ')}else{return gl.vertexAttrib', dimension, 'fv(v,x0)}'
  ].join(''))
  var constFunc = Function.apply(undefined, constFuncArgs)
  var attr = new ShaderAttribute(gl, program, location, dimension, name, constFunc, doLink)
  Object.defineProperty(obj, name, {
    set: function(x) {
      // Constant attribute: turn off the array source, then upload the value.
      gl.disableVertexAttribArray(attr._location)
      constFunc(gl, attr._location, x)
      return x
    }
    , get: function() {
      return attr
    }
    , enumerable: true
  })
}
|
javascript
|
{
"resource": ""
}
|
q2269
|
createAttributeWrapper
|
train
|
/**
 * Build an object exposing one accessor per shader attribute.
 * Scalar types (bool/int/float) become 1-dimensional attributes; `vecN`
 * types become N-dimensional. Any other type is rejected.
 * @throws {Error} for vec dimensions outside 2..4 or unknown types.
 * @returns {Object} map of attribute name -> accessor (see addVectorAttribute).
 */
function createAttributeWrapper(gl, program, attributes, doLink) {
  var obj = {}
  for(var i=0, n=attributes.length; i<n; ++i) {
    var a = attributes[i]
    var name = a.name
    var type = a.type
    var location = gl.getAttribLocation(program, name)
    switch(type) {
      case 'bool':
      case 'int':
      case 'float':
        addVectorAttribute(gl, program, location, 1, obj, name, doLink)
      break
      default:
        if(type.indexOf('vec') >= 0) {
          // Dimension is the trailing digit of e.g. "vec3" (charcode - '0').
          var d = type.charCodeAt(type.length-1) - 48
          if(d < 2 || d > 4) {
            throw new Error('gl-shader: Invalid data type for attribute ' + name + ': ' + type)
          }
          addVectorAttribute(gl, program, location, d, obj, name, doLink)
        } else {
          throw new Error('gl-shader: Unknown data type for attribute ' + name + ': ' + type)
        }
      break
    }
  }
  return obj
}
|
javascript
|
{
"resource": ""
}
|
q2270
|
render
|
train
|
/**
 * Render all pages and collections in parallel, then reassemble the
 * drizzle data object with the rendered results.
 * NOTE(review): the rendered collections land under the `patterns` key —
 * presumably intentional given downstream consumers; confirm before renaming.
 * @param {Object} drizzleData - full drizzle build context.
 * @returns {Promise<Object>} the augmented drizzle data.
 */
function render(drizzleData) {
  return Promise.all([
    renderPages(drizzleData),
    renderCollections(drizzleData)
  ]).then(
    allData => {
      return {
        data: drizzleData.data,
        pages: allData[0],
        patterns: allData[1],
        templates: drizzleData.templates,
        options: drizzleData.options,
        tree: drizzleData.tree
      };
    },
    error => DrizzleError.error(error, drizzleData.options.debug)
  );
}
|
javascript
|
{
"resource": ""
}
|
q2271
|
convertToZStream
|
train
|
/**
 * Convert an arbitrary stream into a zstream.
 * Already-converted streams are returned as-is; request streams and
 * classic (pre-streams2) streams get dedicated wrapper classes; everything
 * else is mixed-in in place (duck typing) so the original object identity
 * is preserved for its callers.
 * @param {Object} stream - the stream to convert.
 * @param {Object} [options] - passed through to wrapper constructors/mixins.
 * @returns {Object} a zstream-capable stream.
 */
function convertToZStream(stream, options) {
  if(stream._isZStream) return stream;
  if(isRequestStream(stream)) {
    // Request Stream
    return new RequestStream(stream, options);
  }
  if(isClassicStream(stream)) {
    if(stream.readable && stream.writable) {
      // Duplex
      return new ClassicDuplex(stream, options);
    } else if(stream.readable) {
      // Readable
      return new ClassicReadable(stream, options);
    } else {
      // Writable
      return new ClassicWritable(stream, options);
    }
  }
  // Keep the original methods so the mixins can delegate to them.
  var origFuncs = {};
  for(var key in stream) {
    origFuncs[key] = stream[key];
  }
  // Use duck typing in case of multiple stream implementations
  extend(stream, streamMixins.prototype);
  streamMixins.call(stream, origFuncs);
  if(stream.read) {
    extend(stream, readableMixins.prototype);
    readableMixins.call(stream);
  }
  if(stream.write) {
    extend(stream, writableMixins.prototype);
    writableMixins.call(stream);
  }
  if(typeof process === 'object' && (stream === process.stdout || stream === process.stderr)) {
    // Don't abort stdio streams on error
    stream._zNoAbort = true;
  }
  return stream;
}
|
javascript
|
{
"resource": ""
}
|
q2272
|
isDirty
|
train
|
/**
 * Check whether an Entity instance has unsaved changes.
 * With no argument, checks every attribute; with an attribute name, checks
 * only that one. New (never-persisted) entities are always dirty.
 * NOTE(review): `_cleanSet`, `_attributeIsSet` and `_attributeStorageValues`
 * are closure state maintained elsewhere in this module — storage values
 * hold the last persisted value per attribute.
 * @param {String} [attribute] - single attribute to check.
 * @returns {Boolean} true when a checked attribute differs from storage.
 * @throws {AssertionError} on bad arguments or unknown attribute names.
 */
function isDirty(attribute) {
  expect(arguments).to.have.length.below(
    2,
    'Invalid arguments length when checking if an Entity attribute is ' +
    'dirty (it has to be passed less than 2 arguments)'
  );
  var attributes = this.Entity.attributes;
  if (attribute) {
    expect(attribute).to.be.a(
      'string',
      'Invalid argument "attribute" when checking if an Entity attribute ' +
      'is dirty (it has to be a string)'
    );
    expect(attributes).to.have.ownProperty(
      attribute,
      'Invalid argument "attribute" when checking an Entity attribute ' +
      'is dirty (this attribute does not exist in the Entity)'
    );
    // Narrow the check to the single requested attribute.
    var newAttributes = {};
    newAttributes[attribute] = attributes[attribute];
    attributes = newAttributes;
  }
  if (this.isNew) {
    return true;
  }
  for (var attributeName in attributes) {
    if (_cleanSet) {
      // After a clean(), only attributes explicitly set since then count.
      if (_attributeIsSet[attributeName]) {
        if (
          !_attributeStorageValues.hasOwnProperty(attributeName) ||
          _attributeStorageValues[attributeName] !== this[attributeName]
        ) {
          return true;
        }
      }
    } else {
      if (
        !_attributeStorageValues.hasOwnProperty(attributeName) ||
        _attributeStorageValues[attributeName] !== this[attributeName]
      ) {
        return true;
      }
    }
  }
  return false;
}
|
javascript
|
{
"resource": ""
}
|
q2273
|
clean
|
train
|
/**
 * Mark Entity attributes as clean (in sync with storage).
 * With no argument, cleans every attribute; with a name, cleans only that
 * one. Records each attribute's current value as its storage value and
 * resets its "set since clean" flag.
 * NOTE(review): `_attributeStorageValues` and `_attributeIsSet` are closure
 * state shared with isDirty elsewhere in this module.
 * @param {String} [attribute] - single attribute to clean.
 * @throws {AssertionError} on bad arguments or unknown attribute names.
 */
function clean(attribute) {
  expect(arguments).to.have.length.below(
    2,
    'Invalid arguments length when cleaning an Entity attribute (it has ' +
    'to be passed less than 2 arguments)'
  );
  var attributes = this.Entity.attributes;
  if (attribute) {
    expect(attribute).to.be.a(
      'string',
      'Invalid argument "attribute" when cleaning an Entity attribute (it ' +
      'has to be a string)'
    );
    expect(attributes).to.have.ownProperty(
      attribute,
      'Invalid argument "attribute" when cleaning an Entity attribute ' +
      '(this attribute does not exist in the Entity)'
    );
    // Narrow the operation to the single requested attribute.
    var newAttributes = {};
    newAttributes[attribute] = attributes[attribute];
    attributes = newAttributes;
  }
  for (var attributeName in attributes) {
    _attributeStorageValues[attributeName] = this[attributeName];
    _attributeIsSet[attributeName] = false;
  }
}
|
javascript
|
{
"resource": ""
}
|
q2274
|
_visitSpecializations
|
train
|
/**
 * Recursively collect an entity map and all of its (transitive)
 * specializations into `visitedEntities`, keyed by entity name.
 * Already-visited names are skipped, which also terminates cycles.
 * @param {Object} entities - map of entity name -> entity; each entity has
 *   a `directSpecializations` map of the same shape.
 * @param {Object} visitedEntities - accumulator, mutated in place.
 */
function _visitSpecializations(entities, visitedEntities) {
  for (var entityName in entities) {
    if (visitedEntities.hasOwnProperty(entityName)) {
      continue;
    }
    visitedEntities[entityName] = entities[entityName];
    _visitSpecializations(
      entities[entityName].directSpecializations,
      visitedEntities
    );
  }
}
|
javascript
|
{
"resource": ""
}
|
q2275
|
train
|
// Factory: builds the Entity.getSpecialization(name) accessor for a given
// Entity class. The returned function looks the name up in the class's
// specializations map and throws EntityNotFoundError when it is missing.
function (CurrentEntity) {
  return function (entity) {
    expect(arguments).to.have.length(
      1,
      'Invalid arguments length when getting an Entity specialization (it ' +
      'has to be passed 1 argument)'
    );
    expect(entity).to.be.a(
      'string',
      'Invalid argument when creating a new Entity function (it has to be ' +
      'a string'
    );
    var entities = CurrentEntity.specializations;
    try {
      expect(entities).to.have.ownProperty(entity);
    } catch (e) {
      // Re-throw the assertion as a domain error carrying the entity name.
      throw new errors.EntityNotFoundError(
        entity,
        e
      );
    }
    return entities[entity];
  };
}
|
javascript
|
{
"resource": ""
}
|
|
q2276
|
train
|
// Factory: builds the Entity.new(entityName) helper for a given Entity
// class. The returned curried function yields a constructor wrapper that
// instantiates either CurrentEntity itself or, when a specialization name
// was given, that specialization class.
function (CurrentEntity) {
  return function (entity) {
    expect(arguments).to.have.length.below(
      2,
      'Invalid arguments length when creating a new Entity function (it has ' +
      'to be passed less than 2 arguments)'
    );
    return function (attributeValues) {
      expect(arguments).to.have.length.below(
        2,
        'Invalid arguments length when creating a new Entity (it has ' +
        'not to be passed less than 2 arguments)'
      );
      var EntityClass = CurrentEntity;
      if (entity) {
        // Resolve the specialization lazily, at instantiation time.
        EntityClass = Entity.getSpecialization(entity);
      }
      return new EntityClass(attributeValues);
    };
  };
}
|
javascript
|
{
"resource": ""
}
|
|
q2277
|
train
|
// Factory: builds the Entity.create(attributeValues) helper for a given
// Entity class. The returned function instantiates the entity, persists it
// with forceCreate, and resolves with the saved instance.
function (CurrentEntity) {
  return function (attributeValues) {
    expect(arguments).to.have.length.below(
      2,
      'Invalid arguments length when creating a new "' +
      CurrentEntity.specification.name +
      '" instance (it has to be passed less than 2 arguments)');
    return new Promise(function (resolve, reject) {
      var newEntity = new CurrentEntity(attributeValues);
      newEntity
        .save({
          forceCreate: true
        })
        .then(function () {
          // Resolve with the entity itself, not save()'s return value.
          resolve(newEntity);
        })
        .catch(reject);
    });
  };
}
|
javascript
|
{
"resource": ""
}
|
|
q2278
|
_getFindFunction
|
train
|
/**
 * Factory: builds the Entity.find(query, params) function for a given
 * Entity class. The returned function validates its arguments and
 * delegates to the class's adapter, wrapped in a Promise so adapter
 * errors become rejections.
 */
function _getFindFunction(CurrentEntity) {
  return function (query, params) {
    expect(arguments).to.have.length.within(
      1,
      2,
      'Invalid arguments length when finding an Entity ' +
      '(it has to be passed 1 or 2 arguments)'
    );
    expect(query).to.be.an(
      'object',
      'Invalid argument when finding an Entity (it has to be an object)'
    );
    return Promise.try(function () {
      var adapter = CurrentEntity.adapter;
      return adapter.findObjects(CurrentEntity, query, params);
    });
  };
}
|
javascript
|
{
"resource": ""
}
|
q2279
|
isValid
|
train
|
/**
 * Non-throwing wrapper around validate(): returns false when validation
 * fails with a ValidationError, true when it passes.
 * Any other error type is re-thrown untouched.
 * @param {String} [attribute] - forwarded to validate(); restricts the
 *   check to a single attribute when given.
 * @returns {Boolean} whether the entity (or attribute) is valid.
 */
function isValid(attribute) {
  try {
    this.validate(attribute);
    return true;
  } catch (e) {
    if (e instanceof errors.ValidationError) {
      return false;
    }
    throw e;
  }
}
|
javascript
|
{
"resource": ""
}
|
q2280
|
relativePathArray
|
train
|
/**
 * Express the directory of `filePath` as an array of path segments
 * relative to `fromPath`.
 * Returns an empty array when `filePath` does not contain `fromPath`,
 * when the two are identical, or when the file sits directly in
 * `fromPath`.
 * @param {String} filePath - path to a file.
 * @param {String} fromPath - base path to relativize against.
 * @returns {Array<String>} relative directory segments (possibly empty).
 */
function relativePathArray(filePath, fromPath) {
  const normalizedFile = path.normalize(filePath);
  const normalizedFrom = path.normalize(fromPath);
  const outsideBase =
    normalizedFile.indexOf(normalizedFrom) === -1 ||
    normalizedFile === normalizedFrom;
  if (outsideBase) {
    // TODO Error handling: this should cause a warn
    return [];
  }
  const relativeDir = path.relative(normalizedFrom, path.dirname(normalizedFile));
  return relativeDir && relativeDir.length !== 0
    ? relativeDir.split(path.sep)
    : [];
}
|
javascript
|
{
"resource": ""
}
|
q2281
|
resourcePath
|
train
|
/**
 * Compute the output file path for a resource's rendered HTML.
 * The ID's first key (the "resource type") is dropped unless it is the
 * only key, in which case it doubles as the filename; the last remaining
 * key becomes the `.html` filename and any keys between become
 * subdirectories under `dest`.
 * @param {String} resourceId - delimited resource ID (see idKeys).
 * @param {String} [dest=''] - destination directory prefix.
 * @returns {String} normalized output path ending in `.html`.
 */
function resourcePath(resourceId, dest = '') {
  const subPath = idKeys(resourceId);
  // Remove first item because it is the "resource type"
  // If there _is_ only one item in the ID, it will be left alone
  // To serve as the filename.
  // (`length > 1` already implies `length !== 0`; the redundant check is gone.)
  if (subPath.length > 1) {
    subPath.shift();
  }
  const filename = subPath.pop() + '.html';
  const outputPath = path.normalize(
    path.join(dest, subPath.join(path.sep), filename)
  );
  return outputPath;
}
|
javascript
|
{
"resource": ""
}
|
q2282
|
ZPassThrough
|
train
|
/**
 * PassThrough variant supporting *asymmetric* object modes: the readable
 * and writable sides can independently be in object mode.
 * `objectMode` is normalised in both directions before calling the base
 * constructor, then the per-side internal state is patched when only one
 * side is in object mode. High water marks are reset to the object-mode
 * default (16) for each object-mode side.
 * @constructor
 * @param {Object} [options] - stream options, plus
 *   readableObjectMode/writableObjectMode.
 */
function ZPassThrough(options) {
  if(options) {
    if(options.objectMode) {
      options.readableObjectMode = true;
      options.writableObjectMode = true;
    }
    if(options.readableObjectMode && options.writableObjectMode) {
      options.objectMode = true;
    }
  }
  PassThrough.call(this, options);
  // note: exclamation marks are used to convert to booleans
  if(options && !options.objectMode && (!options.readableObjectMode) !== (!options.writableObjectMode)) {
    // Exactly one side is in object mode: patch the internals directly,
    // since the base constructor only supports symmetric objectMode.
    this._writableState.objectMode = !!options.writableObjectMode;
    this._readableState.objectMode = !!options.readableObjectMode;
  }
  if(options && options.readableObjectMode) {
    this._readableState.highWaterMark = 16;
  }
  if(options && options.writableObjectMode) {
    this._writableState.highWaterMark = 16;
  }
  streamMixins.call(this, PassThrough.prototype, options);
  readableMixins.call(this, options);
  writableMixins.call(this, options);
}
|
javascript
|
{
"resource": ""
}
|
q2283
|
destRoot
|
train
|
/**
 * Compute the path from a resource type's output directory back to the
 * overall destination root (used for building relative asset links).
 * @param {String} type - singular resource type: 'page', 'collection' or
 *   'pattern'.
 * @param {Object} drizzle - drizzle context carrying `options.dest`.
 * @returns {String} relative path from the type's dest dir to dest.root.
 */
function destRoot(type, drizzle) {
  const options = drizzle.options;
  // TODO: this is unfortunate, and due to difficulty using defaults.keys
  const keys = new Map([
    ['page', 'pages'],
    ['collection', 'collections'],
    ['pattern', 'patterns']
  ]);
  return relativePath(options.dest.root, options.dest[keys.get(type)]);
}
|
javascript
|
{
"resource": ""
}
|
q2284
|
isHidden
|
train
|
/**
 * Decide whether a pattern should be hidden from collection output.
 * A pattern is hidden when its key appears in the collection's `hidden`
 * list, or when the pattern's own data sets `hidden`.
 * @returns {*} truthy when hidden (not coerced to a strict boolean).
 */
function isHidden(collection, pattern, patternKey) {
  const collectionHidesIt =
    collection.hidden && collection.hidden.indexOf(patternKey) !== -1;
  const patternHidesItself = pattern.data && pattern.data.hidden;
  return collectionHidesIt || patternHidesItself;
}
|
javascript
|
{
"resource": ""
}
|
q2285
|
isCollection
|
train
|
/**
 * Decide whether a file-tree node represents a pattern collection:
 * the node itself is not a pattern, but at least one of its direct
 * children is.
 * @param {Object} obj - file-tree node.
 * @returns {Boolean} true when the node is a collection.
 */
function isCollection(obj) {
  if (isPattern(obj)) {
    return false;
  }
  for (const childKey of Object.keys(obj)) {
    if (isPattern(obj[childKey])) {
      return true;
    }
  }
  return false;
}
|
javascript
|
{
"resource": ""
}
|
q2286
|
buildPattern
|
train
|
/**
 * Finalise a single pattern object: derive its display name from its
 * front-matter `data.name` when present, otherwise title-case the key of
 * its source file path. Mutates and returns the pattern object.
 */
function buildPattern(patternObj, options) {
  const patternFile = { path: patternObj.path };
  return Object.assign(patternObj, {
    name:
      (patternObj.data && patternObj.data.name) ||
      titleCase(resourceKey(patternFile))
  });
}
|
javascript
|
{
"resource": ""
}
|
q2287
|
buildPatterns
|
train
|
/**
 * Pull all direct pattern children out of a collection node.
 * Each pattern is finalised via buildPattern and *removed* from the
 * collection object, leaving only non-pattern children behind.
 * @returns {Object} map of child key -> built pattern.
 */
function buildPatterns(collectionObj, options) {
  const patterns = {};
  for (const childKey in collectionObj) {
    if (isPattern(collectionObj[childKey])) {
      patterns[childKey] = buildPattern(collectionObj[childKey], options);
      // Moved into `patterns`; the original slot must not linger.
      delete collectionObj[childKey];
    }
  }
  return patterns;
}
|
javascript
|
{
"resource": ""
}
|
q2288
|
buildCollection
|
train
|
/**
 * Assemble a collection node: extract its patterns, read any collection
 * metadata file alongside them, and attach a `collection` object carrying
 * name, resource type, ID, the extracted `items` and an ordered `patterns`
 * list. Warns (via checkNamespaceCollision) if the metadata tries to use
 * the reserved `items`/`patterns` keys.
 * @returns {Promise<Object>} the mutated collection node.
 */
function buildCollection(collectionObj, options) {
  const items = buildPatterns(collectionObj, options);
  // Synthetic "file" whose path stands in for the collection directory.
  const pseudoFile = { path: collectionPath(items) };
  return readFiles(collectionGlob(items), options).then(collData => {
    const collectionMeta = collData.length ? collData[0].contents : {};
    collectionObj.collection = Object.assign(
      {
        name: titleCase(collectionKey(items)),
        resourceType: options.keys.collections.singular,
        id: resourceId(
          pseudoFile,
          options.src.patterns.basedir,
          options.keys.collections.plural
        )
      },
      collectionMeta
    );
    checkNamespaceCollision(
      ['items', 'patterns'],
      collectionObj.collection,
      `Collection ${collectionObj.collection.name}`,
      options
    );
    collectionObj.collection.items = items;
    collectionObj.collection.patterns = buildOrderedPatterns(
      collectionObj.collection
    );
    return collectionObj;
  });
}
|
javascript
|
{
"resource": ""
}
|
q2289
|
buildCollections
|
train
|
/**
 * Walk the pattern tree depth-first, scheduling buildCollection for every
 * node that qualifies as a collection. Pattern leaves terminate recursion;
 * already-built `collection` entries are skipped.
 * @param {Object} patternObj - current tree node.
 * @param {Object} options - drizzle options.
 * @param {Array} [collectionPromises] - accumulator of pending builds.
 * @returns {Array<Promise>} all scheduled collection-build promises.
 */
function buildCollections(patternObj, options, collectionPromises = []) {
  if (isPattern(patternObj)) {
    return collectionPromises;
  }
  if (isCollection(patternObj)) {
    collectionPromises.push(buildCollection(patternObj, options));
  }
  for (const patternKey in patternObj) {
    if (patternKey !== 'collection') {
      buildCollections(patternObj[patternKey], options, collectionPromises);
    }
  }
  return collectionPromises;
}
|
javascript
|
{
"resource": ""
}
|
q2290
|
parsePatterns
|
train
|
/**
 * Read the pattern source tree from disk and build all of its collections.
 * Resolves with the fully-built pattern object; build errors are routed
 * through DrizzleError.error honoring the debug flag.
 * @param {Object} options - drizzle options (src/keys/debug are read).
 * @returns {Promise<Object>} the pattern tree with collections attached.
 */
function parsePatterns(options) {
  return readFileTree(
    options.src.patterns,
    options.keys.patterns,
    options
  ).then(patternObj => {
    return Promise.all(buildCollections(patternObj, options)).then(
      () => patternObj,
      error => DrizzleError.error(error, options.debug)
    );
  });
}
|
javascript
|
{
"resource": ""
}
|
q2291
|
getAdminDbUrl
|
train
|
/**
 * Rewrite a MongoDB connection string so it authenticates as the forms
 * admin user against the `admin` database, with an explicit pool size.
 * @param {String} mongoConnectionString - base connection string.
 * @param {Object} formUser - credentials with `user` and `pass`.
 * @param {Number} [poolSize] - connection pool size; defaults to
 *   MONGODB_DEFAULT_POOL_SIZE.
 * @returns {String} the reformatted connection URL.
 */
function getAdminDbUrl(mongoConnectionString, formUser, poolSize) {
  var parsedMongoUrl = mongoUrlParser.parse(mongoConnectionString);
  parsedMongoUrl.username = formUser.user;
  parsedMongoUrl.password = formUser.pass;
  //according to this: https://docs.mongodb.com/v2.4/reference/user-privileges/#any-database-roles, this type of user should be created in the `admin` database.
  parsedMongoUrl.database = "admin";
  parsedMongoUrl.options = parsedMongoUrl.options || {};
  parsedMongoUrl.options.poolSize = poolSize || MONGODB_DEFAULT_POOL_SIZE;
  var mongourl = mongoUrlParser.format(parsedMongoUrl);
  return mongourl;
}
|
javascript
|
{
"resource": ""
}
|
q2292
|
getMongodbConnection
|
train
|
/**
 * Open a raw MongoDB driver connection for the data_source_update job.
 * @param {String} mongoDbUrl - connection URL.
 * @param {Object} logger - logger with a debug method.
 * @param {Function} cb - node-style callback receiving (err, db).
 */
function getMongodbConnection(mongoDbUrl, logger, cb) {
  logger.debug("creating mongodb connection for data_source_update job", {mongoDbUrl: mongoDbUrl});
  MongoClient.connect(mongoDbUrl, cb);
}
|
javascript
|
{
"resource": ""
}
|
q2293
|
getMongooseConnection
|
train
|
/**
 * Create a Mongoose connection for the data_source_update job.
 * Note: mongoose.createConnection does not wait for the socket to open;
 * the callback is invoked immediately with the (lazy) connection object.
 * @param {String} mongoDbUrl - connection URL.
 * @param {Object} logger - logger with a debug method.
 * @param {Function} cb - node-style callback receiving (undefined, connection).
 */
function getMongooseConnection(mongoDbUrl, logger, cb) {
  logger.debug("creating mongoose connection for data_source_update job", {mongoDbUrl: mongoDbUrl});
  var mongooseConnection = mongoose.createConnection(mongoDbUrl);
  return cb(undefined, mongooseConnection);
}
|
javascript
|
{
"resource": ""
}
|
q2294
|
sortByProp
|
train
|
/**
 * Return a copy of `list` sorted by a property of each element, using the
 * shared `sortObjects` comparator.
 * @param {String|Array} prop - property name, or a path array for nested
 *   properties (Ramda `R.path` is used when an array is given).
 * @param {Array} list - elements to sort; not mutated.
 * @returns {Array} new sorted array.
 */
function sortByProp(prop, list) {
  const get = R.is(Array, prop) ? R.path : R.prop;
  return R.sort((elA, elB) => {
    const a = get(prop, elA);
    const b = get(prop, elB);
    return sortObjects(a, b);
  }, list);
}
|
javascript
|
{
"resource": ""
}
|
q2295
|
ClassicReadable
|
train
|
/**
 * Adapter exposing a classic (pre-streams2) readable stream as a zstream
 * Readable, via the standard Readable.wrap mechanism plus the classic
 * mixins.
 * @constructor
 * @param {Object} stream - the classic stream to wrap.
 * @param {Object} [options] - Readable options.
 */
function ClassicReadable(stream, options) {
  Readable.call(this, options);
  classicMixins.call(this, stream, options);
  // Readable streams already include a wrapping for Classic Streams
  this.wrap(stream);
}
|
javascript
|
{
"resource": ""
}
|
q2296
|
list
|
train
|
/**
 * Express middleware: list the form associations of every project and
 * stash the result via the shared form-projects result handler.
 */
function list(req, res, next) {
  forms.getAllAppForms(req.connectionOptions, formsResultHandlers(constants.resultTypes.formProjects, req, next));
}
|
javascript
|
{
"resource": ""
}
|
q2297
|
update
|
train
|
/**
 * Express middleware: replace the list of forms associated with a project.
 * The project ID comes from the route parameter, falling back to the
 * body's `_id`; a missing `forms` array is treated as empty (i.e. all
 * associations removed).
 */
function update(req, res, next) {
  var params = {
    appId: req.params.id || req.body._id,
    forms: req.body.forms || []
  };
  forms.updateAppForms(req.connectionOptions, params, formsResultHandlers(constants.resultTypes.formProjects, req, next));
}
|
javascript
|
{
"resource": ""
}
|
q2298
|
updateTheme
|
train
|
/**
 * Express middleware: set a project's theme when the request body carries
 * one; otherwise pass straight through without touching the theme.
 */
function updateTheme(req, res, next) {
  //No theme sent, no need update the project theme
  if (!req.body.theme) {
    return next();
  }
  var params = {
    appId: req.params.id || req.body._id,
    theme: req.body.theme
  };
  forms.setAppTheme(_.extend(req.connectionOptions, params), formsResultHandlers(constants.resultTypes.formProjects, req, next));
}
|
javascript
|
{
"resource": ""
}
|
q2299
|
getFullTheme
|
train
|
/**
 * Express middleware: fetch a theme with its full definition (not just the
 * summary) by flagging the request before delegating to getTheme.
 */
function getFullTheme(req, res, next) {
  req.getFullTheme = true;
  getTheme(req, res, next);
}
|
javascript
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.