language
stringclasses
6 values
original_string
stringlengths
25
887k
text
stringlengths
25
887k
JavaScript
addDoc(newDocument) { const name = newDocument.ref.path; assert(!this.docMap.has(name), 'Document to add already exists'); this.docTree = this.docTree.insert(newDocument, null); const newIndex = this.docTree.find(newDocument).index; this.docMap.set(name, newDocument); return new document_change_1.DocumentChange(ChangeType.added, newDocument, -1, newIndex); }
addDoc(newDocument) { const name = newDocument.ref.path; assert(!this.docMap.has(name), 'Document to add already exists'); this.docTree = this.docTree.insert(newDocument, null); const newIndex = this.docTree.find(newDocument).index; this.docMap.set(name, newDocument); return new document_change_1.DocumentChange(ChangeType.added, newDocument, -1, newIndex); }
JavaScript
modifyDoc(newDocument) { const name = newDocument.ref.path; assert(this.docMap.has(name), 'Document to modify does not exist'); const oldDocument = this.docMap.get(name); if (!oldDocument.updateTime.isEqual(newDocument.updateTime)) { const removeChange = this.deleteDoc(name); const addChange = this.addDoc(newDocument); return new document_change_1.DocumentChange(ChangeType.modified, newDocument, removeChange.oldIndex, addChange.newIndex); } return null; }
modifyDoc(newDocument) { const name = newDocument.ref.path; assert(this.docMap.has(name), 'Document to modify does not exist'); const oldDocument = this.docMap.get(name); if (!oldDocument.updateTime.isEqual(newDocument.updateTime)) { const removeChange = this.deleteDoc(name); const addChange = this.addDoc(newDocument); return new document_change_1.DocumentChange(ChangeType.modified, newDocument, removeChange.oldIndex, addChange.newIndex); } return null; }
JavaScript
/**
 * Applies all changes accumulated up to `readTime` to the local document
 * view and returns the list of DocumentChanges in client-facing order.
 */
computeSnapshot(readTime) {
  const changeSet = this.extractCurrentChanges(readTime);
  const appliedChanges = [];
  // Process the sorted changes in the order that is expected by our clients
  // (removals, additions, and then modifications). We also need to sort the
  // individual changes to assure that oldIndex/newIndex keep incrementing.
  changeSet.deletes.sort((name1, name2) => {
    // Deletes are sorted based on the order of the existing document.
    return this.getComparator()(this.docMap.get(name1), this.docMap.get(name2));
  });
  changeSet.deletes.forEach(name => {
    const change = this.deleteDoc(name);
    appliedChanges.push(change);
  });
  changeSet.adds.sort(this.getComparator());
  changeSet.adds.forEach(snapshot => {
    const change = this.addDoc(snapshot);
    appliedChanges.push(change);
  });
  changeSet.updates.sort(this.getComparator());
  changeSet.updates.forEach(snapshot => {
    // modifyDoc() returns null when the update did not actually change the
    // document; those are dropped from the emitted change list.
    const change = this.modifyDoc(snapshot);
    if (change) {
      appliedChanges.push(change);
    }
  });
  // Tree and map must stay in lockstep after applying all changes.
  assert(this.docTree.length === this.docMap.size, 'The update document ' +
    'tree and document map should have the same number of entries.');
  return appliedChanges;
}
JavaScript
isPermanentError(error) { if (error.code === undefined) { logger_1.logger('Watch.isPermanentError', this.requestTag, 'Unable to determine error code: ', error); return false; } switch (error.code) { case GRPC_STATUS_CODE.ABORTED: case GRPC_STATUS_CODE.CANCELLED: case GRPC_STATUS_CODE.UNKNOWN: case GRPC_STATUS_CODE.DEADLINE_EXCEEDED: case GRPC_STATUS_CODE.RESOURCE_EXHAUSTED: case GRPC_STATUS_CODE.INTERNAL: case GRPC_STATUS_CODE.UNAVAILABLE: case GRPC_STATUS_CODE.UNAUTHENTICATED: return false; default: return true; } }
isPermanentError(error) { if (error.code === undefined) { logger_1.logger('Watch.isPermanentError', this.requestTag, 'Unable to determine error code: ', error); return false; } switch (error.code) { case GRPC_STATUS_CODE.ABORTED: case GRPC_STATUS_CODE.CANCELLED: case GRPC_STATUS_CODE.UNKNOWN: case GRPC_STATUS_CODE.DEADLINE_EXCEEDED: case GRPC_STATUS_CODE.RESOURCE_EXHAUSTED: case GRPC_STATUS_CODE.INTERNAL: case GRPC_STATUS_CODE.UNAVAILABLE: case GRPC_STATUS_CODE.UNAUTHENTICATED: return false; default: return true; } }
JavaScript
/**
 * Enqueues a create operation for the given document. The write is built
 * lazily at commit time and carries an `exists: false` precondition so it
 * fails if the document already exists. Returns this batch for chaining.
 */
create(documentRef, data) {
  reference_1.validateDocumentReference('documentRef', documentRef);
  validateDocumentData('data', data, /* allowDeletes= */ false);
  this.verifyNotCommitted();
  const transform = document_1.DocumentTransform.fromObject(documentRef, data);
  transform.validate();
  // The write must fail if the document already exists.
  const precondition = new document_1.Precondition({ exists: false });
  // Proto construction is deferred until commit so the serializer state at
  // commit time is used.
  const op = () => {
    const document = document_1.DocumentSnapshot.fromObject(documentRef, data);
    // NOTE(review): the condition reads `transform.isEmpty` un-negated —
    // presumably a write proto is still sent when there is no transform to
    // carry the precondition; confirm against DocumentTransform semantics.
    const write = !document.isEmpty || transform.isEmpty ? document.toProto() : null;
    return {
      write,
      transform: transform.toProto(this._serializer),
      precondition: precondition.toProto(),
    };
  };
  this._ops.push(op);
  return this;
}
JavaScript
/**
 * Enqueues an update operation for the given document. Accepts either a
 * single update object, or an alternating list of field/value pairs; in
 * both forms an optional precondition may be supplied last. Throws with a
 * usage message when validation fails. Returns this batch for chaining.
 */
update(documentRef, dataOrField, ...preconditionOrValues) {
  validate_1.validateMinNumberOfArguments('WriteBatch.update', arguments, 2);
  reference_1.validateDocumentReference('documentRef', documentRef);
  this.verifyNotCommitted();
  const updateMap = new Map();
  // Updates require the document to already exist unless overridden below.
  let precondition = new document_1.Precondition({ exists: true });
  const argumentError = 'Update() requires either a single JavaScript ' +
    'object or an alternating list of field/value pairs that can be ' +
    'followed by an optional precondition.';
  // A string or FieldPath second argument selects the varargs calling form.
  const usesVarargs = typeof dataOrField === 'string' || dataOrField instanceof path_1.FieldPath;
  if (usesVarargs) {
    try {
      for (let i = 1; i < arguments.length; i += 2) {
        if (i === arguments.length - 1) {
          // A trailing unpaired argument is the optional precondition.
          validateUpdatePrecondition(i, arguments[i]);
          precondition = new document_1.Precondition(arguments[i]);
        }
        else {
          path_1.validateFieldPath(i, arguments[i]);
          // Unlike the `validateMinNumberOfArguments` invocation above, this
          // validation can be triggered both from `WriteBatch.update()` and
          // `DocumentReference.update()`. Hence, we don't use the fully
          // qualified API name in the error message.
          validate_1.validateMinNumberOfArguments('update', arguments, i + 1);
          const fieldPath = path_1.FieldPath.fromArgument(arguments[i]);
          validateFieldValue(i, arguments[i + 1], fieldPath);
          updateMap.set(fieldPath, arguments[i + 1]);
        }
      }
    }
    catch (err) {
      logger_1.logger('WriteBatch.update', null, 'Varargs validation failed:', err);
      // We catch the validation error here and re-throw to provide a better
      // error message.
      throw new Error(`${argumentError} ${err.message}`);
    }
  }
  else {
    try {
      validateUpdateMap('dataOrField', dataOrField);
      validate_1.validateMaxNumberOfArguments('update', arguments, 3);
      const data = dataOrField;
      Object.keys(data).forEach(key => {
        path_1.validateFieldPath(key, key);
        updateMap.set(path_1.FieldPath.fromArgument(key), data[key]);
      });
      if (preconditionOrValues.length > 0) {
        validateUpdatePrecondition('preconditionOrValues', preconditionOrValues[0]);
        precondition = new document_1.Precondition(preconditionOrValues[0]);
      }
    }
    catch (err) {
      logger_1.logger('WriteBatch.update', null, 'Non-varargs validation failed:', err);
      // We catch the validation error here and prefix the error with a custom
      // message to describe the usage of update() better.
      throw new Error(`${argumentError} ${err.message}`);
    }
  }
  validateNoConflictingFields('dataOrField', updateMap);
  const transform = document_1.DocumentTransform.fromUpdateMap(documentRef, updateMap);
  transform.validate();
  const documentMask = document_1.DocumentMask.fromUpdateMap(updateMap);
  // Proto construction is deferred until commit time.
  const op = () => {
    const document = document_1.DocumentSnapshot.fromUpdateMap(documentRef, updateMap);
    let write = null;
    if (!document.isEmpty || !documentMask.isEmpty) {
      write = document.toProto();
      write.updateMask = documentMask.toProto();
    }
    return {
      write,
      transform: transform.toProto(this._serializer),
      precondition: precondition.toProto(),
    };
  };
  this._ops.push(op);
  return this;
}
JavaScript
/**
 * Sends all enqueued writes in a single Commit RPC and resolves with one
 * WriteResult per enqueued operation. May first start a transaction (see
 * below) and re-enter itself with the transaction id.
 */
async commit_(commitOptions) {
  // Note: We don't call `verifyNotCommitted()` to allow for retries.
  this._committed = true;
  const tag = (commitOptions && commitOptions.requestTag) || util_1.requestTag();
  await this._firestore.initializeIfNeeded(tag);
  const database = this._firestore.formattedName;
  const request = { database };
  // On GCF, we periodically force transactional commits to allow for
  // request retries in case GCF closes our backend connection.
  const explicitTransaction = commitOptions && commitOptions.transactionId;
  if (!explicitTransaction && this._shouldCreateTransaction()) {
    logger_1.logger('WriteBatch.commit', tag, 'Using transaction for commit');
    return this._firestore
      .request('beginTransaction', request, tag, true)
      .then(resp => {
        // Re-enter with the new transaction id; the branch above is then
        // skipped and the commit proceeds.
        return this.commit_({ transactionId: resp.transaction });
      });
  }
  const writes = this._ops.map(op => op());
  request.writes = [];
  for (const req of writes) {
    assert(req.write || req.transform, 'Either a write or transform must be set');
    // The precondition is attached to the write when present, otherwise to
    // the transform.
    if (req.precondition) {
      (req.write || req.transform).currentDocument = req.precondition;
    }
    // An operation with both a write and a transform contributes TWO
    // entries to request.writes; this is compensated for via `offset` below.
    if (req.write) {
      request.writes.push(req.write);
    }
    if (req.transform) {
      request.writes.push(req.transform);
    }
  }
  logger_1.logger('WriteBatch.commit', tag, 'Sending %d writes', request.writes.length);
  if (explicitTransaction) {
    request.transaction = explicitTransaction;
  }
  return this._firestore
    .request('commit', request, tag, /* allowRetries= */ false)
    .then(resp => {
      const writeResults = [];
      if (request.writes.length > 0) {
        assert(Array.isArray(resp.writeResults) &&
          request.writes.length === resp.writeResults.length,
          `Expected one write result per operation, but got ${resp.writeResults.length} results for ${request.writes.length} operations.`);
        const commitTime = timestamp_1.Timestamp.fromProto(resp.commitTime);
        let offset = 0;
        for (let i = 0; i < writes.length; ++i) {
          const writeRequest = writes[i];
          // Don't return two write results for a write that contains a
          // transform, as the fact that we have to split one write
          // operation into two distinct write requests is an implementation
          // detail.
          if (writeRequest.write && writeRequest.transform) {
            // The document transform is always sent last and produces the
            // latest update time.
            ++offset;
          }
          const writeResult = resp.writeResults[i + offset];
          // Fall back to the commit time when the server did not report a
          // per-write update time.
          writeResults.push(new WriteResult(writeResult.updateTime
            ? timestamp_1.Timestamp.fromProto(writeResult.updateTime)
            : commitTime));
        }
      }
      return writeResults;
    });
}
JavaScript
/**
 * Validates that `obj` is a plain object whose every top-level property is
 * acceptable Firestore document data. Field deletes are only permitted when
 * `allowDeletes` is truthy; transforms are always allowed.
 * Throws on the first invalid value.
 */
function validateDocumentData(arg, obj, allowDeletes) {
  if (!serializer_1.isPlainObject(obj)) {
    throw new Error(validate_1.customObjectMessage(arg, obj));
  }
  for (const [prop, value] of Object.entries(obj)) {
    serializer_1.validateUserInput(arg, value, 'Firestore document', {
      allowDeletes: allowDeletes ? 'all' : 'none',
      allowTransforms: true,
    }, new path_1.FieldPath(prop));
  }
}
JavaScript
/**
 * Validates that `obj` is a plain, non-empty object suitable for update().
 * Every field value is validated; throws if no field is present at all.
 */
function validateUpdateMap(arg, obj) {
  if (!serializer_1.isPlainObject(obj)) {
    throw new Error(validate_1.customObjectMessage(arg, obj));
  }
  const props = obj ? Object.keys(obj) : [];
  for (const prop of props) {
    validateFieldValue(arg, obj[prop], new path_1.FieldPath(prop));
  }
  if (props.length === 0) {
    throw new Error('At least one field must be updated.');
  }
}
JavaScript
runTransaction(updateFunction, transactionOptions) { validate_1.validateFunction('updateFunction', updateFunction); const defaultAttempts = 5; const tag = util_1.requestTag(); let attemptsRemaining; if (transactionOptions) { validate_1.validateObject('transactionOptions', transactionOptions); validate_1.validateInteger('transactionOptions.maxAttempts', transactionOptions.maxAttempts, { optional: true, minValue: 1 }); attemptsRemaining = transactionOptions.maxAttempts || defaultAttempts; } else { attemptsRemaining = defaultAttempts; } return this.initializeIfNeeded(tag).then(() => this._runTransaction(updateFunction, { requestTag: tag, attemptsRemaining })); }
runTransaction(updateFunction, transactionOptions) { validate_1.validateFunction('updateFunction', updateFunction); const defaultAttempts = 5; const tag = util_1.requestTag(); let attemptsRemaining; if (transactionOptions) { validate_1.validateObject('transactionOptions', transactionOptions); validate_1.validateInteger('transactionOptions.maxAttempts', transactionOptions.maxAttempts, { optional: true, minValue: 1 }); attemptsRemaining = transactionOptions.maxAttempts || defaultAttempts; } else { attemptsRemaining = defaultAttempts; } return this.initializeIfNeeded(tag).then(() => this._runTransaction(updateFunction, { requestTag: tag, attemptsRemaining })); }
JavaScript
async initializeIfNeeded(requestTag) { this._settingsFrozen = true; if (this._projectId === undefined) { this._projectId = await this._clientPool.run(requestTag, gapicClient => { return new Promise((resolve, reject) => { gapicClient.getProjectId((err, projectId) => { if (err) { logger_1.logger('Firestore._detectProjectId', null, 'Failed to detect project ID: %s', err); reject(err); } else { logger_1.logger('Firestore._detectProjectId', null, 'Detected project ID: %s', projectId); resolve(projectId); } }); }); }); } }
async initializeIfNeeded(requestTag) { this._settingsFrozen = true; if (this._projectId === undefined) { this._projectId = await this._clientPool.run(requestTag, gapicClient => { return new Promise((resolve, reject) => { gapicClient.getProjectId((err, projectId) => { if (err) { logger_1.logger('Firestore._detectProjectId', null, 'Failed to detect project ID: %s', err); reject(err); } else { logger_1.logger('Firestore._detectProjectId', null, 'Detected project ID: %s', projectId); resolve(projectId); } }); }); }); } }
JavaScript
createCallOptions() { return { otherArgs: { headers: Object.assign({ [CLOUD_RESOURCE_HEADER]: this.formattedName }, this._settings.customHeaders), }, }; }
createCallOptions() { return { otherArgs: { headers: Object.assign({ [CLOUD_RESOURCE_HEADER]: this.formattedName }, this._settings.customHeaders), }, }; }
JavaScript
_retry(attemptsRemaining, requestTag, func, delayMs = 0) { const self = this; const currentDelay = delayMs; const nextDelay = delayMs || 100; --attemptsRemaining; return new Promise(resolve => { setTimeout(resolve, currentDelay); }) .then(func) .then(result => { self._lastSuccessfulRequest = new Date().getTime(); return result; }) .catch(err => { if (err.code !== undefined && err.code !== GRPC_UNAVAILABLE) { logger_1.logger('Firestore._retry', requestTag, 'Request failed with unrecoverable error:', err); return Promise.reject(err); } if (attemptsRemaining === 0) { logger_1.logger('Firestore._retry', requestTag, 'Request failed with error:', err); return Promise.reject(err); } logger_1.logger('Firestore._retry', requestTag, 'Retrying request that failed with error:', err); return self._retry(attemptsRemaining, requestTag, func, nextDelay); }); }
_retry(attemptsRemaining, requestTag, func, delayMs = 0) { const self = this; const currentDelay = delayMs; const nextDelay = delayMs || 100; --attemptsRemaining; return new Promise(resolve => { setTimeout(resolve, currentDelay); }) .then(func) .then(result => { self._lastSuccessfulRequest = new Date().getTime(); return result; }) .catch(err => { if (err.code !== undefined && err.code !== GRPC_UNAVAILABLE) { logger_1.logger('Firestore._retry', requestTag, 'Request failed with unrecoverable error:', err); return Promise.reject(err); } if (attemptsRemaining === 0) { logger_1.logger('Firestore._retry', requestTag, 'Request failed with error:', err); return Promise.reject(err); } logger_1.logger('Firestore._retry', requestTag, 'Retrying request that failed with error:', err); return self._retry(attemptsRemaining, requestTag, func, nextDelay); }); }
JavaScript
request(methodName, request, requestTag, allowRetries) { const attempts = allowRetries ? MAX_REQUEST_RETRIES : 1; const callOptions = this.createCallOptions(); return this._clientPool.run(requestTag, gapicClient => { return this._retry(attempts, requestTag, () => { return new Promise((resolve, reject) => { logger_1.logger('Firestore.request', requestTag, 'Sending request: %j', request); gapicClient[methodName](request, callOptions, (err, result) => { if (err) { logger_1.logger('Firestore.request', requestTag, 'Received error:', err); reject(err); } else { logger_1.logger('Firestore.request', requestTag, 'Received response: %j', result); resolve(result); } }); }); }); }); }
request(methodName, request, requestTag, allowRetries) { const attempts = allowRetries ? MAX_REQUEST_RETRIES : 1; const callOptions = this.createCallOptions(); return this._clientPool.run(requestTag, gapicClient => { return this._retry(attempts, requestTag, () => { return new Promise((resolve, reject) => { logger_1.logger('Firestore.request', requestTag, 'Sending request: %j', request); gapicClient[methodName](request, callOptions, (err, result) => { if (err) { logger_1.logger('Firestore.request', requestTag, 'Received error:', err); reject(err); } else { logger_1.logger('Firestore.request', requestTag, 'Received response: %j', result); resolve(result); } }); }); }); }); }
JavaScript
/**
 * Opens a server-streaming GAPIC RPC and resolves with a stream that logs
 * every received chunk. The GAPIC client is held until the stream ends.
 */
readStream(methodName, request, requestTag, allowRetries) {
  const attempts = allowRetries ? MAX_REQUEST_RETRIES : 1;
  const callOptions = this.createCallOptions();
  const result = new util_1.Deferred();
  this._clientPool.run(requestTag, gapicClient => {
    // While we return the stream to the callee early, we don't want to
    // release the GAPIC client until the callee has finished processing the
    // stream.
    const lifetime = new util_1.Deferred();
    this._retry(attempts, requestTag, async () => {
      logger_1.logger('Firestore.readStream', requestTag, 'Sending request: %j', request);
      const stream = gapicClient[methodName](request, callOptions);
      // Pass-through transform that logs every chunk before forwarding it.
      const logStream = through2.obj(function (chunk, enc, callback) {
        logger_1.logger('Firestore.readStream', requestTag, 'Received response: %j', chunk);
        this.push(chunk);
        callback();
      });
      const resultStream = bun([stream, logStream]);
      // Any terminal stream event releases the pooled client.
      resultStream.on('close', lifetime.resolve);
      resultStream.on('end', lifetime.resolve);
      resultStream.on('error', lifetime.resolve);
      await this._initializeStream(resultStream, requestTag);
      result.resolve(resultStream);
    }).catch(err => {
      // Initialization failed: release the client and surface the error.
      lifetime.resolve();
      result.reject(err);
    });
    return lifetime.promise;
  });
  return result.promise;
}
JavaScript
/**
 * Opens a bidirectional GAPIC stream and resolves with a stream that logs
 * every received chunk. Unlike readStream(), the initial `request` is
 * forwarded to _initializeStream() to be written onto the stream. The
 * GAPIC client is held until the stream terminates.
 */
readWriteStream(methodName, request, requestTag, allowRetries) {
  const attempts = allowRetries ? MAX_REQUEST_RETRIES : 1;
  const callOptions = this.createCallOptions();
  const result = new util_1.Deferred();
  this._clientPool.run(requestTag, gapicClient => {
    // While we return the stream to the callee early, we don't want to
    // release the GAPIC client until the callee has finished processing the
    // stream.
    const lifetime = new util_1.Deferred();
    this._retry(attempts, requestTag, async () => {
      logger_1.logger('Firestore.readWriteStream', requestTag, 'Opening stream');
      const requestStream = gapicClient[methodName](callOptions);
      // Pass-through transform that logs every chunk before forwarding it.
      const logStream = through2.obj(function (chunk, enc, callback) {
        logger_1.logger('Firestore.readWriteStream', requestTag, 'Received response: %j', chunk);
        this.push(chunk);
        callback();
      });
      const resultStream = bun([requestStream, logStream]);
      // Any terminal stream event ('finish' covers the write side) releases
      // the pooled client.
      resultStream.on('close', lifetime.resolve);
      resultStream.on('finish', lifetime.resolve);
      resultStream.on('end', lifetime.resolve);
      resultStream.on('error', lifetime.resolve);
      await this._initializeStream(resultStream, requestTag, request);
      result.resolve(resultStream);
    }).catch(err => {
      // Initialization failed: release the client and surface the error.
      lifetime.resolve();
      result.reject(err);
    });
    return lifetime.promise;
  });
  return result.promise;
}
JavaScript
/**
 * Wraps a scale constructor so every produced scale carries a `type` tag
 * and an `invertRange` method (derived from `invert` or `invertExtent`
 * when available). The returned constructor also exposes its metadata set.
 */
function create(type, constructor, metadata) {
  const ctr = function scale() {
    const s = constructor();
    if (!s.invertRange) {
      // Derive invertRange from whichever inversion primitive the
      // underlying scale provides, if any.
      if (s.invert) {
        s.invertRange = invertRange(s);
      } else if (s.invertExtent) {
        s.invertRange = invertRangeExtent(s);
      } else {
        s.invertRange = undefined;
      }
    }
    s.type = type;
    return s;
  };
  ctr.metadata = vegaUtil.toSet(vegaUtil.array(metadata));
  return ctr;
}
JavaScript
// Records, for each dimension, every distinct value observed across the
// given aggregate cells. `dims`, `n` and `vals` are free variables from the
// enclosing closure -- presumably n === dims.length and vals[i] is the
// value set for dimension i; TODO confirm against the enclosing transform.
function collect(cells) {
  var key, i, t, v;
  for (key in cells) {
    t = cells[key].tuple;
    for (i=0; i<n; ++i) {
      // store the tuple's value for dimension i, keyed by itself
      vals[i][(v = t[dims[i]])] = v;
    }
  }
}
JavaScript
// Recursively enumerates the cross-product of collected dimension values,
// creating an aggregate cell for each combination not already present in
// `curr`. Cell keys are the '|'-joined dimension values. `dims`, `vals`,
// `n`, `curr` and `aggr` are free variables from the enclosing closure.
function generate(base, tuple, index) {
  var name = dims[index],
      v = vals[index++],  // note: index now refers to the NEXT dimension
      k, key;
  for (k in v) {
    tuple[name] = v[k];
    key = base ? base + '|' + k : k;
    if (index < n) generate(key, tuple, index);
    else if (!curr[key]) aggr.cell(key, tuple);
  }
}
JavaScript
/**
 * Instantiates a distribution object from a JSON definition. The `function`
 * entry selects the distribution; every other entry is interpreted as a
 * data field, a nested mixture definition, or a plain parameter setter.
 * Errors (via vegaUtil.error) on unknown distribution names.
 */
function parse(def, data) {
  var func = def[FUNCTION];
  if (!vegaUtil.hasOwnProperty(Distributions, func)) {
    vegaUtil.error('Unknown distribution function: ' + func);
  }
  var d = Distributions[func]();
  for (var name in def) {
    // if data field, extract values
    if (name === FIELD) {
      // `def.from` overrides the default data source when present
      d.data((def.from || data()).map(def[name]));
    }
    // if distribution mixture, recurse to parse each definition
    else if (name === DISTRIBUTIONS) {
      d[name](def[name].map(function(_) { return parse(_, data); }));
    }
    // otherwise, simply set the parameter -- only keys that match a method
    // on the distribution object are applied; unknown keys are ignored
    else if (typeof d[name] === FUNCTION) {
      d[name](def[name]);
    }
  }
  return d;
}
JavaScript
// Map field accessors to output names, preferring an explicit `as` entry
// over the accessor's derived name. Returns null when fields is falsy.
function fieldNames(fields, as) {
  if (!fields) return null;
  return fields.map((f, i) => as[i] || vegaUtil.accessorName(f));
}
JavaScript
// Build aggregate transform parameters for a pivot operation.
function aggregateParams(_, pulse) {
  const key = _.field;
  const value = _.value;
  const op = (_.op === 'count' ? '__count__' : _.op) || 'sum';
  const fields = vegaUtil.accessorFields(key).concat(vegaUtil.accessorFields(value));
  const keys = pivotKeys(key, _.limit || 0, pulse);

  // if data stream content changes, pivot fields may change;
  // flag parameter modification to ensure re-initialization
  if (pulse.changed()) _.set('__pivot__', null, null, true);

  return {
    key: _.key,
    groupby: _.groupby,
    ops: keys.map(() => op),
    fields: keys.map(k => get(k, key, value, fields)),
    as: keys.map(k => k + ''),
    modified: _.modified.bind(_)
  };
}
JavaScript
// Collect up to `limit` distinct key values from the pulse source,
// sorted ascending with null/undefined first and NaN last.
function pivotKeys(key, limit, pulse) {
  const seen = {};
  const list = [];

  pulse.visit(pulse.SOURCE, t => {
    const k = key(t);
    if (!seen[k]) {
      seen[k] = 1;
      list.push(k);
    }
  });

  // TODO? Move this comparator to vega-util?
  list.sort(function(u, v) {
    return (u < v || u == null) && v != null ? -1
      : (u > v || v == null) && u != null ? 1
      : ((v = v instanceof Date ? +v : v), (u = u instanceof Date ? +u : u)) !== u && v === v ? -1
      : v !== v && u === u ? 1
      : 0;
  });

  return limit ? list.slice(0, limit) : list;
}
JavaScript
// Compute the [min, max] of an array with an optional accessor `f`,
// skipping null/undefined/NaN. Returns [undefined, undefined] when the
// array is empty/missing or contains no valid values.
function extent(array, f) {
  let min, max;
  if (array && array.length) {
    const n = array.length;
    const get = f == null ? (i => array[i]) : (i => f(array[i]));
    let i = 0;
    let v = get(i);
    // scan for the first valid (non-null, non-NaN) value
    while (i < n && (v == null || v !== v)) v = get(++i);
    min = max = v;
    // fold remaining values; NaN fails both comparisons and is skipped
    for (; i < n; ++i) {
      v = get(i);
      if (v != null) {
        if (v < min) min = v;
        if (v > max) max = v;
      }
    }
  }
  return [min, max];
}
JavaScript
// Wrap a comparator with a tuple-id tiebreaker so sorts are stable;
// optionally dereference items through accessor `f` for the id lookup.
// Returns null when no comparator is supplied.
function stableCompare(cmp, f) {
  if (!cmp) return null;
  return f
    ? (a, b) => cmp(a, b) || (tupleid(f(a)) - tupleid(f(b)))
    : (a, b) => cmp(a, b) || (tupleid(a) - tupleid(b));
}
JavaScript
// Create a loader factory bound to the given fetch and file-system APIs.
function loaderFactory(fetch, fs) {
  return function(options) {
    return {
      options: options || {},
      sanitize: sanitize,
      load: load,
      fileAccess: !!fs, // true only when a file-system API is available
      file: fileLoader(fs),
      http: httpLoader(fetch)
    };
  };
}
JavaScript
// Sanitize a URI, then dispatch to the file or http loader depending on
// the sanitized result's localFile flag.
async function load(uri, options) {
  const opt = await this.sanitize(uri, options);
  const url = opt.href;
  return opt.localFile ? this.file(url) : this.http(url, options);
}
JavaScript
/**
 * Resolve and sanitize a URI prior to loading.
 * Rejects null/non-string/disallowed URIs; prepends options.baseURL to
 * relative URLs (inserting a slash if needed); decides between file-system
 * and http loading based on the file: protocol, options.mode, or the
 * fileAccess flag; and applies options.defaultProtocol to protocol-relative
 * ('//') URIs. Returns {href, target?, rel?} with a non-enumerable
 * `localFile` boolean indicating a file-system load.
 * NOTE(review): this definition appears twice in a row (dataset artifact);
 * both copies are byte-identical.
 */
async function sanitize(uri, options) { options = extend({}, this.options, options); const fileAccess = this.fileAccess, result = {href: null}; let isFile, loadFile, base; const isAllowed = allowed_re.test(uri.replace(whitespace_re, '')); if (uri == null || typeof uri !== 'string' || !isAllowed) { error('Sanitize failure, invalid URI: ' + $(uri)); } const hasProtocol = protocol_re.test(uri); // if relative url (no protocol/host), prepend baseURL if ((base = options.baseURL) && !hasProtocol) { // Ensure that there is a slash between the baseURL (e.g. hostname) and url if (!uri.startsWith('/') && base[base.length-1] !== '/') { uri = '/' + uri; } uri = base + uri; } // should we load from file system? loadFile = (isFile = uri.startsWith(fileProtocol)) || options.mode === 'file' || options.mode !== 'http' && !hasProtocol && fileAccess; if (isFile) { // strip file protocol uri = uri.slice(fileProtocol.length); } else if (uri.startsWith('//')) { if (options.defaultProtocol === 'file') { // if is file, strip protocol and set loadFile flag uri = uri.slice(2); loadFile = true; } else { // if relative protocol (starts with '//'), prepend default protocol uri = (options.defaultProtocol || 'http') + ':' + uri; } } // set non-enumerable mode flag to indicate local file load Object.defineProperty(result, 'localFile', {value: !!loadFile}); // set uri result.href = uri; // set default result target, if specified if (options.target) { result.target = options.target + ''; } // set default result rel, if specified (#1542) if (options.rel) { result.rel = options.rel + ''; } // return return result; }
async function sanitize(uri, options) { options = extend({}, this.options, options); const fileAccess = this.fileAccess, result = {href: null}; let isFile, loadFile, base; const isAllowed = allowed_re.test(uri.replace(whitespace_re, '')); if (uri == null || typeof uri !== 'string' || !isAllowed) { error('Sanitize failure, invalid URI: ' + $(uri)); } const hasProtocol = protocol_re.test(uri); // if relative url (no protocol/host), prepend baseURL if ((base = options.baseURL) && !hasProtocol) { // Ensure that there is a slash between the baseURL (e.g. hostname) and url if (!uri.startsWith('/') && base[base.length-1] !== '/') { uri = '/' + uri; } uri = base + uri; } // should we load from file system? loadFile = (isFile = uri.startsWith(fileProtocol)) || options.mode === 'file' || options.mode !== 'http' && !hasProtocol && fileAccess; if (isFile) { // strip file protocol uri = uri.slice(fileProtocol.length); } else if (uri.startsWith('//')) { if (options.defaultProtocol === 'file') { // if is file, strip protocol and set loadFile flag uri = uri.slice(2); loadFile = true; } else { // if relative protocol (starts with '//'), prepend default protocol uri = (options.defaultProtocol || 'http') + ':' + uri; } } // set non-enumerable mode flag to indicate local file load Object.defineProperty(result, 'localFile', {value: !!loadFile}); // set uri result.href = uri; // set default result target, if specified if (options.target) { result.target = options.target + ''; } // set default result rel, if specified (#1542) if (options.rel) { result.rel = options.rel + ''; } // return return result; }
JavaScript
// Load data from a URL and parse it with the given format.
// Returns {data, status}: status 0 on success, -1 on load failure,
// -2 on parse ("ingestion") failure; failures are logged as warnings.
async function request(url, format) {
  const df = this;
  let status = 0;
  let data;

  try {
    data = await df.loader().load(url, {
      context: 'dataflow',
      response: responseType(format && format.type)
    });
    try {
      data = parse$1(data, format);
    } catch (err) {
      status = -2;
      df.warn('Data ingestion failed', url, err);
    }
  } catch (err) {
    status = -1;
    df.warn('Loading failed', url, err);
  }

  return {data, status};
}
JavaScript
/**
 * Run one propagation pass of the dataflow graph.
 * Awaits any pending datasets and an optional `prerun` callback, stamps a
 * new Pulse, then evaluates touched operators in rank order from a priority
 * heap — re-queuing operators whose rank changed, awaiting operators that
 * return promises, and deferring `.async` work for a later run. Afterwards
 * it resets pulse state, reports any error, runs `runAfter` callbacks by
 * descending priority, invokes `postrun`, and schedules collected async
 * callbacks on a fresh `runAsync`. Re-entrant calls bail out via reentrant().
 * NOTE(review): this definition appears twice in a row (dataset artifact);
 * both copies are byte-identical.
 */
async function evaluate(encode, prerun, postrun) { const df = this, level = df.logLevel(), async = []; // if the pulse value is set, this is a re-entrant call if (df._pulse) return reentrant(df); // wait for pending datasets to load if (df._pending) { await df._pending; } // invoke prerun function, if provided if (prerun) await asyncCallback(df, prerun); // exit early if there are no updates if (!df._touched.length) { df.info('Dataflow invoked, but nothing to do.'); return df; } // increment timestamp clock let stamp = ++df._clock, count = 0, op, next, dt, error; // set the current pulse df._pulse = new Pulse(df, stamp, encode); if (level >= Info) { dt = Date.now(); df.debug('-- START PROPAGATION (' + stamp + ') -----'); } // initialize priority queue, reset touched operators df._touched.forEach(op => df._enqueue(op, true)); df._touched = UniqueList(id); try { while (df._heap.size() > 0) { // dequeue operator with highest priority op = df._heap.pop(); // re-queue if rank changed if (op.rank !== op.qrank) { df._enqueue(op, true); continue; } // otherwise, evaluate the operator next = op.run(df._getPulse(op, encode)); if (next.then) { // await if operator returns a promise directly next = await next; } else if (next.async) { // queue parallel asynchronous execution async.push(next.async); next = StopPropagation; } if (level >= Debug) { df.debug(op.id, next === StopPropagation ? 
'STOP' : next, op); } // propagate evaluation, enqueue dependent operators if (next !== StopPropagation) { if (op._targets) op._targets.forEach(op => df._enqueue(op)); } // increment visit counter ++count; } } catch (err) { df._heap.clear(); error = err; } // reset pulse map df._input = {}; df._pulse = null; if (level >= Info) { dt = Date.now() - dt; df.info('> Pulse ' + stamp + ': ' + count + ' operators; ' + dt + 'ms'); } if (error) { df._postrun = []; df.error(error); } // invoke callbacks queued via runAfter if (df._postrun.length) { const pr = df._postrun.sort((a, b) => b.priority - a.priority); df._postrun = []; for (let i=0; i<pr.length; ++i) { await asyncCallback(df, pr[i].callback); } } // invoke postrun function, if provided if (postrun) await asyncCallback(df, postrun); // handle non-blocking asynchronous callbacks if (async.length) { Promise.all(async).then(cb => df.runAsync(null, () => { cb.forEach(f => { try { f(df); } catch (err) { df.error(err); } }); })); } return df; }
async function evaluate(encode, prerun, postrun) { const df = this, level = df.logLevel(), async = []; // if the pulse value is set, this is a re-entrant call if (df._pulse) return reentrant(df); // wait for pending datasets to load if (df._pending) { await df._pending; } // invoke prerun function, if provided if (prerun) await asyncCallback(df, prerun); // exit early if there are no updates if (!df._touched.length) { df.info('Dataflow invoked, but nothing to do.'); return df; } // increment timestamp clock let stamp = ++df._clock, count = 0, op, next, dt, error; // set the current pulse df._pulse = new Pulse(df, stamp, encode); if (level >= Info) { dt = Date.now(); df.debug('-- START PROPAGATION (' + stamp + ') -----'); } // initialize priority queue, reset touched operators df._touched.forEach(op => df._enqueue(op, true)); df._touched = UniqueList(id); try { while (df._heap.size() > 0) { // dequeue operator with highest priority op = df._heap.pop(); // re-queue if rank changed if (op.rank !== op.qrank) { df._enqueue(op, true); continue; } // otherwise, evaluate the operator next = op.run(df._getPulse(op, encode)); if (next.then) { // await if operator returns a promise directly next = await next; } else if (next.async) { // queue parallel asynchronous execution async.push(next.async); next = StopPropagation; } if (level >= Debug) { df.debug(op.id, next === StopPropagation ? 
'STOP' : next, op); } // propagate evaluation, enqueue dependent operators if (next !== StopPropagation) { if (op._targets) op._targets.forEach(op => df._enqueue(op)); } // increment visit counter ++count; } } catch (err) { df._heap.clear(); error = err; } // reset pulse map df._input = {}; df._pulse = null; if (level >= Info) { dt = Date.now() - dt; df.info('> Pulse ' + stamp + ': ' + count + ' operators; ' + dt + 'ms'); } if (error) { df._postrun = []; df.error(error); } // invoke callbacks queued via runAfter if (df._postrun.length) { const pr = df._postrun.sort((a, b) => b.priority - a.priority); df._postrun = []; for (let i=0; i<pr.length; ++i) { await asyncCallback(df, pr[i].callback); } } // invoke postrun function, if provided if (postrun) await asyncCallback(df, postrun); // handle non-blocking asynchronous callbacks if (async.length) { Promise.all(async).then(cb => df.runAsync(null, () => { cb.forEach(f => { try { f(df); } catch (err) { df.error(err); } }); })); } return df; }
JavaScript
// Queue an evaluation run behind any in-flight run, tracking the running
// promise so subsequent calls serialize correctly.
async function runAsync(encode, prerun, postrun) {
  // wait for any previously queued evaluation to settle
  while (this._running) await this._running;

  // run the dataflow and clear the tracking slot once it settles
  const clear = () => { this._running = null; };
  this._running = this.evaluate(encode, prerun, postrun);
  this._running.then(clear, clear);
  return this._running;
}
JavaScript
// Invoke a callback after the current propagation completes.
// If a pulse is in flight (or `enqueue` is set), queue it with the given
// priority; otherwise invoke immediately, routing errors to this.error.
function runAfter(callback, enqueue, priority) {
  if (!this._pulse && !enqueue) {
    // no propagation running: invoke right away
    try {
      callback(this);
    } catch (err) {
      this.error(err);
    }
    return;
  }
  // propagation in progress: defer until the run completes
  this._postrun.push({
    priority: priority || 0,
    callback: callback
  });
}
JavaScript
// Add an operator to the evaluation heap if it has not yet been visited
// this timestamp (or if `force` is set), recording its queue rank so later
// rank changes can be detected during propagation.
function enqueue(op, force) {
  const stale = op.stamp < this._clock;
  if (stale) op.stamp = this._clock;
  if (stale || force) {
    op.qrank = op.rank;
    this._heap.push(op);
  }
}
JavaScript
// Touch an operator and install an input pulse carrying the given
// changeset, to be picked up on the next propagation pass.
function pulse(op, changeset, options) {
  this.touch(op, options || NO_OPT);

  // if a run is in flight the changeset applies to the current clock,
  // otherwise it targets the next timestamp
  const p = new Pulse(this, this._clock + (this._pulse ? 0 : 1));
  const t = (op.pulse && op.pulse.source) || [];
  p.target = op;

  this._input[op.id] = changeset.pulse(p, t);
  return this;
}
JavaScript
// Dataflow constructor: initialize logging, clock/rank counters, the
// (optional) data loader, and propagation bookkeeping structures.
function Dataflow() {
  this.logger(logger());
  this.logLevel(Error$1);

  this._clock = 0;
  this._rank = 0;

  try {
    this._loader = loader();
  } catch (e) {
    // loader module unavailable (e.g. restricted environment); proceed without
  }

  this._touched = UniqueList(id);
  this._input = {};
  this._pulse = null;
  this._heap = Heap((a, b) => a.qrank - b.qrank);
  this._postrun = [];
}
JavaScript
/**
 * Floyd–Rivest selection: partially sort `array` in place so the element
 * at index k lands in its final sorted position, with smaller elements
 * (per `compare`) to its left and larger ones to its right. For wide
 * ranges (> 600) it first recurses on a sampled sub-range to tighten the
 * bounds before partitioning around the pivot array[k]. Returns the
 * (mutated) array. Relies on external helpers `swap` and `ascending`.
 * NOTE(review): this definition appears twice in a row (dataset artifact);
 * both copies are byte-identical.
 */
function quickselect(array, k, left = 0, right = array.length - 1, compare = ascending) { while (right > left) { if (right - left > 600) { const n = right - left + 1; const m = k - left + 1; const z = Math.log(n); const s = 0.5 * Math.exp(2 * z / 3); const sd = 0.5 * Math.sqrt(z * s * (n - s) / n) * (m - n / 2 < 0 ? -1 : 1); const newLeft = Math.max(left, Math.floor(k - m * s / n + sd)); const newRight = Math.min(right, Math.floor(k + (n - m) * s / n + sd)); quickselect(array, k, newLeft, newRight, compare); } const t = array[k]; let i = left; let j = right; swap(array, left, k); if (compare(array[right], t) > 0) swap(array, left, right); while (i < j) { swap(array, i, j), ++i, --j; while (compare(array[i], t) < 0) ++i; while (compare(array[j], t) > 0) --j; } if (compare(array[left], t) === 0) swap(array, left, j); else ++j, swap(array, j, right); if (j <= k) left = j + 1; if (k <= j) right = j - 1; } return array; }
function quickselect(array, k, left = 0, right = array.length - 1, compare = ascending) { while (right > left) { if (right - left > 600) { const n = right - left + 1; const m = k - left + 1; const z = Math.log(n); const s = 0.5 * Math.exp(2 * z / 3); const sd = 0.5 * Math.sqrt(z * s * (n - s) / n) * (m - n / 2 < 0 ? -1 : 1); const newLeft = Math.max(left, Math.floor(k - m * s / n + sd)); const newRight = Math.min(right, Math.floor(k + (n - m) * s / n + sd)); quickselect(array, k, newLeft, newRight, compare); } const t = array[k]; let i = left; let j = right; swap(array, left, k); if (compare(array[right], t) > 0) swap(array, left, right); while (i < j) { swap(array, i, j), ++i, --j; while (compare(array[i], t) < 0) ++i; while (compare(array[j], t) > 0) --j; } if (compare(array[left], t) === 0) swap(array, left, j); else ++j, swap(array, j, right); if (j <= k) left = j + 1; if (k <= j) right = j - 1; } return array; }
JavaScript
// Parse a distribution definition object into a distribution instance.
function parse$2(def, data) {
  const func = def[FUNCTION];
  if (!hasOwnProperty(Distributions, func)) {
    error('Unknown distribution function: ' + func);
  }
  const d = Distributions[func]();
  for (const name in def) {
    if (name === FIELD) {
      // data field: extract values from the source data
      d.data((def.from || data()).map(def[name]));
    } else if (name === DISTRIBUTIONS) {
      // mixture: recursively parse each sub-distribution definition
      d[name](def[name].map(_ => parse$2(_, data)));
    } else if (typeof d[name] === FUNCTION) {
      // otherwise set the named parameter directly
      d[name](def[name]);
    }
  }
  return d;
}
JavaScript
// Build aggregate transform parameters for a pivot operation.
function aggregateParams(_, pulse) {
  const key = _.field;
  const value = _.value;
  const op = (_.op === 'count' ? '__count__' : _.op) || 'sum';
  const fields = accessorFields(key).concat(accessorFields(value));
  const keys = pivotKeys(key, _.limit || 0, pulse);

  // if data stream content changes, pivot fields may change;
  // flag parameter modification to ensure re-initialization
  if (pulse.changed()) _.set('__pivot__', null, null, true);

  return {
    key: _.key,
    groupby: _.groupby,
    ops: keys.map(() => op),
    fields: keys.map(k => get(k, key, value, fields)),
    as: keys.map(k => k + ''),
    modified: _.modified.bind(_)
  };
}
JavaScript
// Obtain (creating if necessary) the SVG DOM node for a scenegraph item,
// and (re-)insert it when it is detached from the SVG or out of sibling order.
function bind(item, el, sibling, tag, svg) {
  let node = item._svg;

  if (!node) {
    const doc = el.ownerDocument;
    node = domCreate(doc, tag, ns);
    item._svg = node;

    if (item.mark) {
      node.__data__ = item;
      node.__values__ = {fill: 'default'};

      // groups get background, content, and foreground child elements
      if (tag === 'g') {
        const bg = domCreate(doc, 'path', ns);
        node.appendChild(bg);
        bg.__data__ = item;

        const cg = domCreate(doc, 'g', ns);
        node.appendChild(cg);
        cg.__data__ = item;

        const fg = domCreate(doc, 'path', ns);
        node.appendChild(fg);
        fg.__data__ = item;
        fg.__values__ = {fill: 'default'};
      }
    }
  }

  // (re-)insert if (a) not contained in SVG or (b) sibling order has changed
  if (node.ownerSVGElement !== svg || siblingCheck(node, sibling)) {
    el.insertBefore(node, sibling ? sibling.nextSibling : el.firstChild);
  }

  return node;
}
JavaScript
// Build a config lookup for a given orient: orient-specific settings win
// over general config values, which win over the supplied default.
function lookup$2(config, orient) {
  const opt = config[orient] || {};
  return (key, d) => {
    if (opt[key] != null) return opt[key];
    if (config[key] != null) return config[key];
    return d;
  };
}
JavaScript
// Return the maximum explicit legend offset, or `value` if none is set.
function offsets(legends, value) {
  let max = -Infinity;
  for (const item of legends) {
    if (item.offset != null) max = Math.max(max, item.offset);
  }
  return max > -Infinity ? max : value;
}
JavaScript
/**
 * Wrap a scale constructor so generated scales carry a `type` tag,
 * an `invertRange` method, and a metadata set on the constructor.
 */
function create(type, constructor, metadata) {
  const ctr = function scale() {
    const s = constructor();
    if (!s.invertRange) {
      // derive invertRange from whichever inversion API the scale exposes
      let inv;
      if (s.invert) inv = invertRange(s);
      else if (s.invertExtent) inv = invertRangeExtent(s);
      s.invertRange = inv;
    }
    s.type = type;
    return s;
  };
  ctr.metadata = toSet(array(metadata));
  return ctr;
}
JavaScript
// Resolve a tick label format function for a scale, honoring explicit
// time/UTC format types and filtering sub-tick labels on log scales.
function tickFormat(scale, count, specifier, formatType, noSkip) {
  const type = scale.type;
  let format;

  if (type === Time || formatType === Time) {
    format = timeFormat(specifier);
  } else if (type === UTC || formatType === UTC) {
    format = utcFormat(specifier);
  } else if (scale.tickFormat) {
    format = scale.tickFormat(count, specifier);
  } else {
    format = specifier ? d3Format.format(specifier) : String;
  }

  if (isLogarithmic(type)) {
    // log scales: use variable precision, optionally skipping minor ticks
    const logfmt = variablePrecision(specifier);
    format = noSkip || scale.bins ? logfmt : filter$1(format, logfmt);
  }

  return format;
}
JavaScript
/**
 * 2D kernel density estimation over a gridded, box-blurred histogram.
 * Configurable via x/y accessors, point weights, per-axis bandwidths,
 * output size, and cell size (power of two). The returned estimator maps
 * (data, counts) to a raster of density values plus grid geometry.
 * BUG FIX: density.size previously validated _0 twice with `&&`
 * (`!(_0 >= 0) && !(_0 >= 0)`), so the height was never checked and
 * sizes like [5, -1] were silently accepted.
 */
function density2D() {
  var x = d => d[0],
      y = d => d[1],
      weight = one,
      bandwidth = [-1, -1],
      dx = 960,
      dy = 500,
      k = 2; // log2(cellSize)

  function density(data, counts) {
    const rx = radius(bandwidth[0], data, x) >> k, // blur x-radius
          ry = radius(bandwidth[1], data, y) >> k, // blur y-radius
          ox = rx ? rx + 2 : 0, // x-offset padding for blur
          oy = ry ? ry + 2 : 0, // y-offset padding for blur
          n = 2 * ox + (dx >> k), // grid width
          m = 2 * oy + (dy >> k), // grid height
          values0 = new Float32Array(n * m),
          values1 = new Float32Array(n * m);

    let values = values0;

    // accumulate weighted point counts into grid cells
    data.forEach(d => {
      const xi = ox + (+x(d) >> k),
            yi = oy + (+y(d) >> k);
      if (xi >= 0 && xi < n && yi >= 0 && yi < m) {
        values0[xi + yi * n] += +weight(d);
      }
    });

    // approximate a Gaussian blur with three box-blur passes per axis
    if (rx > 0 && ry > 0) {
      blurX(n, m, values0, values1, rx);
      blurY(n, m, values1, values0, ry);
      blurX(n, m, values0, values1, rx);
      blurY(n, m, values1, values0, ry);
      blurX(n, m, values0, values1, rx);
      blurY(n, m, values1, values0, ry);
    } else if (rx > 0) {
      blurX(n, m, values0, values1, rx);
      blurX(n, m, values1, values0, rx);
      blurX(n, m, values0, values1, rx);
      values = values1;
    } else if (ry > 0) {
      blurY(n, m, values0, values1, ry);
      blurY(n, m, values1, values0, ry);
      blurY(n, m, values0, values1, ry);
      values = values1;
    }

    // scale to counts per square pixel, or normalize to a probability density
    const s = counts ? Math.pow(2, -2 * k) : 1 / sum(values);
    for (let i = 0, sz = n * m; i < sz; ++i) values[i] *= s;

    return {
      values: values,
      scale: 1 << k,
      width: n,
      height: m,
      x1: ox,
      y1: oy,
      x2: ox + (dx >> k),
      y2: oy + (dy >> k)
    };
  }

  density.x = function(_) {
    return arguments.length ? (x = number$4(_), density) : x;
  };

  density.y = function(_) {
    return arguments.length ? (y = number$4(_), density) : y;
  };

  density.weight = function(_) {
    return arguments.length ? (weight = number$4(_), density) : weight;
  };

  density.size = function(_) {
    if (!arguments.length) return [dx, dy];
    const _0 = Math.ceil(_[0]),
          _1 = Math.ceil(_[1]);
    // reject if either dimension is negative or NaN (fix: was `_0` twice, `&&`)
    if (!(_0 >= 0) || !(_1 >= 0)) error('invalid size');
    return dx = _0, dy = _1, density;
  };

  density.cellSize = function(_) {
    if (!arguments.length) return 1 << k;
    if (!((_ = +_) >= 1)) error('invalid cell size');
    k = Math.floor(Math.log(_) / Math.LN2);
    return density;
  };

  density.bandwidth = function(_) {
    if (!arguments.length) return bandwidth;
    _ = array(_);
    if (_.length === 1) _ = [+_[0], +_[0]];
    if (_.length !== 2) error('invalid bandwidth');
    return bandwidth = _, density;
  };

  return density;
}
JavaScript
// Wrap a d3-geo projection constructor: tag instances with `type`,
// attach a geo path generator, and provide a deep `copy` method.
function create$1(type, constructor) {
  return function projection() {
    const p = constructor();
    p.type = type;
    p.path = d3Geo.geoPath().projection(p);
    p.copy = p.copy || function() {
      const c = projection();
      // replicate every supported projection property onto the copy
      projectionProperties.forEach(prop => {
        if (p[prop]) c[prop](p[prop]());
      });
      c.path.pointRadius(p.path.pointRadius());
      return c;
    };
    return p;
  };
}
JavaScript
// Does accessor `f` reference any of the resolved fields
// ($x, $y, $value, $max)? Non-functions never do.
function dependency(f) {
  if (!isFunction(f)) return false;
  const fields = toSet(accessorFields(f));
  return fields.$x || fields.$y || fields.$value || fields.$max;
}
JavaScript
function lookup$3(tree, key, filter) { var map = {}; tree.each(function(node) { var t = node.data; if (filter(t)) map[key(t)] = node; }); tree.lookup = map; return tree; }
/**
 * Attach a lookup map (key -> node) to a hierarchy tree. Only nodes
 * whose data passes the filter are indexed. Returns the tree itself,
 * with the map stored on its `lookup` property.
 */
function lookup$3(tree, key, filter) {
  const map = {};
  tree.each(node => {
    const datum = node.data;
    if (filter(datum)) map[key(datum)] = node;
  });
  tree.lookup = map;
  return tree;
}
JavaScript
/**
 * Append a triangle (vertex indices i0, i1, i2) to the triangulation and
 * link its three new halfedges to a, b, c via _link (presumably adjacent
 * halfedge ids or a sentinel for "none" — confirm against _link).
 * Returns the starting slot of the triangle in the _triangles array.
 */
_addTriangle(i0, i1, i2, a, b, c) {
  const t = this.trianglesLen;

  // record the triangle's three vertex indices
  this._triangles[t] = i0;
  this._triangles[t + 1] = i1;
  this._triangles[t + 2] = i2;

  // link each of the three new halfedges with its counterpart
  this._link(t, a);
  this._link(t + 1, b);
  this._link(t + 2, c);

  this.trianglesLen += 3;

  return t;
}
/**
 * Append a triangle (vertex indices i0, i1, i2) to the triangulation and
 * link its three new halfedges to a, b, c via _link (presumably adjacent
 * halfedge ids or a sentinel for "none" — confirm against _link).
 * Returns the starting slot of the triangle in the _triangles array.
 */
_addTriangle(i0, i1, i2, a, b, c) {
  const t = this.trianglesLen;

  // record the triangle's three vertex indices
  this._triangles[t] = i0;
  this._triangles[t + 1] = i1;
  this._triangles[t + 2] = i2;

  // link each of the three new halfedges with its counterpart
  this._link(t, a);
  this._link(t + 1, b);
  this._link(t + 2, c);

  this.trianglesLen += 3;

  return t;
}
JavaScript
/**
 * Semi-robust 2D orientation test. Computes the cross-product
 * determinant of (p, r, q); returns the signed determinant when its
 * magnitude exceeds the floating-point error bound, or 0 when the
 * result is too close to call ("unsure").
 */
function orientIfSure(px, py, rx, ry, qx, qy) {
  const det1 = (ry - py) * (qx - px);
  const det2 = (rx - px) * (qy - py);
  const diff = det1 - det2;
  // threshold scales with the magnitude of the operands
  const bound = 3.3306690738754716e-16 * Math.abs(det1 + det2);
  return Math.abs(diff) >= bound ? diff : 0;
}
/**
 * Semi-robust 2D orientation test. Computes the cross-product
 * determinant of (p, r, q); returns the signed determinant when its
 * magnitude exceeds the floating-point error bound, or 0 when the
 * result is too close to call ("unsure").
 */
function orientIfSure(px, py, rx, ry, qx, qy) {
  const det1 = (ry - py) * (qx - px);
  const det2 = (rx - px) * (qy - py);
  const diff = det1 - det2;
  // threshold scales with the magnitude of the operands
  const bound = 3.3306690738754716e-16 * Math.abs(det1 + det2);
  return Math.abs(diff) >= bound ? diff : 0;
}
JavaScript
/**
 * Orientation predicate: true when the triple is in clockwise order.
 * Tries the three cyclic vertex orderings until one determinant is
 * certain (non-zero); collinear points yield false.
 */
function orient(rx, ry, qx, qy, px, py) {
  let s = orientIfSure(px, py, rx, ry, qx, qy);
  if (s === 0) s = orientIfSure(rx, ry, qx, qy, px, py);
  if (s === 0) s = orientIfSure(qx, qy, px, py, rx, ry);
  return s < 0;
}
/**
 * Orientation predicate: true when the triple is in clockwise order.
 * Tries the three cyclic vertex orderings until one determinant is
 * certain (non-zero); collinear points yield false.
 */
function orient(rx, ry, qx, qy, px, py) {
  let s = orientIfSure(px, py, rx, ry, qx, qy);
  if (s === 0) s = orientIfSure(rx, ry, qx, qy, px, py);
  if (s === 0) s = orientIfSure(qx, qy, px, py, rx, ry);
  return s < 0;
}
JavaScript
/**
 * Test whether a triangulation is degenerate: returns true when no
 * triangle has a cross product exceeding the tolerance (i.e. all input
 * points are effectively collinear).
 */
function collinear$1(d) {
  const {triangles, coords} = d;
  const EPS = 1e-10;
  for (let t = 0; t < triangles.length; t += 3) {
    const i = 2 * triangles[t];
    const j = 2 * triangles[t + 1];
    const k = 2 * triangles[t + 2];
    const cross =
      (coords[k] - coords[i]) * (coords[j + 1] - coords[i + 1]) -
      (coords[j] - coords[i]) * (coords[k + 1] - coords[i + 1]);
    if (cross > EPS) return false;
  }
  return true;
}
/**
 * Test whether a triangulation is degenerate: returns true when no
 * triangle has a cross product exceeding the tolerance (i.e. all input
 * points are effectively collinear).
 */
function collinear$1(d) {
  const {triangles, coords} = d;
  const EPS = 1e-10;
  for (let t = 0; t < triangles.length; t += 3) {
    const i = 2 * triangles[t];
    const j = 2 * triangles[t + 1];
    const k = 2 * triangles[t + 2];
    const cross =
      (coords[k] - coords[i]) * (coords[j + 1] - coords[i + 1]) -
      (coords[j] - coords[i]) * (coords[k + 1] - coords[i + 1]);
    if (cross > EPS) return false;
  }
  return true;
}
JavaScript
/**
 * Maintains a sorted index over key values, supporting incremental
 * inserts, removals, tuple reindexing and range bisection.
 */
function SortedIndex() {
  var index = array32(0), // tuple indices, aligned with sorted values
      value = [],         // sorted key values
      size = 0;           // number of live entries

  // Insert new tuples: extract and sort their keys, then merge with the
  // existing sorted arrays. `base` is the index offset for the new tuples.
  // Returns the sorted additions {index, value}.
  function insert(key, data, base) {
    if (!data.length) return [];

    var n0 = size,
        n1 = data.length,
        addv = Array(n1),
        addi = array32(n1),
        oldv, oldi, i;

    for (i=0; i<n1; ++i) {
      addv[i] = key(data[i]);
      addi[i] = i;
    }
    addv = sort(addv, addi);

    if (n0) {
      // merge the newly sorted entries into the existing sorted arrays
      oldv = value;
      oldi = index;
      value = Array(n0 + n1);
      index = array32(n0 + n1);
      merge$1(base, oldv, oldi, n0, addv, addi, n1, value, index);
    } else {
      // first insert: offset indices and adopt the new arrays directly
      if (base > 0) for (i=0; i<n1; ++i) {
        addi[i] += base;
      }
      value = addv;
      index = addi;
    }
    size = n0 + n1;

    return {index: addi, value: addv};
  }

  // Remove `num` entries flagged in `map`, compacting index/value in place.
  function remove(num, map) {
    // map: index -> remove
    var n = size,
        idx, i, j;

    // seek forward to first removal
    // NOTE(review): the condition reads index[i] before checking i<n;
    // safe in JS (out-of-bounds reads yield undefined) but fragile.
    for (i=0; !map[index[i]] && i<n; ++i);

    // condense index and value arrays
    for (j=i; i<n; ++i) {
      if (!map[idx=index[i]]) {
        index[j] = idx;
        value[j] = value[i];
        ++j;
      }
    }

    size = n - num;
  }

  // Rewrite tuple indices through the provided mapping
  // (used after upstream index compaction).
  function reindex(map) {
    for (var i=0, n=size; i<n; ++i) {
      index[i] = map[index[i]];
    }
  }

  // Return [lo, hi) insertion positions bracketing the value range,
  // against either a provided sorted array or this index's own values.
  function bisect(range, array) {
    var n;
    if (array) {
      n = array.length;
    } else {
      array = value;
      n = size;
    }
    return [
      bisectLeft(array, range[0], 0, n),
      bisectRight(array, range[1], 0, n)
    ];
  }

  return {
    insert: insert,
    remove: remove,
    bisect: bisect,
    reindex: reindex,
    index: function() { return index; },
    size: function() { return size; }
  };
}
/**
 * Maintains a sorted index over key values, supporting incremental
 * inserts, removals, tuple reindexing and range bisection.
 */
function SortedIndex() {
  var index = array32(0), // tuple indices, aligned with sorted values
      value = [],         // sorted key values
      size = 0;           // number of live entries

  // Insert new tuples: extract and sort their keys, then merge with the
  // existing sorted arrays. `base` is the index offset for the new tuples.
  // Returns the sorted additions {index, value}.
  function insert(key, data, base) {
    if (!data.length) return [];

    var n0 = size,
        n1 = data.length,
        addv = Array(n1),
        addi = array32(n1),
        oldv, oldi, i;

    for (i=0; i<n1; ++i) {
      addv[i] = key(data[i]);
      addi[i] = i;
    }
    addv = sort(addv, addi);

    if (n0) {
      // merge the newly sorted entries into the existing sorted arrays
      oldv = value;
      oldi = index;
      value = Array(n0 + n1);
      index = array32(n0 + n1);
      merge$1(base, oldv, oldi, n0, addv, addi, n1, value, index);
    } else {
      // first insert: offset indices and adopt the new arrays directly
      if (base > 0) for (i=0; i<n1; ++i) {
        addi[i] += base;
      }
      value = addv;
      index = addi;
    }
    size = n0 + n1;

    return {index: addi, value: addv};
  }

  // Remove `num` entries flagged in `map`, compacting index/value in place.
  function remove(num, map) {
    // map: index -> remove
    var n = size,
        idx, i, j;

    // seek forward to first removal
    // NOTE(review): the condition reads index[i] before checking i<n;
    // safe in JS (out-of-bounds reads yield undefined) but fragile.
    for (i=0; !map[index[i]] && i<n; ++i);

    // condense index and value arrays
    for (j=i; i<n; ++i) {
      if (!map[idx=index[i]]) {
        index[j] = idx;
        value[j] = value[i];
        ++j;
      }
    }

    size = n - num;
  }

  // Rewrite tuple indices through the provided mapping
  // (used after upstream index compaction).
  function reindex(map) {
    for (var i=0, n=size; i<n; ++i) {
      index[i] = map[index[i]];
    }
  }

  // Return [lo, hi) insertion positions bracketing the value range,
  // against either a provided sorted array or this index's own values.
  function bisect(range, array) {
    var n;
    if (array) {
      n = array.length;
    } else {
      array = value;
      n = size;
    }
    return [
      bisectLeft(array, range[0], 0, n),
      bisectRight(array, range[1], 0, n)
    ];
  }

  return {
    insert: insert,
    remove: remove,
    bisect: bisect,
    reindex: reindex,
    index: function() { return index; },
    size: function() { return size; }
  };
}
JavaScript
/**
 * Annotate a DOM event with Vega-specific context: the dataflow (view),
 * the scenegraph item, and an extension object. When a renderer canvas
 * exists, also computes the event position relative to the view origin.
 */
function eventExtend(view, event, item) {
  const renderer = view._renderer;
  const el = renderer && renderer.canvas();
  let p;

  if (el) {
    const translate = offset$3(view);
    // use the first touch point for touch events
    const e = event.changedTouches ? event.changedTouches[0] : event;
    p = point(e, el);
    p[0] -= translate[0];
    p[1] -= translate[1];
  }

  event.dataflow = view;
  event.item = item;
  event.vega = extension(view, item, p);
  return event;
}
/**
 * Annotate a DOM event with Vega-specific context: the dataflow (view),
 * the scenegraph item, and an extension object. When a renderer canvas
 * exists, also computes the event position relative to the view origin.
 */
function eventExtend(view, event, item) {
  const renderer = view._renderer;
  const el = renderer && renderer.canvas();
  let p;

  if (el) {
    const translate = offset$3(view);
    // use the first touch point for touch events
    const e = event.changedTouches ? event.changedTouches[0] : event;
    p = point(e, el);
    p[0] -= translate[0];
    p[1] -= translate[1];
  }

  event.dataflow = view;
  event.item = item;
  event.vega = extension(view, item, p);
  return event;
}
JavaScript
/**
 * Bind a signal to an external input element. Creates (or reuses) the
 * binding state object, generates the input DOM via generate(), and
 * wires two-way updates between the element and the view signal.
 * Returns the binding state, or undefined when no element is given.
 */
function bind$1(view, el, binding) {
  if (!el) return;

  var param = binding.param,
      bind = binding.state;

  if (!bind) {
    bind = binding.state = {
      elements: null, // populated by generate()
      active: false,  // whether the signal listener has been attached
      set: null,      // populated by generate()
      // push a widget-driven value change into the dataflow
      update: function(value) {
        if (value !== view.signal(param.signal)) {
          view.runAsync(null, function() {
            // flag the change as widget-originated so the signal
            // listener below does not echo it back to the widget
            bind.source = true;
            view.signal(param.signal, value);
          });
        }
      }
    };
    if (param.debounce) {
      bind.update = debounce(param.debounce, bind.update);
    }
  }

  generate(bind, el, param, view.signal(param.signal));

  if (!bind.active) {
    view.on(view._signals[param.signal], null, function() {
      // skip updates that originated from this widget; otherwise
      // propagate the new signal value into the widget
      bind.source
        ? (bind.source = false)
        : bind.set(view.signal(param.signal));
    });
    bind.active = true;
  }

  return bind;
}
/**
 * Bind a signal to an external input element. Creates (or reuses) the
 * binding state object, generates the input DOM via generate(), and
 * wires two-way updates between the element and the view signal.
 * Returns the binding state, or undefined when no element is given.
 */
function bind$1(view, el, binding) {
  if (!el) return;

  var param = binding.param,
      bind = binding.state;

  if (!bind) {
    bind = binding.state = {
      elements: null, // populated by generate()
      active: false,  // whether the signal listener has been attached
      set: null,      // populated by generate()
      // push a widget-driven value change into the dataflow
      update: function(value) {
        if (value !== view.signal(param.signal)) {
          view.runAsync(null, function() {
            // flag the change as widget-originated so the signal
            // listener below does not echo it back to the widget
            bind.source = true;
            view.signal(param.signal, value);
          });
        }
      }
    };
    if (param.debounce) {
      bind.update = debounce(param.debounce, bind.update);
    }
  }

  generate(bind, el, param, view.signal(param.signal));

  if (!bind.active) {
    view.on(view._signals[param.signal], null, function() {
      // skip updates that originated from this widget; otherwise
      // propagate the new signal value into the widget
      bind.source
        ? (bind.source = false)
        : bind.set(view.signal(param.signal));
    });
    bind.active = true;
  }

  return bind;
}
JavaScript
/**
 * Generate a generic <input> element for a signal binding. Binding
 * parameters are copied onto the element as attributes ('input' selects
 * the input type; 'signal' and 'element' are reserved).
 */
function form(bind, el, param, value) {
  const node = element$1('input');

  for (const key in param) {
    if (key !== 'signal' && key !== 'element') {
      node.setAttribute(key === 'input' ? 'type' : key, param[key]);
    }
  }
  node.setAttribute('name', param.signal);
  node.value = value;

  el.appendChild(node);

  // widget -> signal
  node.addEventListener('input', function() { bind.update(node.value); });

  bind.elements = [node];
  // signal -> widget
  bind.set = function(value) { node.value = value; };
}
/**
 * Generate a generic <input> element for a signal binding. Binding
 * parameters are copied onto the element as attributes ('input' selects
 * the input type; 'signal' and 'element' are reserved).
 */
function form(bind, el, param, value) {
  const node = element$1('input');

  for (const key in param) {
    if (key !== 'signal' && key !== 'element') {
      node.setAttribute(key === 'input' ? 'type' : key, param[key]);
    }
  }
  node.setAttribute('name', param.signal);
  node.value = value;

  el.appendChild(node);

  // widget -> signal
  node.addEventListener('input', function() { bind.update(node.value); });

  bind.elements = [node];
  // signal -> widget
  bind.set = function(value) { node.value = value; };
}
JavaScript
/**
 * Generate a <select> dropdown for a signal binding. One <option> is
 * created per entry in param.options, labeled from param.labels when
 * available; the option matching the current value starts selected.
 */
function select(bind, el, param, value) {
  const node = element$1('select', {name: param.signal});
  const labels = param.labels || [];

  param.options.forEach(function(option, i) {
    const attr = {value: option};
    if (valuesEqual(option, value)) attr.selected = true;
    node.appendChild(element$1('option', attr, (labels[i] || option) + ''));
  });

  el.appendChild(node);

  // widget -> signal
  node.addEventListener('change', function() {
    bind.update(param.options[node.selectedIndex]);
  });

  bind.elements = [node];
  // signal -> widget: select the option matching the new value
  bind.set = function(value) {
    const n = param.options.length;
    for (let i = 0; i < n; ++i) {
      if (valuesEqual(param.options[i], value)) {
        node.selectedIndex = i;
        return;
      }
    }
  };
}
/**
 * Generate a <select> dropdown for a signal binding. One <option> is
 * created per entry in param.options, labeled from param.labels when
 * available; the option matching the current value starts selected.
 */
function select(bind, el, param, value) {
  const node = element$1('select', {name: param.signal});
  const labels = param.labels || [];

  param.options.forEach(function(option, i) {
    const attr = {value: option};
    if (valuesEqual(option, value)) attr.selected = true;
    node.appendChild(element$1('option', attr, (labels[i] || option) + ''));
  });

  el.appendChild(node);

  // widget -> signal
  node.addEventListener('change', function() {
    bind.update(param.options[node.selectedIndex]);
  });

  bind.elements = [node];
  // signal -> widget: select the option matching the new value
  bind.set = function(value) {
    const n = param.options.length;
    for (let i = 0; i < n; ++i) {
      if (valuesEqual(param.options[i], value)) {
        node.selectedIndex = i;
        return;
      }
    }
  };
}
JavaScript
/**
 * Generate a radio-button group for a signal binding. One
 * <input type="radio"> plus <label> is created per option inside a
 * wrapper <span>; checking a button pushes the option to the signal.
 */
function radio(bind, el, param, value) {
  var group = element$1('span', {'class': RadioClass}),
      label = param.labels || [];

  el.appendChild(group);

  bind.elements = param.options.map(function(option, i) {
    var id = OptionClass + param.signal + '-' + option;

    var attr = {
      id: id,
      type: 'radio',
      name: param.signal,
      value: option
    };
    // pre-check the option matching the current signal value
    if (valuesEqual(option, value)) attr.checked = true;

    var input = element$1('input', attr);

    // widget -> signal
    input.addEventListener('change', function() { bind.update(option); });

    group.appendChild(input);
    group.appendChild(element$1('label', {'for': id}, (label[i] || option)+''));

    return input;
  });

  // signal -> widget: check the matching button; same-name radio
  // inputs uncheck their siblings automatically
  bind.set = function(value) {
    var nodes = bind.elements,
        i = 0,
        n = nodes.length;
    for (; i<n; ++i) {
      if (valuesEqual(nodes[i].value, value)) nodes[i].checked = true;
    }
  };
}
/**
 * Generate a radio-button group for a signal binding. One
 * <input type="radio"> plus <label> is created per option inside a
 * wrapper <span>; checking a button pushes the option to the signal.
 */
function radio(bind, el, param, value) {
  var group = element$1('span', {'class': RadioClass}),
      label = param.labels || [];

  el.appendChild(group);

  bind.elements = param.options.map(function(option, i) {
    var id = OptionClass + param.signal + '-' + option;

    var attr = {
      id: id,
      type: 'radio',
      name: param.signal,
      value: option
    };
    // pre-check the option matching the current signal value
    if (valuesEqual(option, value)) attr.checked = true;

    var input = element$1('input', attr);

    // widget -> signal
    input.addEventListener('change', function() { bind.update(option); });

    group.appendChild(input);
    group.appendChild(element$1('label', {'for': id}, (label[i] || option)+''));

    return input;
  });

  // signal -> widget: check the matching button; same-name radio
  // inputs uncheck their siblings automatically
  bind.set = function(value) {
    var nodes = bind.elements,
        i = 0,
        n = nodes.length;
    for (; i<n; ++i) {
      if (valuesEqual(nodes[i].value, value)) nodes[i].checked = true;
    }
  };
}
JavaScript
/**
 * Generate a range-slider input for a signal binding. Missing
 * min/max/step are derived from the initial value; the current value is
 * mirrored in an adjacent <label>.
 */
function range(bind, el, param, value) {
  // default to the midpoint when no initial value is given
  value = value !== undefined ? value : ((+param.max) + (+param.min)) / 2;

  // NOTE(review): min and step use `||`, so an explicit falsy setting
  // (e.g. min: 0) falls through to the computed default — presumably
  // intentional here; confirm before changing.
  var max = param.max != null ? param.max : Math.max(100, +value) || 100,
      min = param.min || Math.min(0, max, +value) || 0,
      step = param.step || tickStep(min, max, 100);

  var node = element$1('input', {
    type: 'range',
    name: param.signal,
    min: min,
    max: max,
    step: step
  });
  node.value = value;

  // label mirrors the slider's current value
  var label = element$1('label', {}, +value);

  el.appendChild(node);
  el.appendChild(label);

  function update() {
    label.textContent = node.value;
    bind.update(+node.value);
  }

  // subscribe to both input and change
  node.addEventListener('input', update);
  node.addEventListener('change', update);

  bind.elements = [node];
  // signal -> widget
  bind.set = function(value) {
    node.value = value;
    label.textContent = value;
  };
}
/**
 * Generate a range-slider input for a signal binding. Missing
 * min/max/step are derived from the initial value; the current value is
 * mirrored in an adjacent <label>.
 */
function range(bind, el, param, value) {
  // default to the midpoint when no initial value is given
  value = value !== undefined ? value : ((+param.max) + (+param.min)) / 2;

  // NOTE(review): min and step use `||`, so an explicit falsy setting
  // (e.g. min: 0) falls through to the computed default — presumably
  // intentional here; confirm before changing.
  var max = param.max != null ? param.max : Math.max(100, +value) || 100,
      min = param.min || Math.min(0, max, +value) || 0,
      step = param.step || tickStep(min, max, 100);

  var node = element$1('input', {
    type: 'range',
    name: param.signal,
    min: min,
    max: max,
    step: step
  });
  node.value = value;

  // label mirrors the slider's current value
  var label = element$1('label', {}, +value);

  el.appendChild(node);
  el.appendChild(label);

  function update() {
    label.textContent = node.value;
    bind.update(+node.value);
  }

  // subscribe to both input and change
  node.addEventListener('input', update);
  node.addEventListener('change', update);

  bind.elements = [node];
  // signal -> widget
  bind.set = function(value) {
    node.value = value;
    label.textContent = value;
  };
}
JavaScript
/**
 * Render a view with a headless renderer of the given type.
 * Runs the dataflow to completion first, then renders the scenegraph
 * root. Errors if the renderer type has no headless constructor.
 */
async function renderHeadless(view, type, scaleFactor, opt) {
  const module = renderModule(type);
  const ctr = module && module.headless;
  if (!ctr) error('Unrecognized renderer type: ' + type);

  await view.runAsync();
  const renderer = initializeRenderer(view, null, null, ctr, scaleFactor, opt);
  return renderer.renderAsync(view._scenegraph.root);
}
/**
 * Render a view with a headless renderer of the given type.
 * Runs the dataflow to completion first, then renders the scenegraph
 * root. Errors if the renderer type has no headless constructor.
 */
async function renderHeadless(view, type, scaleFactor, opt) {
  const module = renderModule(type);
  const ctr = module && module.headless;
  if (!ctr) error('Unrecognized renderer type: ' + type);

  await view.runAsync();
  const renderer = initializeRenderer(view, null, null, ctr, scaleFactor, opt);
  return renderer.renderAsync(view._scenegraph.root);
}
JavaScript
/**
 * Render this view to an image URL: a blob URL for SVG output, or a
 * PNG data URL for canvas-based output. Errors on unsupported types.
 */
async function renderToImageURL(type, scaleFactor) {
  const supported = type === RenderType.Canvas
    || type === RenderType.SVG
    || type === RenderType.PNG;
  if (!supported) error('Unrecognized image type: ' + type);

  const r = await renderHeadless(this, type, scaleFactor);
  return type === RenderType.SVG
    ? toBlobURL(r.svg(), 'image/svg+xml')
    : r.canvas().toDataURL('image/png');
}
/**
 * Render this view to an image URL: a blob URL for SVG output, or a
 * PNG data URL for canvas-based output. Errors on unsupported types.
 */
async function renderToImageURL(type, scaleFactor) {
  const supported = type === RenderType.Canvas
    || type === RenderType.SVG
    || type === RenderType.PNG;
  if (!supported) error('Unrecognized image type: ' + type);

  const r = await renderHeadless(this, type, scaleFactor);
  return type === RenderType.SVG
    ? toBlobURL(r.svg(), 'image/svg+xml')
    : r.canvas().toDataURL('image/png');
}
JavaScript
/**
 * Parse an operator parameter spec into resolved values, mapping each
 * entry (or each element of array-valued entries) via parseParameter.
 * Mutates and returns the optional `params` accumulator.
 */
function parseParameters(spec, ctx, params) {
  params = params || {};
  for (const key in spec) {
    const value = spec[key];
    params[key] = isArray(value)
      ? value.map(function(v) { return parseParameter(v, ctx, params); })
      : parseParameter(value, ctx, params);
  }
  return params;
}
/**
 * Parse an operator parameter spec into resolved values, mapping each
 * entry (or each element of array-valued entries) via parseParameter.
 * Mutates and returns the optional `params` accumulator.
 */
function parseParameters(spec, ctx, params) {
  params = params || {};
  for (const key in spec) {
    const value = spec[key];
    params[key] = isArray(value)
      ? value.map(function(v) { return parseParameter(v, ctx, params); })
      : parseParameter(value, ctx, params);
  }
  return params;
}
JavaScript
/**
 * Resolve and connect an operator's parameters (second parsing pass,
 * once all operators exist). Errors if the operator id is unknown.
 */
function parseOperatorParameters(spec, ctx) {
  if (!spec.params) return;

  const op = ctx.get(spec.id);
  if (!op) error('Invalid operator id: ' + spec.id);

  const parameterized = op.parameters(
    parseParameters(spec.params, ctx),
    spec.react,
    spec.initonly
  );
  ctx.dataflow.connect(op, parameterized);
}
/**
 * Resolve and connect an operator's parameters (second parsing pass,
 * once all operators exist). Errors if the operator id is unknown.
 */
function parseOperatorParameters(spec, ctx) {
  if (!spec.params) return;

  const op = ctx.get(spec.id);
  if (!op) error('Invalid operator id: ' + spec.id);

  const parameterized = op.parameters(
    parseParameters(spec.params, ctx),
    spec.react,
    spec.initonly
  );
  ctx.dataflow.connect(op, parameterized);
}
JavaScript
/**
 * Parse an event stream definition and register the resulting stream on
 * the runtime context. A stream originates from a named source, a
 * previously defined stream, or a merge of streams, and may then be
 * modified by between/filter/throttle/debounce options, in that order.
 */
function parseStream(spec, ctx) {
  var filter = spec.filter != null ? eventExpression(spec.filter, ctx) : undefined,
      stream = spec.stream != null ? ctx.get(spec.stream) : undefined,
      args;

  if (spec.source) {
    // events pulled from a named source, filtered at the source
    stream = ctx.events(spec.source, spec.type, filter);
  }
  else if (spec.merge) {
    // merge multiple upstream streams into one
    args = spec.merge.map(ctx.get.bind(ctx));
    stream = args[0].merge.apply(args[0], args.slice(1));
  }

  if (spec.between) {
    // only pass events occurring between the two bracketing streams
    args = spec.between.map(ctx.get.bind(ctx));
    stream = stream.between(args[0], args[1]);
  }

  if (spec.filter) {
    stream = stream.filter(filter);
  }

  if (spec.throttle != null) {
    stream = stream.throttle(+spec.throttle);
  }

  if (spec.debounce != null) {
    stream = stream.debounce(+spec.debounce);
  }

  if (stream == null) {
    error('Invalid stream definition: ' + JSON.stringify(spec));
  }

  // consumed streams stop event propagation
  if (spec.consume) stream.consume(true);

  ctx.stream(spec, stream);
}
/**
 * Parse an event stream definition and register the resulting stream on
 * the runtime context. A stream originates from a named source, a
 * previously defined stream, or a merge of streams, and may then be
 * modified by between/filter/throttle/debounce options, in that order.
 */
function parseStream(spec, ctx) {
  var filter = spec.filter != null ? eventExpression(spec.filter, ctx) : undefined,
      stream = spec.stream != null ? ctx.get(spec.stream) : undefined,
      args;

  if (spec.source) {
    // events pulled from a named source, filtered at the source
    stream = ctx.events(spec.source, spec.type, filter);
  }
  else if (spec.merge) {
    // merge multiple upstream streams into one
    args = spec.merge.map(ctx.get.bind(ctx));
    stream = args[0].merge.apply(args[0], args.slice(1));
  }

  if (spec.between) {
    // only pass events occurring between the two bracketing streams
    args = spec.between.map(ctx.get.bind(ctx));
    stream = stream.between(args[0], args[1]);
  }

  if (spec.filter) {
    stream = stream.filter(filter);
  }

  if (spec.throttle != null) {
    stream = stream.throttle(+spec.throttle);
  }

  if (spec.debounce != null) {
    stream = stream.debounce(+spec.debounce);
  }

  if (stream == null) {
    error('Invalid stream definition: ' + JSON.stringify(spec));
  }

  // consumed streams stop event propagation
  if (spec.consume) stream.consume(true);

  ctx.stream(spec, stream);
}
JavaScript
/**
 * Parse an update (event handler) definition: resolve its source
 * stream, optional target operator or expression, and optional handler
 * expression with parameters, then register it on the context.
 */
function parseUpdate(spec, ctx) {
  const sourceSpec = spec.source;
  // the source may be given directly or wrapped as an object reference
  const srcid = isObject(sourceSpec) ? sourceSpec.$ref : sourceSpec;
  const source = ctx.get(srcid);

  if (!source) error('Source not defined: ' + spec.source);

  let target;
  if (spec.target && spec.target.$expr) {
    target = eventExpression(spec.target.$expr, ctx);
  } else {
    target = ctx.get(spec.target);
  }

  let update = spec.update;
  let params;
  if (update && update.$expr) {
    if (update.$params) {
      params = parseParameters(update.$params, ctx);
    }
    update = handlerExpression(update.$expr, ctx);
  }

  ctx.update(spec, source, target, update, params);
}
/**
 * Parse an update (event handler) definition: resolve its source
 * stream, optional target operator or expression, and optional handler
 * expression with parameters, then register it on the context.
 */
function parseUpdate(spec, ctx) {
  const sourceSpec = spec.source;
  // the source may be given directly or wrapped as an object reference
  const srcid = isObject(sourceSpec) ? sourceSpec.$ref : sourceSpec;
  const source = ctx.get(srcid);

  if (!source) error('Source not defined: ' + spec.source);

  let target;
  if (spec.target && spec.target.$expr) {
    target = eventExpression(spec.target.$expr, ctx);
  } else {
    target = ctx.get(spec.target);
  }

  let update = spec.update;
  let params;
  if (update && update.$expr) {
    if (update.$params) {
      params = parseParameters(update.$params, ctx);
    }
    update = handlerExpression(update.$expr, ctx);
  }

  ctx.update(spec, source, target, update, params);
}
JavaScript
/**
 * Parse a full dataflow specification into the runtime context:
 * background, event config, operators (two passes: definitions then
 * parameters), streams, and updates. Returns the resolved context.
 */
function parse$4(spec, ctx) {
  const operators = spec.operators || [];

  // parse background
  if (spec.background) ctx.background = spec.background;

  // parse event configuration
  if (spec.eventConfig) ctx.eventConfig = spec.eventConfig;

  // parse operators, then their parameters once all operators exist
  operators.forEach(entry => parseOperator(entry, ctx));
  operators.forEach(entry => parseOperatorParameters(entry, ctx));

  // parse streams
  (spec.streams || []).forEach(entry => parseStream(entry, ctx));

  // parse updates
  (spec.updates || []).forEach(entry => parseUpdate(entry, ctx));

  return ctx.resolve();
}
/**
 * Parse a full dataflow specification into the runtime context:
 * background, event config, operators (two passes: definitions then
 * parameters), streams, and updates. Returns the resolved context.
 */
function parse$4(spec, ctx) {
  const operators = spec.operators || [];

  // parse background
  if (spec.background) ctx.background = spec.background;

  // parse event configuration
  if (spec.eventConfig) ctx.eventConfig = spec.eventConfig;

  // parse operators, then their parameters once all operators exist
  operators.forEach(entry => parseOperator(entry, ctx));
  operators.forEach(entry => parseOperatorParameters(entry, ctx));

  // parse streams
  (spec.streams || []).forEach(entry => parseStream(entry, ctx));

  // parse updates
  (spec.updates || []).forEach(entry => parseUpdate(entry, ctx));

  return ctx.resolve();
}
JavaScript
/**
 * View constructor. Instantiates a dataflow runtime for the parsed
 * dataflow `spec` and initializes scenegraph, renderer state, event
 * handling, signal bindings, sizing, and (optionally) the DOM container.
 */
function View(spec, options) {
  var view = this;
  options = options || {};

  Dataflow.call(view);
  if (options.loader) view.loader(options.loader);
  if (options.logger) view.logger(options.logger);
  if (options.logLevel != null) view.logLevel(options.logLevel);

  view._el = null;
  view._elBind = null;
  view._renderType = options.renderer || RenderType.Canvas;
  view._scenegraph = new Scenegraph();
  var root = view._scenegraph.root;

  // initialize renderer, handler and event management
  view._renderer = null;
  // FIX: these two assignments were accidentally joined by a comma
  // operator (`..., view._redraw = true;`); split into separate
  // statements (behavior unchanged, far less error-prone).
  view._tooltip = options.tooltip || defaultTooltip$1;
  view._redraw = true;
  view._handler = new CanvasHandler().scene(root);
  view._preventDefault = false;
  view._timers = [];
  view._eventListeners = [];
  view._resizeListeners = [];

  // initialize event configuration
  view._eventConfig = initializeEventConfig(spec.eventConfig);

  // initialize dataflow graph
  var ctx = runtime(view, spec, options.functions);
  view._runtime = ctx;
  view._signals = ctx.signals;
  view._bind = (spec.bindings || []).map(function(_) {
    return { state: null, param: extend({}, _) };
  });

  // initialize scenegraph
  if (ctx.root) ctx.root.set(root);
  root.source = ctx.data.root.input;
  view.pulse(
    ctx.data.root.input,
    view.changeset().insert(root.items)
  );

  // initialize background color
  view._background = options.background || ctx.background || null;

  // initialize view size
  view._width = view.width();
  view._height = view.height();
  view._viewWidth = viewWidth(view, view._width);
  view._viewHeight = viewHeight(view, view._height);
  view._origin = [0, 0];
  view._resize = 0;
  view._autosize = 1;
  initializeResize(view);

  // initialize cursor
  cursor(view);

  // initialize hover processing, if requested
  if (options.hover) view.hover();

  // initialize DOM container(s) and renderer
  if (options.container) view.initialize(options.container, options.bind);
}
/**
 * View constructor. Instantiates a dataflow runtime for the parsed
 * dataflow `spec` and initializes scenegraph, renderer state, event
 * handling, signal bindings, sizing, and (optionally) the DOM container.
 */
function View(spec, options) {
  var view = this;
  options = options || {};

  Dataflow.call(view);
  if (options.loader) view.loader(options.loader);
  if (options.logger) view.logger(options.logger);
  if (options.logLevel != null) view.logLevel(options.logLevel);

  view._el = null;
  view._elBind = null;
  view._renderType = options.renderer || RenderType.Canvas;
  view._scenegraph = new Scenegraph();
  var root = view._scenegraph.root;

  // initialize renderer, handler and event management
  view._renderer = null;
  // FIX: these two assignments were accidentally joined by a comma
  // operator (`..., view._redraw = true;`); split into separate
  // statements (behavior unchanged, far less error-prone).
  view._tooltip = options.tooltip || defaultTooltip$1;
  view._redraw = true;
  view._handler = new CanvasHandler().scene(root);
  view._preventDefault = false;
  view._timers = [];
  view._eventListeners = [];
  view._resizeListeners = [];

  // initialize event configuration
  view._eventConfig = initializeEventConfig(spec.eventConfig);

  // initialize dataflow graph
  var ctx = runtime(view, spec, options.functions);
  view._runtime = ctx;
  view._signals = ctx.signals;
  view._bind = (spec.bindings || []).map(function(_) {
    return { state: null, param: extend({}, _) };
  });

  // initialize scenegraph
  if (ctx.root) ctx.root.set(root);
  root.source = ctx.data.root.input;
  view.pulse(
    ctx.data.root.input,
    view.changeset().insert(root.items)
  );

  // initialize background color
  view._background = options.background || ctx.background || null;

  // initialize view size
  view._width = view.width();
  view._height = view.height();
  view._viewWidth = viewWidth(view, view._width);
  view._viewHeight = viewHeight(view, view._height);
  view._origin = [0, 0];
  view._resize = 0;
  view._autosize = 1;
  initializeResize(view);

  // initialize cursor
  cursor(view);

  // initialize hover processing, if requested
  if (options.hover) view.hover();

  // initialize DOM container(s) and renderer
  if (options.container) view.initialize(options.container, options.bind);
}
JavaScript
/**
 * Build the mark specification for a legend's symbol entries: per-value
 * symbol marks, text labels, and the faceted entry groups that lay them
 * out in rows/columns. `columns` is a signal expression; zero columns
 * implies an unbounded (infinite-column) layout.
 */
function legendSymbolGroups(spec, config, userEncode, dataRef, columns) {
  var _ = lookup$5(spec, config),
      entries = userEncode.entries,
      interactive = !!(entries && entries.interactive),
      name = entries ? entries.name : undefined,
      height = _('clipHeight'),
      symbolOffset = _('symbolOffset'),
      valueRef = {data: 'value'},
      encode = {},
      xSignal = `${columns} ? datum.${Offset} : datum.${Size}`,
      yEncode = height ? encoder(height) : {field: Size},
      index = `datum.${Index}`,
      ncols = `max(1, ${columns})`,
      enter, update, labelOffset,
      symbols, labels, nrows, sort;

  // center symbols/labels vertically within their row
  yEncode.mult = 0.5;

  // -- LEGEND SYMBOLS --
  encode = {
    enter: enter = {
      opacity: zero$1,
      x: {signal: xSignal, mult: 0.5, offset: symbolOffset},
      y: yEncode
    },
    update: update = {
      opacity: one$1,
      x: enter.x,
      y: enter.y
    },
    exit: {
      opacity: zero$1
    }
  };

  // default symbol colors apply only when no fill scale is specified
  var baseFill = null,
      baseStroke = null;
  if (!spec.fill) {
    baseFill = config.symbolBaseFillColor;
    baseStroke = config.symbolBaseStrokeColor;
  }

  addEncoders(encode, {
    fill: _('symbolFillColor', baseFill),
    shape: _('symbolType'),
    size: _('symbolSize'),
    stroke: _('symbolStrokeColor', baseStroke),
    strokeDash: _('symbolDash'),
    strokeDashOffset: _('symbolDashOffset'),
    strokeWidth: _('symbolStrokeWidth')
  }, { // update
    opacity: _('symbolOpacity')
  });

  // wire each legend-bound scale to the corresponding visual channel
  LegendScales.forEach(function(scale) {
    if (spec[scale]) {
      update[scale] = enter[scale] = {scale: spec[scale], field: Value};
    }
  });

  symbols = guideMark(
    SymbolMark, LegendSymbolRole, null,
    Value, valueRef, encode, userEncode.symbols
  );
  if (height) symbols.clip = true;

  // -- LEGEND LABELS --
  labelOffset = encoder(symbolOffset);
  labelOffset.offset = _('labelOffset');

  encode = {
    enter: enter = {
      opacity: zero$1,
      x: {signal: xSignal, offset: labelOffset},
      y: yEncode
    },
    update: update = {
      opacity: one$1,
      text: {field: Label},
      x: enter.x,
      y: enter.y
    },
    exit: {
      opacity: zero$1
    }
  };

  addEncoders(encode, {
    align: _('labelAlign'),
    baseline: _('labelBaseline'),
    fill: _('labelColor'),
    fillOpacity: _('labelOpacity'),
    font: _('labelFont'),
    fontSize: _('labelFontSize'),
    fontStyle: _('labelFontStyle'),
    fontWeight: _('labelFontWeight'),
    limit: _('labelLimit')
  });

  labels = guideMark(
    TextMark, LegendLabelRole, GuideLabelStyle,
    Value, valueRef, encode, userEncode.labels
  );

  // -- LEGEND ENTRY GROUPS --
  encode = {
    enter: {
      noBound: {value: !height}, // ignore width/height in bounds calc
      width: zero$1,
      height: height ? encoder(height) : zero$1,
      opacity: zero$1
    },
    exit: {opacity: zero$1},
    update: update = {
      opacity: one$1,
      row: {signal: null},
      column: {signal: null}
    }
  };

  // annotate and sort groups to ensure correct ordering
  if (_.isVertical(true)) {
    // column-major: fill rows downward, then start a new column
    nrows = `ceil(item.mark.items.length / ${ncols})`;
    update.row.signal = `${index}%${nrows}`;
    update.column.signal = `floor(${index} / ${nrows})`;
    sort = {field: ['row', index]};
  } else {
    // row-major layout
    update.row.signal = `floor(${index} / ${ncols})`;
    update.column.signal = `${index} % ${ncols}`;
    sort = {field: index};
  }
  // handle zero column case (implies infinite columns)
  update.column.signal = `${columns}?${update.column.signal}:${index}`;

  // facet legend entries into sub-groups
  dataRef = {facet: {data: dataRef, name: 'value', groupby: Index}};

  spec = guideGroup(
    ScopeRole$1, null, name, dataRef, interactive,
    extendEncode(encode, entries, Skip$1), [symbols, labels]
  );
  spec.sort = sort;

  return spec;
}
/**
 * Build the mark specification for a legend's symbol entries: per-value
 * symbol marks, text labels, and the faceted entry groups that lay them
 * out in rows/columns. `columns` is a signal expression; zero columns
 * implies an unbounded (infinite-column) layout.
 */
function legendSymbolGroups(spec, config, userEncode, dataRef, columns) {
  var _ = lookup$5(spec, config),
      entries = userEncode.entries,
      interactive = !!(entries && entries.interactive),
      name = entries ? entries.name : undefined,
      height = _('clipHeight'),
      symbolOffset = _('symbolOffset'),
      valueRef = {data: 'value'},
      encode = {},
      xSignal = `${columns} ? datum.${Offset} : datum.${Size}`,
      yEncode = height ? encoder(height) : {field: Size},
      index = `datum.${Index}`,
      ncols = `max(1, ${columns})`,
      enter, update, labelOffset,
      symbols, labels, nrows, sort;

  // center symbols/labels vertically within their row
  yEncode.mult = 0.5;

  // -- LEGEND SYMBOLS --
  encode = {
    enter: enter = {
      opacity: zero$1,
      x: {signal: xSignal, mult: 0.5, offset: symbolOffset},
      y: yEncode
    },
    update: update = {
      opacity: one$1,
      x: enter.x,
      y: enter.y
    },
    exit: {
      opacity: zero$1
    }
  };

  // default symbol colors apply only when no fill scale is specified
  var baseFill = null,
      baseStroke = null;
  if (!spec.fill) {
    baseFill = config.symbolBaseFillColor;
    baseStroke = config.symbolBaseStrokeColor;
  }

  addEncoders(encode, {
    fill: _('symbolFillColor', baseFill),
    shape: _('symbolType'),
    size: _('symbolSize'),
    stroke: _('symbolStrokeColor', baseStroke),
    strokeDash: _('symbolDash'),
    strokeDashOffset: _('symbolDashOffset'),
    strokeWidth: _('symbolStrokeWidth')
  }, { // update
    opacity: _('symbolOpacity')
  });

  // wire each legend-bound scale to the corresponding visual channel
  LegendScales.forEach(function(scale) {
    if (spec[scale]) {
      update[scale] = enter[scale] = {scale: spec[scale], field: Value};
    }
  });

  symbols = guideMark(
    SymbolMark, LegendSymbolRole, null,
    Value, valueRef, encode, userEncode.symbols
  );
  if (height) symbols.clip = true;

  // -- LEGEND LABELS --
  labelOffset = encoder(symbolOffset);
  labelOffset.offset = _('labelOffset');

  encode = {
    enter: enter = {
      opacity: zero$1,
      x: {signal: xSignal, offset: labelOffset},
      y: yEncode
    },
    update: update = {
      opacity: one$1,
      text: {field: Label},
      x: enter.x,
      y: enter.y
    },
    exit: {
      opacity: zero$1
    }
  };

  addEncoders(encode, {
    align: _('labelAlign'),
    baseline: _('labelBaseline'),
    fill: _('labelColor'),
    fillOpacity: _('labelOpacity'),
    font: _('labelFont'),
    fontSize: _('labelFontSize'),
    fontStyle: _('labelFontStyle'),
    fontWeight: _('labelFontWeight'),
    limit: _('labelLimit')
  });

  labels = guideMark(
    TextMark, LegendLabelRole, GuideLabelStyle,
    Value, valueRef, encode, userEncode.labels
  );

  // -- LEGEND ENTRY GROUPS --
  encode = {
    enter: {
      noBound: {value: !height}, // ignore width/height in bounds calc
      width: zero$1,
      height: height ? encoder(height) : zero$1,
      opacity: zero$1
    },
    exit: {opacity: zero$1},
    update: update = {
      opacity: one$1,
      row: {signal: null},
      column: {signal: null}
    }
  };

  // annotate and sort groups to ensure correct ordering
  if (_.isVertical(true)) {
    // column-major: fill rows downward, then start a new column
    nrows = `ceil(item.mark.items.length / ${ncols})`;
    update.row.signal = `${index}%${nrows}`;
    update.column.signal = `floor(${index} / ${nrows})`;
    sort = {field: ['row', index]};
  } else {
    // row-major layout
    update.row.signal = `floor(${index} / ${ncols})`;
    update.column.signal = `${index} % ${ncols}`;
    sort = {field: index};
  }
  // handle zero column case (implies infinite columns)
  update.column.signal = `${columns}?${update.column.signal}:${index}`;

  // facet legend entries into sub-groups
  dataRef = {facet: {data: dataRef, name: 'value', groupby: Index}};

  spec = guideGroup(
    ScopeRole$1, null, name, dataRef, interactive,
    extendEncode(encode, entries, Skip$1), [symbols, labels]
  );
  spec.sort = sort;

  return spec;
}
JavaScript
/**
 * Parse a transform specification into a dataflow entry. Errors on
 * unknown transform types; optionally registers an output signal proxy.
 */
function parseTransform(spec, scope) {
  const def = definition(spec.type);
  if (!def) error('Unrecognized transform type: ' + $(spec.type));

  const params = parseParameters$1(def, spec, scope);
  const t = entry(def.type.toLowerCase(), null, params);

  if (spec.signal) scope.addSignal(spec.signal, scope.proxy(t));
  t.metadata = def.metadata || {};

  return t;
}
/**
 * Parse a transform specification into a dataflow entry. Errors on
 * unknown transform types; optionally registers an output signal proxy.
 */
function parseTransform(spec, scope) {
  const def = definition(spec.type);
  if (!def) error('Unrecognized transform type: ' + $(spec.type));

  const params = parseParameters$1(def, spec, scope);
  const t = entry(def.type.toLowerCase(), null, params);

  if (spec.signal) scope.addSignal(spec.signal, scope.proxy(t));
  t.metadata = def.metadata || {};

  return t;
}
JavaScript
/**
 * Parse all parameters declared by a transform definition, resolving
 * each against the provided spec and scope.
 */
function parseParameters$1(def, spec, scope) {
  const params = {};
  def.params.forEach(function(pdef) {
    params[pdef.name] = parseParameter$2(pdef, spec, scope);
  });
  return params;
}
/**
 * Parse all parameters declared by a transform definition, resolving
 * each against the provided spec and scope.
 */
function parseParameters$1(def, spec, scope) {
  const params = {};
  def.params.forEach(function(pdef) {
    params[pdef.name] = parseParameter$2(pdef, spec, scope);
  });
  return params;
}
JavaScript
/**
 * Parse a single transform parameter according to its declared type:
 * index and param types dispatch to specialized parsers, projections
 * resolve to projection references, and everything else resolves to a
 * parameter value (mapped over arrays unless signal-valued). Errors if
 * a required parameter is missing.
 */
function parseParameter$2(def, spec, scope) {
  const type = def.type;
  const value = spec[def.name];

  if (type === 'index') {
    return parseIndexParameter(def, spec, scope);
  }

  if (value === undefined) {
    if (def.required) {
      error('Missing required ' + $(spec.type) + ' parameter: ' + $(def.name));
    }
    return;
  }

  if (type === 'param') {
    return parseSubParameters(def, spec, scope);
  }

  if (type === 'projection') {
    return scope.projectionRef(spec[def.name]);
  }

  return def.array && !isSignal(value)
    ? value.map(function(v) { return parameterValue(def, v, scope); })
    : parameterValue(def, value, scope);
}
/**
 * Parse a single transform parameter according to its declared type:
 * index and param types dispatch to specialized parsers, projections
 * resolve to projection references, and everything else resolves to a
 * parameter value (mapped over arrays unless signal-valued). Errors if
 * a required parameter is missing.
 */
function parseParameter$2(def, spec, scope) {
  const type = def.type;
  const value = spec[def.name];

  if (type === 'index') {
    return parseIndexParameter(def, spec, scope);
  }

  if (value === undefined) {
    if (def.required) {
      error('Missing required ' + $(spec.type) + ' parameter: ' + $(def.name));
    }
    return;
  }

  if (type === 'param') {
    return parseSubParameters(def, spec, scope);
  }

  if (type === 'projection') {
    return scope.projectionRef(spec[def.name]);
  }

  return def.array && !isSignal(value)
    ? value.map(function(v) { return parameterValue(def, v, scope); })
    : parameterValue(def, value, scope);
}
JavaScript
/**
 * Resolve a single transform parameter value to an operator reference
 * or literal, dispatching on the parameter definition's declared type.
 * Order of the ternary chain matters: wrapper forms ($expr/$field) are
 * checked before plain typed references.
 */
function parameterValue(def, value, scope) {
  var type = def.type;

  if (isSignal(value)) {
    // signal-valued parameters resolve to the matching reference kind;
    // expression-typed parameters may not be signals
    return isExpr$1(type) ? error('Expression references can not be signals.')
      : isField(type) ? scope.fieldRef(value)
      : isCompare(type) ? scope.compareRef(value)
      : scope.signalRef(value.signal);
  } else {
    // expression wrappers are honored for expr-capable or field params
    var expr = def.expr || isField(type);
    return expr && outerExpr(value) ? scope.exprRef(value.expr, value.as)
      : expr && outerField(value) ? fieldRef(value.field, value.as)
      : isExpr$1(type) ? parseExpression$1(value, scope)
      : isData(type) ? ref(scope.getData(value).values)
      : isField(type) ? fieldRef(value)
      : isCompare(type) ? scope.compareRef(value)
      : value; // plain literal: pass through unchanged
  }
}
/**
 * Resolve a parameter value to an operator reference or pass-through literal,
 * depending on the declared parameter type and the value's own shape.
 * @param {object} def - parameter definition.
 * @param {*} value - raw value from the spec.
 * @param {object} scope - parser scope.
 * @returns {*} resolved reference or the literal value.
 */
function parameterValue(def, value, scope) {
  const type = def.type;

  if (isSignal(value)) {
    // Signal-valued parameters resolve by type; expressions may not be signals.
    if (isExpr$1(type)) return error('Expression references can not be signals.');
    if (isField(type)) return scope.fieldRef(value);
    if (isCompare(type)) return scope.compareRef(value);
    return scope.signalRef(value.signal);
  }

  const expr = def.expr || isField(type);
  if (expr && outerExpr(value)) return scope.exprRef(value.expr, value.as);
  if (expr && outerField(value)) return fieldRef(value.field, value.as);
  if (isExpr$1(type)) return parseExpression$1(value, scope);
  if (isData(type)) return ref(scope.getData(value).values);
  if (isField(type)) return fieldRef(value);
  if (isCompare(type)) return scope.compareRef(value);
  return value;
}
JavaScript
/**
 * Resolve a 'param'-typed parameter. Array-valued parameters must be literal
 * arrays (never signals) and are resolved element-wise.
 * @param {object} def - parameter definition.
 * @param {object} spec - user specification.
 * @param {object} scope - parser scope.
 * @returns {*} resolved sub-parameter reference(s).
 */
function parseSubParameters(def, spec, scope) {
  const value = spec[def.name];
  if (!def.array) {
    return parseSubParameter(def, value, scope);
  }
  if (!isArray(value)) {
    // signals not allowed!
    error('Expected an array of sub-parameters. Instead: ' + $(value));
  }
  return value.map(v => parseSubParameter(def, v, scope));
}
/**
 * Resolve a 'param'-typed parameter. Array-valued parameters must be literal
 * arrays (never signals) and are resolved element-wise.
 * @param {object} def - parameter definition.
 * @param {object} spec - user specification.
 * @param {object} scope - parser scope.
 * @returns {*} resolved sub-parameter reference(s).
 */
function parseSubParameters(def, spec, scope) {
  const value = spec[def.name];
  if (!def.array) {
    return parseSubParameter(def, value, scope);
  }
  if (!isArray(value)) {
    // signals not allowed!
    error('Expected an array of sub-parameters. Instead: ' + $(value));
  }
  return value.map(v => parseSubParameter(def, v, scope));
}
JavaScript
/**
 * Resolve one sub-parameter object: find the sub-parameter definition whose
 * key fields all match the supplied value, parse its parameters, and wrap
 * them in a Params transform.
 * @param {object} def - parent parameter definition with candidate `params`.
 * @param {object} value - sub-parameter object from the spec.
 * @param {object} scope - parser scope.
 * @returns {object} reference to the added Params operator.
 */
function parseSubParameter(def, value, scope) {
  // Find the candidate definition whose every key field matches the value.
  let match = null;
  for (const candidate of def.params) {
    let matches = true;
    for (const k in candidate.key) {
      if (candidate.key[k] !== value[k]) {
        matches = false;
        break;
      }
    }
    if (matches) {
      match = candidate;
      break;
    }
  }

  // raise error if matching key not found
  if (!match) error('Unsupported parameter: ' + $(value));

  // parse params, create Params transform, return ref
  const params = extend(parseParameters$1(match, value, scope), match.key);
  return ref(scope.add(Params$2(params)));
}
/**
 * Resolve one sub-parameter object: find the sub-parameter definition whose
 * key fields all match the supplied value, parse its parameters, and wrap
 * them in a Params transform.
 * @param {object} def - parent parameter definition with candidate `params`.
 * @param {object} value - sub-parameter object from the spec.
 * @param {object} scope - parser scope.
 * @returns {object} reference to the added Params operator.
 */
function parseSubParameter(def, value, scope) {
  // Find the candidate definition whose every key field matches the value.
  let match = null;
  for (const candidate of def.params) {
    let matches = true;
    for (const k in candidate.key) {
      if (candidate.key[k] !== value[k]) {
        matches = false;
        break;
      }
    }
    if (matches) {
      match = candidate;
      break;
    }
  }

  // raise error if matching key not found
  if (!match) error('Unsupported parameter: ' + $(value));

  // parse params, create Params transform, return ref
  const params = extend(parseParameters$1(match, value, scope), match.key);
  return ref(scope.add(Params$2(params)));
}
JavaScript
/**
 * Derive the encode block for a title mark. An explicit `encode.title`
 * block wins; otherwise name/interactive/style from the spec are merged
 * over the raw encode object.
 * @param {object} spec - title specification.
 * @returns {object} encode block for the title mark.
 */
function titleEncode(spec) {
  const encode = spec.encode;
  if (encode && encode.title) {
    return encode.title;
  }
  return extend({
    name: spec.name,
    interactive: spec.interactive,
    style: spec.style
  }, encode);
}
/**
 * Derive the encode block for a title mark. An explicit `encode.title`
 * block wins; otherwise name/interactive/style from the spec are merged
 * over the raw encode object.
 * @param {object} spec - title specification.
 * @returns {object} encode block for the title mark.
 */
function titleEncode(spec) {
  const encode = spec.encode;
  if (encode && encode.title) {
    return encode.title;
  }
  return extend({
    name: spec.name,
    interactive: spec.interactive,
    style: spec.style
  }, encode);
}
JavaScript
/**
 * Analyze a data set definition and assemble the ordered list of dataflow
 * operator entries that implement it. Three input kinds are handled:
 * inline values, url loads (each via a dynamic loader when values/url/format
 * contain signals), and derivation from one or more upstream data sets.
 * Collect operators are inserted wherever a transform needs materialized
 * tuples, and the pipeline always terminates with a Sieve operator.
 * NOTE(review): the exact operator ordering is load-bearing for the callers
 * of this function — code kept token-identical; comments/formatting only.
 * @param {object} data - data set definition from the spec.
 * @param {object} scope - parser scope used to resolve references.
 * @param {Array} ops - parsed transform entries for this data set.
 * @returns {Array} ordered operator entries (slot 0 becomes a Relay when the
 *   data derives from upstream sources).
 */
function analyze(data, scope, ops) {
  var output = [],
      source = null,
      modify = false,
      generate = false,
      upstream, i, n, t, m;

  if (data.values) {
    // hard-wired input data set
    if (hasSignal(data.values) || hasSignal(data.format)) {
      // if either values or format has signal, use dynamic loader
      output.push(load$1(scope, data));
      output.push(source = collect());
    } else {
      // otherwise, ingest upon dataflow init
      output.push(source = collect({
        $ingest: data.values,
        $format: data.format
      }));
    }
  } else if (data.url) {
    // load data from external source
    if (hasSignal(data.url) || hasSignal(data.format)) {
      // if either url or format has signal, use dynamic loader
      output.push(load$1(scope, data));
      output.push(source = collect());
    } else {
      // otherwise, request load upon dataflow init
      output.push(source = collect({
        $request: data.url,
        $format: data.format
      }));
    }
  } else if (data.source) {
    // derives from one or more other data sets
    source = upstream = array(data.source).map(function(d) {
      return ref(scope.getData(d).output);
    });
    output.push(null); // populate later
  }

  // scan data transforms, add collectors as needed
  for (i=0, n=ops.length; i<n; ++i) {
    t = ops[i];
    m = t.metadata;
    if (!source && !m.source) {
      output.push(source = collect());
    }
    output.push(t);
    if (m.generates) generate = true;
    if (m.modifies && !generate) modify = true;
    if (m.source) source = t;
    else if (m.changes) source = null;
  }

  if (upstream) {
    n = upstream.length - 1;
    output[0] = Relay$1({
      derive: modify,
      pulse: n ? upstream : upstream[0]
    });
    if (modify || n) {
      // collect derived and multi-pulse tuples
      output.splice(1, 0, collect());
    }
  }

  if (!source) output.push(collect());
  output.push(Sieve$1({}));
  return output;
}
/**
 * Analyze a data set definition and assemble the ordered list of dataflow
 * operator entries that implement it. Three input kinds are handled:
 * inline values, url loads (each via a dynamic loader when values/url/format
 * contain signals), and derivation from one or more upstream data sets.
 * Collect operators are inserted wherever a transform needs materialized
 * tuples, and the pipeline always terminates with a Sieve operator.
 * NOTE(review): the exact operator ordering is load-bearing for the callers
 * of this function — code kept token-identical; comments/formatting only.
 * @param {object} data - data set definition from the spec.
 * @param {object} scope - parser scope used to resolve references.
 * @param {Array} ops - parsed transform entries for this data set.
 * @returns {Array} ordered operator entries (slot 0 becomes a Relay when the
 *   data derives from upstream sources).
 */
function analyze(data, scope, ops) {
  var output = [],
      source = null,
      modify = false,
      generate = false,
      upstream, i, n, t, m;

  if (data.values) {
    // hard-wired input data set
    if (hasSignal(data.values) || hasSignal(data.format)) {
      // if either values or format has signal, use dynamic loader
      output.push(load$1(scope, data));
      output.push(source = collect());
    } else {
      // otherwise, ingest upon dataflow init
      output.push(source = collect({
        $ingest: data.values,
        $format: data.format
      }));
    }
  } else if (data.url) {
    // load data from external source
    if (hasSignal(data.url) || hasSignal(data.format)) {
      // if either url or format has signal, use dynamic loader
      output.push(load$1(scope, data));
      output.push(source = collect());
    } else {
      // otherwise, request load upon dataflow init
      output.push(source = collect({
        $request: data.url,
        $format: data.format
      }));
    }
  } else if (data.source) {
    // derives from one or more other data sets
    source = upstream = array(data.source).map(function(d) {
      return ref(scope.getData(d).output);
    });
    output.push(null); // populate later
  }

  // scan data transforms, add collectors as needed
  for (i=0, n=ops.length; i<n; ++i) {
    t = ops[i];
    m = t.metadata;
    if (!source && !m.source) {
      output.push(source = collect());
    }
    output.push(t);
    if (m.generates) generate = true;
    if (m.modifies && !generate) modify = true;
    if (m.source) source = t;
    else if (m.changes) source = null;
  }

  if (upstream) {
    n = upstream.length - 1;
    output[0] = Relay$1({
      derive: modify,
      pulse: n ? upstream : upstream[0]
    });
    if (modify || n) {
      // collect derived and multi-pulse tuples
      output.splice(1, 0, collect());
    }
  }

  if (!source) output.push(collect());
  output.push(Sieve$1({}));
  return output;
}
JavaScript
/**
 * Builds the default configuration object: padding/autosize/background,
 * event handling, per-mark-type defaults, named styles, and
 * title/axis/projection/legend/scale-range defaults. References the
 * module-level color/font/size constants (defaultColor, defaultFont, black,
 * gray, lightGray, defaultSymbolSize, defaultStrokeWidth) defined elsewhere
 * in this file.
 * @returns {object} a fresh configuration object on every call.
 */
function defaults() {
  return {
    // default padding around visualization
    padding: 0,

    // default for automatic sizing; options: 'none', 'pad', 'fit'
    // or provide an object (e.g., {'type': 'pad', 'resize': true})
    autosize: 'pad',

    // default view background color
    // covers the entire view component
    background: null,

    // default event handling configuration
    // preventDefault for view-sourced event types except 'wheel'
    events: {
      defaults: {allow: ['wheel']}
    },

    // defaults for top-level group marks
    // accepts mark properties (fill, stroke, etc)
    // covers the data rectangle within group width/height
    group: null,

    // defaults for basic mark types
    // each subset accepts mark properties (fill, stroke, etc)
    mark: null,
    arc: { fill: defaultColor },
    area: { fill: defaultColor },
    image: null,
    line: { stroke: defaultColor, strokeWidth: defaultStrokeWidth },
    path: { stroke: defaultColor },
    rect: { fill: defaultColor },
    rule: { stroke: black },
    shape: { stroke: defaultColor },
    symbol: { fill: defaultColor, size: 64 },
    text: { fill: black, font: defaultFont, fontSize: 11 },

    // style definitions
    style: {
      // axis & legend labels
      'guide-label': { fill: black, font: defaultFont, fontSize: 10 },
      // axis & legend titles
      'guide-title': { fill: black, font: defaultFont, fontSize: 11, fontWeight: 'bold' },
      // headers, including chart title
      'group-title': { fill: black, font: defaultFont, fontSize: 13, fontWeight: 'bold' },
      // chart subtitle
      'group-subtitle': { fill: black, font: defaultFont, fontSize: 12 },
      // defaults for styled point marks in Vega-Lite
      point: { size: defaultSymbolSize, strokeWidth: defaultStrokeWidth, shape: 'circle' },
      circle: { size: defaultSymbolSize, strokeWidth: defaultStrokeWidth },
      square: { size: defaultSymbolSize, strokeWidth: defaultStrokeWidth, shape: 'square' },
      // defaults for styled group marks in Vega-Lite
      cell: { fill: 'transparent', stroke: lightGray }
    },

    // defaults for title
    title: { orient: 'top', anchor: 'middle', offset: 4, subtitlePadding: 3 },

    // defaults for axes
    axis: {
      minExtent: 0, maxExtent: 200, bandPosition: 0.5,
      domain: true, domainWidth: 1, domainColor: gray,
      grid: false, gridWidth: 1, gridColor: lightGray,
      labels: true, labelAngle: 0, labelLimit: 180, labelPadding: 2,
      ticks: true, tickColor: gray, tickOffset: 0, tickRound: true,
      tickSize: 5, tickWidth: 1, titlePadding: 4
    },

    // correction for centering bias
    axisBand: { tickOffset: -0.5 },

    // defaults for cartographic projection
    projection: { type: 'mercator' },

    // defaults for legends
    legend: {
      orient: 'right', padding: 0, gridAlign: 'each',
      columnPadding: 10, rowPadding: 2,
      symbolDirection: 'vertical', gradientDirection: 'vertical',
      gradientLength: 200, gradientThickness: 16,
      gradientStrokeColor: lightGray, gradientStrokeWidth: 0,
      gradientLabelOffset: 2,
      labelAlign: 'left', labelBaseline: 'middle', labelLimit: 160,
      labelOffset: 4, labelOverlap: true,
      symbolLimit: 30, symbolType: 'circle', symbolSize: 100,
      symbolOffset: 0, symbolStrokeWidth: 1.5,
      symbolBaseFillColor: 'transparent', symbolBaseStrokeColor: gray,
      titleLimit: 180, titleOrient: 'top', titlePadding: 5,
      layout: {
        offset: 18,
        direction: 'horizontal',
        left: { direction: 'vertical' },
        right: { direction: 'vertical' }
      }
    },

    // defaults for scale ranges
    range: {
      category: { scheme: 'tableau10' },
      ordinal: { scheme: 'blues' },
      heatmap: { scheme: 'yellowgreenblue' },
      ramp: { scheme: 'blues' },
      diverging: { scheme: 'blueorange', extent: [1, 0] },
      symbol: [
        'circle', 'square', 'triangle-up', 'cross', 'diamond',
        'triangle-right', 'triangle-down', 'triangle-left'
      ]
    }
  };
}
/**
 * Builds the default configuration object: padding/autosize/background,
 * event handling, per-mark-type defaults, named styles, and
 * title/axis/projection/legend/scale-range defaults. References the
 * module-level color/font/size constants (defaultColor, defaultFont, black,
 * gray, lightGray, defaultSymbolSize, defaultStrokeWidth) defined elsewhere
 * in this file.
 * @returns {object} a fresh configuration object on every call.
 */
function defaults() {
  return {
    // default padding around visualization
    padding: 0,

    // default for automatic sizing; options: 'none', 'pad', 'fit'
    // or provide an object (e.g., {'type': 'pad', 'resize': true})
    autosize: 'pad',

    // default view background color
    // covers the entire view component
    background: null,

    // default event handling configuration
    // preventDefault for view-sourced event types except 'wheel'
    events: {
      defaults: {allow: ['wheel']}
    },

    // defaults for top-level group marks
    // accepts mark properties (fill, stroke, etc)
    // covers the data rectangle within group width/height
    group: null,

    // defaults for basic mark types
    // each subset accepts mark properties (fill, stroke, etc)
    mark: null,
    arc: { fill: defaultColor },
    area: { fill: defaultColor },
    image: null,
    line: { stroke: defaultColor, strokeWidth: defaultStrokeWidth },
    path: { stroke: defaultColor },
    rect: { fill: defaultColor },
    rule: { stroke: black },
    shape: { stroke: defaultColor },
    symbol: { fill: defaultColor, size: 64 },
    text: { fill: black, font: defaultFont, fontSize: 11 },

    // style definitions
    style: {
      // axis & legend labels
      'guide-label': { fill: black, font: defaultFont, fontSize: 10 },
      // axis & legend titles
      'guide-title': { fill: black, font: defaultFont, fontSize: 11, fontWeight: 'bold' },
      // headers, including chart title
      'group-title': { fill: black, font: defaultFont, fontSize: 13, fontWeight: 'bold' },
      // chart subtitle
      'group-subtitle': { fill: black, font: defaultFont, fontSize: 12 },
      // defaults for styled point marks in Vega-Lite
      point: { size: defaultSymbolSize, strokeWidth: defaultStrokeWidth, shape: 'circle' },
      circle: { size: defaultSymbolSize, strokeWidth: defaultStrokeWidth },
      square: { size: defaultSymbolSize, strokeWidth: defaultStrokeWidth, shape: 'square' },
      // defaults for styled group marks in Vega-Lite
      cell: { fill: 'transparent', stroke: lightGray }
    },

    // defaults for title
    title: { orient: 'top', anchor: 'middle', offset: 4, subtitlePadding: 3 },

    // defaults for axes
    axis: {
      minExtent: 0, maxExtent: 200, bandPosition: 0.5,
      domain: true, domainWidth: 1, domainColor: gray,
      grid: false, gridWidth: 1, gridColor: lightGray,
      labels: true, labelAngle: 0, labelLimit: 180, labelPadding: 2,
      ticks: true, tickColor: gray, tickOffset: 0, tickRound: true,
      tickSize: 5, tickWidth: 1, titlePadding: 4
    },

    // correction for centering bias
    axisBand: { tickOffset: -0.5 },

    // defaults for cartographic projection
    projection: { type: 'mercator' },

    // defaults for legends
    legend: {
      orient: 'right', padding: 0, gridAlign: 'each',
      columnPadding: 10, rowPadding: 2,
      symbolDirection: 'vertical', gradientDirection: 'vertical',
      gradientLength: 200, gradientThickness: 16,
      gradientStrokeColor: lightGray, gradientStrokeWidth: 0,
      gradientLabelOffset: 2,
      labelAlign: 'left', labelBaseline: 'middle', labelLimit: 160,
      labelOffset: 4, labelOverlap: true,
      symbolLimit: 30, symbolType: 'circle', symbolSize: 100,
      symbolOffset: 0, symbolStrokeWidth: 1.5,
      symbolBaseFillColor: 'transparent', symbolBaseStrokeColor: gray,
      titleLimit: 180, titleOrient: 'top', titlePadding: 5,
      layout: {
        offset: 18,
        direction: 'horizontal',
        left: { direction: 'vertical' },
        right: { direction: 'vertical' }
      }
    },

    // defaults for scale ranges
    range: {
      category: { scheme: 'tableau10' },
      ordinal: { scheme: 'blues' },
      heatmap: { scheme: 'yellowgreenblue' },
      ramp: { scheme: 'blues' },
      diverging: { scheme: 'blueorange', extent: [1, 0] },
      symbol: [
        'circle', 'square', 'triangle-up', 'cross', 'diamond',
        'triangle-right', 'triangle-down', 'triangle-left'
      ]
    }
  };
}
JavaScript
/**
 * Builds the mark specification for a legend's symbol entries: a symbol
 * mark, a text label mark, and the faceted group that lays entries out in
 * rows/columns. `columns` is a signal expression string; a zero value
 * selects the infinite-columns layout. Entry groups are annotated with
 * row/column signal expressions plus a sort order so they render in the
 * correct sequence.
 * NOTE(review): the encode-channel wiring and generated signal strings are
 * intricate and order-dependent — code kept token-identical; comments and
 * formatting only.
 * @param {object} spec - legend specification.
 * @param {object} config - legend configuration defaults.
 * @param {object} userEncode - user-supplied encode blocks.
 * @param {object} dataRef - reference to the legend entry data set.
 * @param {*} columns - column-count signal expression.
 * @returns {object} group mark specification for the legend entries.
 */
function legendSymbolGroups(spec, config, userEncode, dataRef, columns) {
  var _ = lookup(spec, config),
      entries = userEncode.entries,
      interactive = !!(entries && entries.interactive),
      name = entries ? entries.name : undefined,
      height = _('clipHeight'),
      symbolOffset = _('symbolOffset'),
      valueRef = {data: 'value'},
      encode = {},
      xSignal = `${columns} ? datum.${Offset} : datum.${Size}`,
      yEncode = height ? encoder(height) : {field: Size},
      index = `datum.${Index}`,
      ncols = `max(1, ${columns})`,
      enter, update, labelOffset,
      symbols, labels, nrows, sort;

  yEncode.mult = 0.5;

  // -- LEGEND SYMBOLS --
  encode = {
    enter: enter = {
      opacity: zero,
      x: {signal: xSignal, mult: 0.5, offset: symbolOffset},
      y: yEncode
    },
    update: update = {
      opacity: one,
      x: enter.x,
      y: enter.y
    },
    exit: {
      opacity: zero
    }
  };

  var baseFill = null,
      baseStroke = null;
  if (!spec.fill) {
    baseFill = config.symbolBaseFillColor;
    baseStroke = config.symbolBaseStrokeColor;
  }

  addEncoders(encode, {
    fill: _('symbolFillColor', baseFill),
    shape: _('symbolType'),
    size: _('symbolSize'),
    stroke: _('symbolStrokeColor', baseStroke),
    strokeDash: _('symbolDash'),
    strokeDashOffset: _('symbolDashOffset'),
    strokeWidth: _('symbolStrokeWidth')
  }, { // update
    opacity: _('symbolOpacity')
  });

  LegendScales.forEach(function(scale) {
    if (spec[scale]) {
      update[scale] = enter[scale] = {scale: spec[scale], field: Value};
    }
  });

  symbols = guideMark(
    SymbolMark, LegendSymbolRole, null,
    Value, valueRef, encode, userEncode.symbols
  );
  if (height) symbols.clip = true;

  // -- LEGEND LABELS --
  labelOffset = encoder(symbolOffset);
  labelOffset.offset = _('labelOffset');

  encode = {
    enter: enter = {
      opacity: zero,
      x: {signal: xSignal, offset: labelOffset},
      y: yEncode
    },
    update: update = {
      opacity: one,
      text: {field: Label},
      x: enter.x,
      y: enter.y
    },
    exit: {
      opacity: zero
    }
  };

  addEncoders(encode, {
    align: _('labelAlign'),
    baseline: _('labelBaseline'),
    fill: _('labelColor'),
    fillOpacity: _('labelOpacity'),
    font: _('labelFont'),
    fontSize: _('labelFontSize'),
    fontStyle: _('labelFontStyle'),
    fontWeight: _('labelFontWeight'),
    limit: _('labelLimit')
  });

  labels = guideMark(
    TextMark, LegendLabelRole, GuideLabelStyle,
    Value, valueRef, encode, userEncode.labels
  );

  // -- LEGEND ENTRY GROUPS --
  encode = {
    enter: {
      noBound: {value: !height}, // ignore width/height in bounds calc
      width: zero,
      height: height ? encoder(height) : zero,
      opacity: zero
    },
    exit: {opacity: zero},
    update: update = {
      opacity: one,
      row: {signal: null},
      column: {signal: null}
    }
  };

  // annotate and sort groups to ensure correct ordering
  if (_.isVertical(true)) {
    nrows = `ceil(item.mark.items.length / ${ncols})`;
    update.row.signal = `${index}%${nrows}`;
    update.column.signal = `floor(${index} / ${nrows})`;
    sort = {field: ['row', index]};
  } else {
    update.row.signal = `floor(${index} / ${ncols})`;
    update.column.signal = `${index} % ${ncols}`;
    sort = {field: index};
  }

  // handle zero column case (implies infinite columns)
  update.column.signal = `${columns}?${update.column.signal}:${index}`;

  // facet legend entries into sub-groups
  dataRef = {facet: {data: dataRef, name: 'value', groupby: Index}};

  spec = guideGroup(
    ScopeRole, null, name, dataRef, interactive,
    extendEncode(encode, entries, Skip), [symbols, labels]
  );
  spec.sort = sort;
  return spec;
}
/**
 * Builds the mark specification for a legend's symbol entries: a symbol
 * mark, a text label mark, and the faceted group that lays entries out in
 * rows/columns. `columns` is a signal expression string; a zero value
 * selects the infinite-columns layout. Entry groups are annotated with
 * row/column signal expressions plus a sort order so they render in the
 * correct sequence.
 * NOTE(review): the encode-channel wiring and generated signal strings are
 * intricate and order-dependent — code kept token-identical; comments and
 * formatting only.
 * @param {object} spec - legend specification.
 * @param {object} config - legend configuration defaults.
 * @param {object} userEncode - user-supplied encode blocks.
 * @param {object} dataRef - reference to the legend entry data set.
 * @param {*} columns - column-count signal expression.
 * @returns {object} group mark specification for the legend entries.
 */
function legendSymbolGroups(spec, config, userEncode, dataRef, columns) {
  var _ = lookup(spec, config),
      entries = userEncode.entries,
      interactive = !!(entries && entries.interactive),
      name = entries ? entries.name : undefined,
      height = _('clipHeight'),
      symbolOffset = _('symbolOffset'),
      valueRef = {data: 'value'},
      encode = {},
      xSignal = `${columns} ? datum.${Offset} : datum.${Size}`,
      yEncode = height ? encoder(height) : {field: Size},
      index = `datum.${Index}`,
      ncols = `max(1, ${columns})`,
      enter, update, labelOffset,
      symbols, labels, nrows, sort;

  yEncode.mult = 0.5;

  // -- LEGEND SYMBOLS --
  encode = {
    enter: enter = {
      opacity: zero,
      x: {signal: xSignal, mult: 0.5, offset: symbolOffset},
      y: yEncode
    },
    update: update = {
      opacity: one,
      x: enter.x,
      y: enter.y
    },
    exit: {
      opacity: zero
    }
  };

  var baseFill = null,
      baseStroke = null;
  if (!spec.fill) {
    baseFill = config.symbolBaseFillColor;
    baseStroke = config.symbolBaseStrokeColor;
  }

  addEncoders(encode, {
    fill: _('symbolFillColor', baseFill),
    shape: _('symbolType'),
    size: _('symbolSize'),
    stroke: _('symbolStrokeColor', baseStroke),
    strokeDash: _('symbolDash'),
    strokeDashOffset: _('symbolDashOffset'),
    strokeWidth: _('symbolStrokeWidth')
  }, { // update
    opacity: _('symbolOpacity')
  });

  LegendScales.forEach(function(scale) {
    if (spec[scale]) {
      update[scale] = enter[scale] = {scale: spec[scale], field: Value};
    }
  });

  symbols = guideMark(
    SymbolMark, LegendSymbolRole, null,
    Value, valueRef, encode, userEncode.symbols
  );
  if (height) symbols.clip = true;

  // -- LEGEND LABELS --
  labelOffset = encoder(symbolOffset);
  labelOffset.offset = _('labelOffset');

  encode = {
    enter: enter = {
      opacity: zero,
      x: {signal: xSignal, offset: labelOffset},
      y: yEncode
    },
    update: update = {
      opacity: one,
      text: {field: Label},
      x: enter.x,
      y: enter.y
    },
    exit: {
      opacity: zero
    }
  };

  addEncoders(encode, {
    align: _('labelAlign'),
    baseline: _('labelBaseline'),
    fill: _('labelColor'),
    fillOpacity: _('labelOpacity'),
    font: _('labelFont'),
    fontSize: _('labelFontSize'),
    fontStyle: _('labelFontStyle'),
    fontWeight: _('labelFontWeight'),
    limit: _('labelLimit')
  });

  labels = guideMark(
    TextMark, LegendLabelRole, GuideLabelStyle,
    Value, valueRef, encode, userEncode.labels
  );

  // -- LEGEND ENTRY GROUPS --
  encode = {
    enter: {
      noBound: {value: !height}, // ignore width/height in bounds calc
      width: zero,
      height: height ? encoder(height) : zero,
      opacity: zero
    },
    exit: {opacity: zero},
    update: update = {
      opacity: one,
      row: {signal: null},
      column: {signal: null}
    }
  };

  // annotate and sort groups to ensure correct ordering
  if (_.isVertical(true)) {
    nrows = `ceil(item.mark.items.length / ${ncols})`;
    update.row.signal = `${index}%${nrows}`;
    update.column.signal = `floor(${index} / ${nrows})`;
    sort = {field: ['row', index]};
  } else {
    update.row.signal = `floor(${index} / ${ncols})`;
    update.column.signal = `${index} % ${ncols}`;
    sort = {field: index};
  }

  // handle zero column case (implies infinite columns)
  update.column.signal = `${columns}?${update.column.signal}:${index}`;

  // facet legend entries into sub-groups
  dataRef = {facet: {data: dataRef, name: 'value', groupby: Index}};

  spec = guideGroup(
    ScopeRole, null, name, dataRef, interactive,
    extendEncode(encode, entries, Skip), [symbols, labels]
  );
  spec.sort = sort;
  return spec;
}
JavaScript
/**
 * Parse a transform specification into a dataflow operator entry.
 * Unknown transform types are a hard error. When the spec names a signal,
 * a proxy for the new operator is registered under that signal name.
 * @param {object} spec - transform specification (requires `type`).
 * @param {object} scope - parser scope.
 * @returns {object} dataflow entry with transform metadata attached.
 */
function parseTransform(spec, scope) {
  const def = vegaDataflow.definition(spec.type);
  if (!def) {
    vegaUtil.error('Unrecognized transform type: ' + vegaUtil.stringValue(spec.type));
  }

  const t = entry(def.type.toLowerCase(), null, parseParameters(def, spec, scope));
  if (spec.signal) {
    scope.addSignal(spec.signal, scope.proxy(t));
  }
  t.metadata = def.metadata || {};
  return t;
}
/**
 * Parse a transform specification into a dataflow operator entry.
 * Unknown transform types are a hard error. When the spec names a signal,
 * a proxy for the new operator is registered under that signal name.
 * @param {object} spec - transform specification (requires `type`).
 * @param {object} scope - parser scope.
 * @returns {object} dataflow entry with transform metadata attached.
 */
function parseTransform(spec, scope) {
  const def = vegaDataflow.definition(spec.type);
  if (!def) {
    vegaUtil.error('Unrecognized transform type: ' + vegaUtil.stringValue(spec.type));
  }

  const t = entry(def.type.toLowerCase(), null, parseParameters(def, spec, scope));
  if (spec.signal) {
    scope.addSignal(spec.signal, scope.proxy(t));
  }
  t.metadata = def.metadata || {};
  return t;
}
JavaScript
/**
 * Resolve a single transform parameter from the spec.
 * Index parameters are handled specially; missing values are an error only
 * when the parameter is required; 'param' and 'projection' types resolve to
 * sub-parameter or projection references.
 * @param {object} def - parameter definition (name, type, array, required).
 * @param {object} spec - user specification.
 * @param {object} scope - parser scope.
 * @returns {*} resolved value, operator reference, or undefined.
 */
function parseParameter$1(def, spec, scope) {
  const value = spec[def.name];
  const type = def.type;

  if (type === 'index') {
    return parseIndexParameter(def, spec, scope);
  }

  if (value === undefined) {
    if (def.required) {
      vegaUtil.error('Missing required ' + vegaUtil.stringValue(spec.type) +
        ' parameter: ' + vegaUtil.stringValue(def.name));
    }
    return;
  }

  if (type === 'param') {
    return parseSubParameters(def, spec, scope);
  }

  if (type === 'projection') {
    return scope.projectionRef(spec[def.name]);
  }

  // Array-valued parameters resolve element-wise unless the whole array
  // is supplied as a single signal reference.
  if (def.array && !isSignal(value)) {
    return value.map(v => parameterValue(def, v, scope));
  }
  return parameterValue(def, value, scope);
}
/**
 * Resolve a single transform parameter from the spec.
 * Index parameters are handled specially; missing values are an error only
 * when the parameter is required; 'param' and 'projection' types resolve to
 * sub-parameter or projection references.
 * @param {object} def - parameter definition (name, type, array, required).
 * @param {object} spec - user specification.
 * @param {object} scope - parser scope.
 * @returns {*} resolved value, operator reference, or undefined.
 */
function parseParameter$1(def, spec, scope) {
  const value = spec[def.name];
  const type = def.type;

  if (type === 'index') {
    return parseIndexParameter(def, spec, scope);
  }

  if (value === undefined) {
    if (def.required) {
      vegaUtil.error('Missing required ' + vegaUtil.stringValue(spec.type) +
        ' parameter: ' + vegaUtil.stringValue(def.name));
    }
    return;
  }

  if (type === 'param') {
    return parseSubParameters(def, spec, scope);
  }

  if (type === 'projection') {
    return scope.projectionRef(spec[def.name]);
  }

  // Array-valued parameters resolve element-wise unless the whole array
  // is supplied as a single signal reference.
  if (def.array && !isSignal(value)) {
    return value.map(v => parameterValue(def, v, scope));
  }
  return parameterValue(def, value, scope);
}
JavaScript
/**
 * Resolve a parameter value to an operator reference or pass-through literal,
 * depending on the declared parameter type and the value's own shape.
 * @param {object} def - parameter definition.
 * @param {*} value - raw value from the spec.
 * @param {object} scope - parser scope.
 * @returns {*} resolved reference or the literal value.
 */
function parameterValue(def, value, scope) {
  const type = def.type;

  if (isSignal(value)) {
    // Signal-valued parameters resolve by type; expressions may not be signals.
    if (isExpr$1(type)) return vegaUtil.error('Expression references can not be signals.');
    if (isField(type)) return scope.fieldRef(value);
    if (isCompare(type)) return scope.compareRef(value);
    return scope.signalRef(value.signal);
  }

  const expr = def.expr || isField(type);
  if (expr && outerExpr(value)) return scope.exprRef(value.expr, value.as);
  if (expr && outerField(value)) return fieldRef(value.field, value.as);
  if (isExpr$1(type)) return parseExpression(value, scope);
  if (isData(type)) return ref(scope.getData(value).values);
  if (isField(type)) return fieldRef(value);
  if (isCompare(type)) return scope.compareRef(value);
  return value;
}
/**
 * Resolve a parameter value to an operator reference or pass-through literal,
 * depending on the declared parameter type and the value's own shape.
 * @param {object} def - parameter definition.
 * @param {*} value - raw value from the spec.
 * @param {object} scope - parser scope.
 * @returns {*} resolved reference or the literal value.
 */
function parameterValue(def, value, scope) {
  const type = def.type;

  if (isSignal(value)) {
    // Signal-valued parameters resolve by type; expressions may not be signals.
    if (isExpr$1(type)) return vegaUtil.error('Expression references can not be signals.');
    if (isField(type)) return scope.fieldRef(value);
    if (isCompare(type)) return scope.compareRef(value);
    return scope.signalRef(value.signal);
  }

  const expr = def.expr || isField(type);
  if (expr && outerExpr(value)) return scope.exprRef(value.expr, value.as);
  if (expr && outerField(value)) return fieldRef(value.field, value.as);
  if (isExpr$1(type)) return parseExpression(value, scope);
  if (isData(type)) return ref(scope.getData(value).values);
  if (isField(type)) return fieldRef(value);
  if (isCompare(type)) return scope.compareRef(value);
  return value;
}
JavaScript
/**
 * Resolve a 'param'-typed parameter. Array-valued parameters must be literal
 * arrays (never signals) and are resolved element-wise.
 * @param {object} def - parameter definition.
 * @param {object} spec - user specification.
 * @param {object} scope - parser scope.
 * @returns {*} resolved sub-parameter reference(s).
 */
function parseSubParameters(def, spec, scope) {
  const value = spec[def.name];
  if (!def.array) {
    return parseSubParameter(def, value, scope);
  }
  if (!vegaUtil.isArray(value)) {
    // signals not allowed!
    vegaUtil.error('Expected an array of sub-parameters. Instead: ' + vegaUtil.stringValue(value));
  }
  return value.map(v => parseSubParameter(def, v, scope));
}
/**
 * Resolve a 'param'-typed parameter. Array-valued parameters must be literal
 * arrays (never signals) and are resolved element-wise.
 * @param {object} def - parameter definition.
 * @param {object} spec - user specification.
 * @param {object} scope - parser scope.
 * @returns {*} resolved sub-parameter reference(s).
 */
function parseSubParameters(def, spec, scope) {
  const value = spec[def.name];
  if (!def.array) {
    return parseSubParameter(def, value, scope);
  }
  if (!vegaUtil.isArray(value)) {
    // signals not allowed!
    vegaUtil.error('Expected an array of sub-parameters. Instead: ' + vegaUtil.stringValue(value));
  }
  return value.map(v => parseSubParameter(def, v, scope));
}
JavaScript
/**
 * Resolve one sub-parameter object: find the sub-parameter definition whose
 * key fields all match the supplied value, parse its parameters, and wrap
 * them in a Params transform.
 * @param {object} def - parent parameter definition with candidate `params`.
 * @param {object} value - sub-parameter object from the spec.
 * @param {object} scope - parser scope.
 * @returns {object} reference to the added Params operator.
 */
function parseSubParameter(def, value, scope) {
  // Find the candidate definition whose every key field matches the value.
  let match = null;
  for (const candidate of def.params) {
    let matches = true;
    for (const k in candidate.key) {
      if (candidate.key[k] !== value[k]) {
        matches = false;
        break;
      }
    }
    if (matches) {
      match = candidate;
      break;
    }
  }

  // raise error if matching key not found
  if (!match) vegaUtil.error('Unsupported parameter: ' + vegaUtil.stringValue(value));

  // parse params, create Params transform, return ref
  const params = vegaUtil.extend(parseParameters(match, value, scope), match.key);
  return ref(scope.add(Params(params)));
}
/**
 * Resolve one sub-parameter object: find the sub-parameter definition whose
 * key fields all match the supplied value, parse its parameters, and wrap
 * them in a Params transform.
 * @param {object} def - parent parameter definition with candidate `params`.
 * @param {object} value - sub-parameter object from the spec.
 * @param {object} scope - parser scope.
 * @returns {object} reference to the added Params operator.
 */
function parseSubParameter(def, value, scope) {
  // Find the candidate definition whose every key field matches the value.
  let match = null;
  for (const candidate of def.params) {
    let matches = true;
    for (const k in candidate.key) {
      if (candidate.key[k] !== value[k]) {
        matches = false;
        break;
      }
    }
    if (matches) {
      match = candidate;
      break;
    }
  }

  // raise error if matching key not found
  if (!match) vegaUtil.error('Unsupported parameter: ' + vegaUtil.stringValue(value));

  // parse params, create Params transform, return ref
  const params = vegaUtil.extend(parseParameters(match, value, scope), match.key);
  return ref(scope.add(Params(params)));
}
JavaScript
/**
 * Analyze a data set definition and assemble the ordered list of dataflow
 * operator entries that implement it. Three input kinds are handled:
 * inline values, url loads (each via a dynamic loader when values/url/format
 * contain signals), and derivation from one or more upstream data sets.
 * Collect operators are inserted wherever a transform needs materialized
 * tuples, and the pipeline always terminates with a Sieve operator.
 * NOTE(review): the exact operator ordering is load-bearing for the callers
 * of this function — code kept token-identical; comments/formatting only.
 * @param {object} data - data set definition from the spec.
 * @param {object} scope - parser scope used to resolve references.
 * @param {Array} ops - parsed transform entries for this data set.
 * @returns {Array} ordered operator entries (slot 0 becomes a Relay when the
 *   data derives from upstream sources).
 */
function analyze(data, scope, ops) {
  var output = [],
      source = null,
      modify = false,
      generate = false,
      upstream, i, n, t, m;

  if (data.values) {
    // hard-wired input data set
    if (hasSignal(data.values) || hasSignal(data.format)) {
      // if either values or format has signal, use dynamic loader
      output.push(load(scope, data));
      output.push(source = collect());
    } else {
      // otherwise, ingest upon dataflow init
      output.push(source = collect({
        $ingest: data.values,
        $format: data.format
      }));
    }
  } else if (data.url) {
    // load data from external source
    if (hasSignal(data.url) || hasSignal(data.format)) {
      // if either url or format has signal, use dynamic loader
      output.push(load(scope, data));
      output.push(source = collect());
    } else {
      // otherwise, request load upon dataflow init
      output.push(source = collect({
        $request: data.url,
        $format: data.format
      }));
    }
  } else if (data.source) {
    // derives from one or more other data sets
    source = upstream = vegaUtil.array(data.source).map(function(d) {
      return ref(scope.getData(d).output);
    });
    output.push(null); // populate later
  }

  // scan data transforms, add collectors as needed
  for (i=0, n=ops.length; i<n; ++i) {
    t = ops[i];
    m = t.metadata;
    if (!source && !m.source) {
      output.push(source = collect());
    }
    output.push(t);
    if (m.generates) generate = true;
    if (m.modifies && !generate) modify = true;
    if (m.source) source = t;
    else if (m.changes) source = null;
  }

  if (upstream) {
    n = upstream.length - 1;
    output[0] = Relay({
      derive: modify,
      pulse: n ? upstream : upstream[0]
    });
    if (modify || n) {
      // collect derived and multi-pulse tuples
      output.splice(1, 0, collect());
    }
  }

  if (!source) output.push(collect());
  output.push(Sieve({}));
  return output;
}
/**
 * Analyze a data set definition and assemble the ordered list of dataflow
 * operator entries that implement it. Three input kinds are handled:
 * inline values, url loads (each via a dynamic loader when values/url/format
 * contain signals), and derivation from one or more upstream data sets.
 * Collect operators are inserted wherever a transform needs materialized
 * tuples, and the pipeline always terminates with a Sieve operator.
 * NOTE(review): the exact operator ordering is load-bearing for the callers
 * of this function — code kept token-identical; comments/formatting only.
 * @param {object} data - data set definition from the spec.
 * @param {object} scope - parser scope used to resolve references.
 * @param {Array} ops - parsed transform entries for this data set.
 * @returns {Array} ordered operator entries (slot 0 becomes a Relay when the
 *   data derives from upstream sources).
 */
function analyze(data, scope, ops) {
  var output = [],
      source = null,
      modify = false,
      generate = false,
      upstream, i, n, t, m;

  if (data.values) {
    // hard-wired input data set
    if (hasSignal(data.values) || hasSignal(data.format)) {
      // if either values or format has signal, use dynamic loader
      output.push(load(scope, data));
      output.push(source = collect());
    } else {
      // otherwise, ingest upon dataflow init
      output.push(source = collect({
        $ingest: data.values,
        $format: data.format
      }));
    }
  } else if (data.url) {
    // load data from external source
    if (hasSignal(data.url) || hasSignal(data.format)) {
      // if either url or format has signal, use dynamic loader
      output.push(load(scope, data));
      output.push(source = collect());
    } else {
      // otherwise, request load upon dataflow init
      output.push(source = collect({
        $request: data.url,
        $format: data.format
      }));
    }
  } else if (data.source) {
    // derives from one or more other data sets
    source = upstream = vegaUtil.array(data.source).map(function(d) {
      return ref(scope.getData(d).output);
    });
    output.push(null); // populate later
  }

  // scan data transforms, add collectors as needed
  for (i=0, n=ops.length; i<n; ++i) {
    t = ops[i];
    m = t.metadata;
    if (!source && !m.source) {
      output.push(source = collect());
    }
    output.push(t);
    if (m.generates) generate = true;
    if (m.modifies && !generate) modify = true;
    if (m.source) source = t;
    else if (m.changes) source = null;
  }

  if (upstream) {
    n = upstream.length - 1;
    output[0] = Relay({
      derive: modify,
      pulse: n ? upstream : upstream[0]
    });
    if (modify || n) {
      // collect derived and multi-pulse tuples
      output.splice(1, 0, collect());
    }
  }

  if (!source) output.push(collect());
  output.push(Sieve({}));
  return output;
}
JavaScript
collection(collectionPath) { path_1.validateResourcePath('collectionPath', collectionPath); const path = this._path.append(collectionPath); if (!path.isCollection) { throw new Error(`Value for argument "collectionPath" must point to a collection, but was "${collectionPath}". Your path does not contain an odd number of components.`); } return new CollectionReference(this._firestore, path); }
collection(collectionPath) { path_1.validateResourcePath('collectionPath', collectionPath); const path = this._path.append(collectionPath); if (!path.isCollection) { throw new Error(`Value for argument "collectionPath" must point to a collection, but was "${collectionPath}". Your path does not contain an odd number of components.`); } return new CollectionReference(this._firestore, path); }
JavaScript
update(dataOrField, ...preconditionOrValues) { validate_1.validateMinNumberOfArguments('DocumentReference.update', arguments, 1); const writeBatch = new write_batch_1.WriteBatch(this._firestore); return writeBatch.update .apply(writeBatch, [this, dataOrField, ...preconditionOrValues]) .commit() .then(([writeResult]) => writeResult); }
update(dataOrField, ...preconditionOrValues) { validate_1.validateMinNumberOfArguments('DocumentReference.update', arguments, 1); const writeBatch = new write_batch_1.WriteBatch(this._firestore); return writeBatch.update .apply(writeBatch, [this, dataOrField, ...preconditionOrValues]) .commit() .then(([writeResult]) => writeResult); }
JavaScript
toProto() { if (typeof this.value === 'number' && isNaN(this.value)) { return { unaryFilter: { field: { fieldPath: this.field.formattedName, }, op: 'IS_NAN', }, }; } if (this.value === null) { return { unaryFilter: { field: { fieldPath: this.field.formattedName, }, op: 'IS_NULL', }, }; } return { fieldFilter: { field: { fieldPath: this.field.formattedName, }, op: this.op, value: this.serializer.encodeValue(this.value), }, }; }
toProto() { if (typeof this.value === 'number' && isNaN(this.value)) { return { unaryFilter: { field: { fieldPath: this.field.formattedName, }, op: 'IS_NAN', }, }; } if (this.value === null) { return { unaryFilter: { field: { fieldPath: this.field.formattedName, }, op: 'IS_NULL', }, }; } return { fieldFilter: { field: { fieldPath: this.field.formattedName, }, op: this.op, value: this.serializer.encodeValue(this.value), }, }; }
JavaScript
forEach(callback, thisArg) { validate_1.validateFunction('callback', callback); for (const doc of this.docs) { callback.call(thisArg, doc); } }
forEach(callback, thisArg) { validate_1.validateFunction('callback', callback); for (const doc of this.docs) { callback.call(thisArg, doc); } }
JavaScript
static forCollectionGroupQuery(collectionId) { return new QueryOptions( /*parentPath=*/ path_1.ResourcePath.EMPTY, collectionId, /*allDescendants=*/ true, /*fieldFilters=*/ [], /*fieldOrders=*/ []); }
static forCollectionGroupQuery(collectionId) { return new QueryOptions( /*parentPath=*/ path_1.ResourcePath.EMPTY, collectionId, /*allDescendants=*/ true, /*fieldFilters=*/ [], /*fieldOrders=*/ []); }
JavaScript
static forCollectionQuery(collectionRef) { return new QueryOptions(collectionRef.parent(), collectionRef.id, /*allDescendants=*/ false, /*fieldFilters=*/ [], /*fieldOrders=*/ []); }
static forCollectionQuery(collectionRef) { return new QueryOptions(collectionRef.parent(), collectionRef.id, /*allDescendants=*/ false, /*fieldFilters=*/ [], /*fieldOrders=*/ []); }
JavaScript
static _extractFieldValues(documentSnapshot, fieldOrders) { const fieldValues = []; for (const fieldOrder of fieldOrders) { if (path_1.FieldPath.documentId().isEqual(fieldOrder.field)) { fieldValues.push(documentSnapshot.ref); } else { const fieldValue = documentSnapshot.get(fieldOrder.field); if (fieldValue === undefined) { throw new Error(`Field "${fieldOrder.field}" is missing in the provided DocumentSnapshot. ` + 'Please provide a document that contains values for all specified ' + 'orderBy() and where() constraints.'); } else { fieldValues.push(fieldValue); } } } return fieldValues; }
static _extractFieldValues(documentSnapshot, fieldOrders) { const fieldValues = []; for (const fieldOrder of fieldOrders) { if (path_1.FieldPath.documentId().isEqual(fieldOrder.field)) { fieldValues.push(documentSnapshot.ref); } else { const fieldValue = documentSnapshot.get(fieldOrder.field); if (fieldValue === undefined) { throw new Error(`Field "${fieldOrder.field}" is missing in the provided DocumentSnapshot. ` + 'Please provide a document that contains values for all specified ' + 'orderBy() and where() constraints.'); } else { fieldValues.push(fieldValue); } } } return fieldValues; }
JavaScript
where(fieldPath, opStr, value) { path_1.validateFieldPath('fieldPath', fieldPath); opStr = validateQueryOperator('opStr', opStr, value); validateQueryValue('value', value); if (this._queryOptions.startAt || this._queryOptions.endAt) { throw new Error('Cannot specify a where() filter after calling startAt(), ' + 'startAfter(), endBefore() or endAt().'); } const path = path_1.FieldPath.fromArgument(fieldPath); if (path_1.FieldPath.documentId().isEqual(path)) { value = this.validateReference(value); } const fieldFilter = new FieldFilter(this._serializer, path, comparisonOperators[opStr], value); const options = this._queryOptions.with({ fieldFilters: this._queryOptions.fieldFilters.concat(fieldFilter), }); return new Query(this._firestore, options); }
where(fieldPath, opStr, value) { path_1.validateFieldPath('fieldPath', fieldPath); opStr = validateQueryOperator('opStr', opStr, value); validateQueryValue('value', value); if (this._queryOptions.startAt || this._queryOptions.endAt) { throw new Error('Cannot specify a where() filter after calling startAt(), ' + 'startAfter(), endBefore() or endAt().'); } const path = path_1.FieldPath.fromArgument(fieldPath); if (path_1.FieldPath.documentId().isEqual(path)) { value = this.validateReference(value); } const fieldFilter = new FieldFilter(this._serializer, path, comparisonOperators[opStr], value); const options = this._queryOptions.with({ fieldFilters: this._queryOptions.fieldFilters.concat(fieldFilter), }); return new Query(this._firestore, options); }
JavaScript
select(...fieldPaths) { const fields = []; if (fieldPaths.length === 0) { fields.push({ fieldPath: path_1.FieldPath.documentId().formattedName }); } else { for (let i = 0; i < fieldPaths.length; ++i) { path_1.validateFieldPath(i, fieldPaths[i]); fields.push({ fieldPath: path_1.FieldPath.fromArgument(fieldPaths[i]).formattedName, }); } } const options = this._queryOptions.with({ projection: { fields } }); return new Query(this._firestore, options); }
select(...fieldPaths) { const fields = []; if (fieldPaths.length === 0) { fields.push({ fieldPath: path_1.FieldPath.documentId().formattedName }); } else { for (let i = 0; i < fieldPaths.length; ++i) { path_1.validateFieldPath(i, fieldPaths[i]); fields.push({ fieldPath: path_1.FieldPath.fromArgument(fieldPaths[i]).formattedName, }); } } const options = this._queryOptions.with({ projection: { fields } }); return new Query(this._firestore, options); }
JavaScript
limit(limit) { validate_1.validateInteger('limit', limit); const options = this._queryOptions.with({ limit }); return new Query(this._firestore, options); }
limit(limit) { validate_1.validateInteger('limit', limit); const options = this._queryOptions.with({ limit }); return new Query(this._firestore, options); }
JavaScript
createImplicitOrderBy(cursorValuesOrDocumentSnapshot) { if (!Query._isDocumentSnapshot(cursorValuesOrDocumentSnapshot)) { return this._queryOptions.fieldOrders; } const fieldOrders = this._queryOptions.fieldOrders.slice(); let hasDocumentId = false; if (fieldOrders.length === 0) { // If no explicit ordering is specified, use the first inequality to // define an implicit order. for (const fieldFilter of this._queryOptions.fieldFilters) { if (fieldFilter.isInequalityFilter()) { fieldOrders.push(new FieldOrder(fieldFilter.field)); break; } } } else { for (const fieldOrder of fieldOrders) { if (path_1.FieldPath.documentId().isEqual(fieldOrder.field)) { hasDocumentId = true; } } } if (!hasDocumentId) { // Add implicit sorting by name, using the last specified direction. const lastDirection = fieldOrders.length === 0 ? directionOperators.ASC : fieldOrders[fieldOrders.length - 1].direction; fieldOrders.push(new FieldOrder(path_1.FieldPath.documentId(), lastDirection)); } return fieldOrders; }
createImplicitOrderBy(cursorValuesOrDocumentSnapshot) { if (!Query._isDocumentSnapshot(cursorValuesOrDocumentSnapshot)) { return this._queryOptions.fieldOrders; } const fieldOrders = this._queryOptions.fieldOrders.slice(); let hasDocumentId = false; if (fieldOrders.length === 0) { // If no explicit ordering is specified, use the first inequality to // define an implicit order. for (const fieldFilter of this._queryOptions.fieldFilters) { if (fieldFilter.isInequalityFilter()) { fieldOrders.push(new FieldOrder(fieldFilter.field)); break; } } } else { for (const fieldOrder of fieldOrders) { if (path_1.FieldPath.documentId().isEqual(fieldOrder.field)) { hasDocumentId = true; } } } if (!hasDocumentId) { // Add implicit sorting by name, using the last specified direction. const lastDirection = fieldOrders.length === 0 ? directionOperators.ASC : fieldOrders[fieldOrders.length - 1].direction; fieldOrders.push(new FieldOrder(path_1.FieldPath.documentId(), lastDirection)); } return fieldOrders; }
JavaScript
createCursor(fieldOrders, cursorValuesOrDocumentSnapshot, before) { let fieldValues; if (Query._isDocumentSnapshot(cursorValuesOrDocumentSnapshot)) { fieldValues = Query._extractFieldValues(cursorValuesOrDocumentSnapshot[0], fieldOrders); } else { fieldValues = cursorValuesOrDocumentSnapshot; } if (fieldValues.length > fieldOrders.length) { throw new Error('Too many cursor values specified. The specified ' + 'values must match the orderBy() constraints of the query.'); } const options = { values: [] }; if (before) { options.before = true; } for (let i = 0; i < fieldValues.length; ++i) { let fieldValue = fieldValues[i]; if (path_1.FieldPath.documentId().isEqual(fieldOrders[i].field)) { fieldValue = this.validateReference(fieldValue); } validateQueryValue(i, fieldValue); options.values.push(fieldValue); } return options; }
createCursor(fieldOrders, cursorValuesOrDocumentSnapshot, before) { let fieldValues; if (Query._isDocumentSnapshot(cursorValuesOrDocumentSnapshot)) { fieldValues = Query._extractFieldValues(cursorValuesOrDocumentSnapshot[0], fieldOrders); } else { fieldValues = cursorValuesOrDocumentSnapshot; } if (fieldValues.length > fieldOrders.length) { throw new Error('Too many cursor values specified. The specified ' + 'values must match the orderBy() constraints of the query.'); } const options = { values: [] }; if (before) { options.before = true; } for (let i = 0; i < fieldValues.length; ++i) { let fieldValue = fieldValues[i]; if (path_1.FieldPath.documentId().isEqual(fieldOrders[i].field)) { fieldValue = this.validateReference(fieldValue); } validateQueryValue(i, fieldValue); options.values.push(fieldValue); } return options; }
JavaScript
validateReference(val) { const basePath = this._queryOptions.allDescendants ? this._queryOptions.parentPath : this._queryOptions.parentPath.append(this._queryOptions.collectionId); let reference; if (typeof val === 'string') { const path = basePath.append(val); if (this._queryOptions.allDescendants) { if (!path.isDocument) { throw new Error('When querying a collection group and ordering by ' + 'FieldPath.documentId(), the corresponding value must result in ' + `a valid document path, but '${val}' is not because it ` + 'contains an odd number of segments.'); } } else if (val.indexOf('/') !== -1) { throw new Error('When querying a collection and ordering by FieldPath.documentId(), ' + `the corresponding value must be a plain document ID, but '${val}' ` + 'contains a slash.'); } reference = new DocumentReference(this._firestore, basePath.append(val)); } else if (val instanceof DocumentReference) { reference = val; if (!basePath.isPrefixOf(reference._path)) { throw new Error(`"${reference.path}" is not part of the query result set and ` + 'cannot be used as a query boundary.'); } } else { throw new Error('The corresponding value for FieldPath.documentId() must be a ' + 'string or a DocumentReference.'); } if (!this._queryOptions.allDescendants && reference._path.parent().compareTo(basePath) !== 0) { throw new Error('Only a direct child can be used as a query boundary. ' + `Found: "${reference.path}".`); } return reference; }
validateReference(val) { const basePath = this._queryOptions.allDescendants ? this._queryOptions.parentPath : this._queryOptions.parentPath.append(this._queryOptions.collectionId); let reference; if (typeof val === 'string') { const path = basePath.append(val); if (this._queryOptions.allDescendants) { if (!path.isDocument) { throw new Error('When querying a collection group and ordering by ' + 'FieldPath.documentId(), the corresponding value must result in ' + `a valid document path, but '${val}' is not because it ` + 'contains an odd number of segments.'); } } else if (val.indexOf('/') !== -1) { throw new Error('When querying a collection and ordering by FieldPath.documentId(), ' + `the corresponding value must be a plain document ID, but '${val}' ` + 'contains a slash.'); } reference = new DocumentReference(this._firestore, basePath.append(val)); } else if (val instanceof DocumentReference) { reference = val; if (!basePath.isPrefixOf(reference._path)) { throw new Error(`"${reference.path}" is not part of the query result set and ` + 'cannot be used as a query boundary.'); } } else { throw new Error('The corresponding value for FieldPath.documentId() must be a ' + 'string or a DocumentReference.'); } if (!this._queryOptions.allDescendants && reference._path.parent().compareTo(basePath) !== 0) { throw new Error('Only a direct child can be used as a query boundary. ' + `Found: "${reference.path}".`); } return reference; }
JavaScript
startAt(...fieldValuesOrDocumentSnapshot) { validate_1.validateMinNumberOfArguments('Query.startAt', arguments, 1); const fieldOrders = this.createImplicitOrderBy(fieldValuesOrDocumentSnapshot); const startAt = this.createCursor(fieldOrders, fieldValuesOrDocumentSnapshot, true); const options = this._queryOptions.with({ fieldOrders, startAt }); return new Query(this._firestore, options); }
startAt(...fieldValuesOrDocumentSnapshot) { validate_1.validateMinNumberOfArguments('Query.startAt', arguments, 1); const fieldOrders = this.createImplicitOrderBy(fieldValuesOrDocumentSnapshot); const startAt = this.createCursor(fieldOrders, fieldValuesOrDocumentSnapshot, true); const options = this._queryOptions.with({ fieldOrders, startAt }); return new Query(this._firestore, options); }
JavaScript
startAfter(...fieldValuesOrDocumentSnapshot) { validate_1.validateMinNumberOfArguments('Query.startAfter', arguments, 1); const fieldOrders = this.createImplicitOrderBy(fieldValuesOrDocumentSnapshot); const startAt = this.createCursor(fieldOrders, fieldValuesOrDocumentSnapshot, false); const options = this._queryOptions.with({ fieldOrders, startAt }); return new Query(this._firestore, options); }
startAfter(...fieldValuesOrDocumentSnapshot) { validate_1.validateMinNumberOfArguments('Query.startAfter', arguments, 1); const fieldOrders = this.createImplicitOrderBy(fieldValuesOrDocumentSnapshot); const startAt = this.createCursor(fieldOrders, fieldValuesOrDocumentSnapshot, false); const options = this._queryOptions.with({ fieldOrders, startAt }); return new Query(this._firestore, options); }
JavaScript
endBefore(...fieldValuesOrDocumentSnapshot) { validate_1.validateMinNumberOfArguments('Query.endBefore', arguments, 1); const fieldOrders = this.createImplicitOrderBy(fieldValuesOrDocumentSnapshot); const endAt = this.createCursor(fieldOrders, fieldValuesOrDocumentSnapshot, true); const options = this._queryOptions.with({ fieldOrders, endAt }); return new Query(this._firestore, options); }
endBefore(...fieldValuesOrDocumentSnapshot) { validate_1.validateMinNumberOfArguments('Query.endBefore', arguments, 1); const fieldOrders = this.createImplicitOrderBy(fieldValuesOrDocumentSnapshot); const endAt = this.createCursor(fieldOrders, fieldValuesOrDocumentSnapshot, true); const options = this._queryOptions.with({ fieldOrders, endAt }); return new Query(this._firestore, options); }
JavaScript
endAt(...fieldValuesOrDocumentSnapshot) { validate_1.validateMinNumberOfArguments('Query.endAt', arguments, 1); const fieldOrders = this.createImplicitOrderBy(fieldValuesOrDocumentSnapshot); const endAt = this.createCursor(fieldOrders, fieldValuesOrDocumentSnapshot, false); const options = this._queryOptions.with({ fieldOrders, endAt }); return new Query(this._firestore, options); }
endAt(...fieldValuesOrDocumentSnapshot) { validate_1.validateMinNumberOfArguments('Query.endAt', arguments, 1); const fieldOrders = this.createImplicitOrderBy(fieldValuesOrDocumentSnapshot); const endAt = this.createCursor(fieldOrders, fieldValuesOrDocumentSnapshot, false); const options = this._queryOptions.with({ fieldOrders, endAt }); return new Query(this._firestore, options); }