text
stringlengths
2
1.04M
meta
dict
""" The Tornado Framework By Ali Pesaranghader University of Ottawa, Ontario, Canada E-mail: apesaran -at- uottawa -dot- ca / alipsgh -at- gmail -dot- com --- *** The Cumulative Sum (CUSUM) Method Implementation *** Paper: Page, Ewan S. "Continuous inspection schemes." Published in: Biometrika 41.1/2 (1954): 100-115. URL: http://www.jstor.org/stable/2333009 """ from dictionary.tornado_dictionary import TornadoDic from drift_detection.detector import SuperDetector class CUSUM(SuperDetector): """The Cumulative Sum (CUSUM) drift detection method class.""" DETECTOR_NAME = TornadoDic.CUSUM def __init__(self, min_instance=30, delta=0.005, lambda_=50): super().__init__() self.MINIMUM_NUM_INSTANCES = min_instance self.m_n = 1 self.x_mean = 0 self.sum = 0 self.delta = delta self.lambda_ = lambda_ def run(self, pr): pr = 1 if pr is False else 0 warning_status = False drift_status = False # 1. UPDATING STATS self.x_mean = self.x_mean + (pr - self.x_mean) / self.m_n self.sum = max([0, self.sum + pr - self.x_mean - self.delta]) self.m_n += 1 # 2. UPDATING WARNING AND DRIFT STATUSES if self.m_n >= self.MINIMUM_NUM_INSTANCES: if self.sum > self.lambda_: drift_status = True return warning_status, drift_status def reset(self): super().reset() self.m_n = 1 self.x_mean = 0 self.sum = 0 def get_settings(self): return [str(self.MINIMUM_NUM_INSTANCES) + "." + str(self.delta) + "." + str(self.lambda_), "$n_{min}$:" + str(self.MINIMUM_NUM_INSTANCES) + ", " + "$\delta$:" + str(self.delta).upper() + ", " + "$\lambda$:" + str(self.lambda_).upper()]
{ "content_hash": "3427bb2c643fbc0773149f3037b4f810", "timestamp": "", "source": "github", "line_count": 63, "max_line_length": 98, "avg_line_length": 30.206349206349206, "alnum_prop": 0.5643720441408303, "repo_name": "alipsgh/tornado", "id": "924f8dd801ae83a58b56e0228f112759ab0a8c5a", "size": "1903", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "drift_detection/cusum.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "224524" } ], "symlink_target": "" }
using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Runtime.InteropServices; using Microsoft.WindowsAPICodePack.Shell.Resources; using MS.WindowsAPICodePack.Internal; namespace Microsoft.WindowsAPICodePack.Shell { /// <summary> /// An exception thrown when an error occurs while dealing with ShellObjects. /// </summary> [Serializable] public class ShellException : ExternalException { /// <summary> /// Default constructor. /// </summary> public ShellException() { } /// <summary> /// Initializes a new exception using an HResult /// </summary> /// <param name="result">HResult error</param> internal ShellException(HResult result) : this((int)result) { } /// <summary> /// Initializes an excpetion with a custom message. /// </summary> /// <param name="message">Custom message</param> public ShellException(string message) : base(message) { } /// <summary> /// Initializes an exception with custom message and inner exception. /// </summary> /// <param name="message">Custom message</param> /// <param name="innerException">The original exception that preceded this exception</param> public ShellException(string message, Exception innerException) : base(message, innerException) { } /// <summary> /// Initializes an exception with custom message and error code. /// </summary> /// <param name="message">Custom message</param> /// <param name="errorCode">HResult error code</param> public ShellException(string message, int errorCode) : base(message, errorCode) { } /// <summary> /// Initializes an exception with custom message and error code. /// </summary> /// <param name="message"></param> /// <param name="errorCode"></param> internal ShellException(string message, HResult errorCode) : this(message, (int)errorCode) { } /// <summary> /// Initializes an exception with custom message and inner exception. 
/// </summary> /// <param name="errorCode">HRESULT of an operation</param> public ShellException(int errorCode) : base(LocalizedMessages.ShellExceptionDefaultText, errorCode) { } /// <summary> /// Initializes an exception from serialization info and a context. /// </summary> /// <param name="info"></param> /// <param name="context"></param> protected ShellException( System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context) : base(info, context) { } } }
{ "content_hash": "1381b51842a67d5e0b016ee8ce659382", "timestamp": "", "source": "github", "line_count": 81, "max_line_length": 102, "avg_line_length": 36.358024691358025, "alnum_prop": 0.5955857385398982, "repo_name": "manuth/EnhanceForm", "id": "fe5da6622fb4a6668b783f59b3ac6fe6f1f3d4e4", "size": "2947", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Microsoft.WindowsAPICodePack.Shell/Common/ShellException.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C#", "bytes": "3378631" } ], "symlink_target": "" }
package io.github.takzhanov.umbrella.hw01; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class Main { private static final Logger LOGGER = LoggerFactory.getLogger(Main.class); public static void main(String[] args) { final Double deltaDefault = 0.001; final Double delta = (args.length > 0) ? Double.parseDouble(args[0]) : deltaDefault; final Double gr = GoldenRatio.calc(delta); LOGGER.info("Golden Ration is {} with precision {}", gr, delta); } }
{ "content_hash": "be341638a9c0b870544f154994db131e", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 92, "avg_line_length": 34.266666666666666, "alnum_prop": 0.688715953307393, "repo_name": "takzhanov/otus-java-2017-10-takzhanov-yury", "id": "a4e3e70d8a801b669406d2e6749dfe9c4c93b55e", "size": "514", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "hw01-assembly/src/main/java/io/github/takzhanov/umbrella/hw01/Main.java", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "2004" }, { "name": "Java", "bytes": "133973" }, { "name": "JavaScript", "bytes": "886" }, { "name": "Roff", "bytes": "19192" }, { "name": "Shell", "bytes": "1638" } ], "symlink_target": "" }
FROM balenalib/odroid-xu4-alpine:3.10-run ENV NODE_VERSION 14.15.4 ENV YARN_VERSION 1.22.4 # Install dependencies RUN apk add --no-cache libgcc libstdc++ libuv \ && apk add --no-cache libssl1.0 || apk add --no-cache libssl1.1 RUN buildDeps='curl' \ && set -x \ && for key in \ 6A010C5166006599AA17F08146C2130DFD2497F5 \ ; do \ gpg --keyserver pgp.mit.edu --recv-keys "$key" || \ gpg --keyserver keyserver.pgp.com --recv-keys "$key" || \ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; \ done \ && apk add --no-cache $buildDeps \ && curl -SLO "http://resin-packages.s3.amazonaws.com/node/v$NODE_VERSION/node-v$NODE_VERSION-linux-alpine-armv7hf.tar.gz" \ && echo "9fc6c438cd4a893873c6bfa99e80a785e102123890506781c6f320c17928f4e7 node-v$NODE_VERSION-linux-alpine-armv7hf.tar.gz" | sha256sum -c - \ && tar -xzf "node-v$NODE_VERSION-linux-alpine-armv7hf.tar.gz" -C /usr/local --strip-components=1 \ && rm "node-v$NODE_VERSION-linux-alpine-armv7hf.tar.gz" \ && curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \ && curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \ && gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \ && mkdir -p /opt/yarn \ && tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \ && ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \ && ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \ && rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \ && npm config set unsafe-perm true -g --unsafe-perm \ && rm -rf /tmp/* CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. 
Here's the link: https://balena.io/docs"] RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/[email protected]" \ && echo "Running test-stack@node" \ && chmod +x [email protected] \ && bash [email protected] \ && rm -rf [email protected] RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Alpine Linux 3.10 \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v14.15.4, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info RUN echo $'#!/bin/bash\nbalena-info\nbusybox ln -sf /bin/busybox /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \ && chmod +x /bin/sh-shim \ && ln -f /bin/sh /bin/sh.real \ && ln -f /bin/sh-shim /bin/sh
{ "content_hash": "2e0d2a35411a7b8edae7b414dd68d674", "timestamp": "", "source": "github", "line_count": 48, "max_line_length": 697, "avg_line_length": 62.583333333333336, "alnum_prop": 0.7067243675099867, "repo_name": "nghiant2710/base-images", "id": "d934fe52cf38d5e896d99a5780ac28be35bfde0c", "size": "3025", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "balena-base-images/node/odroid-xu4/alpine/3.10/14.15.4/run/Dockerfile", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "144558581" }, { "name": "JavaScript", "bytes": "16316" }, { "name": "Shell", "bytes": "368690" } ], "symlink_target": "" }
/** @module ember-data */ import Ember from 'ember'; import { RecordArray, FilteredRecordArray, AdapterPopulatedRecordArray } from "ember-data/-private/system/record-arrays"; import OrderedSet from "ember-data/-private/system/ordered-set"; const { get, MapWithDefault, run: emberRun } = Ember; const { _addRecordToRecordArray, _recordWasChanged, _recordWasDeleted, array_flatten, array_remove, create, createAdapterPopulatedRecordArray, createFilteredRecordArray, createRecordArray, liveRecordArrayFor, populateLiveRecordArray, recordArraysForRecord, recordDidChange, recordWasLoaded, registerFilteredRecordArray, unregisterRecordArray, updateFilter, updateFilterRecordArray, updateRecordArrays } = heimdall.registerMonitor('recordArrayManager', '_addInternalModelToRecordArray', '_recordWasChanged', '_recordWasDeleted', 'array_fatten', 'array_remove', 'create', 'createAdapterPopulatedRecordArray', 'createFilteredRecordArray', 'createRecordArray', 'liveRecordArrayFor', 'populateLiveRecordArray', 'recordArraysForRecord', 'recordDidChange', 'recordWasLoaded', 'registerFilteredRecordArray', 'unregisterRecordArray', 'updateFilter', 'updateFilterRecordArray', 'updateRecordArrays' ); /** @class RecordArrayManager @namespace DS @private @extends Ember.Object */ export default Ember.Object.extend({ init() { heimdall.increment(create); this.filteredRecordArrays = MapWithDefault.create({ defaultValue() { return []; } }); this.liveRecordArrays = MapWithDefault.create({ defaultValue: modelClass => this.createRecordArray(modelClass) }); this.changedRecords = []; this._adapterPopulatedRecordArrays = []; }, recordDidChange(record) { heimdall.increment(recordDidChange); if (this.changedRecords.push(record) !== 1) { return; } emberRun.schedule('actions', this, this.updateRecordArrays); }, recordArraysForRecord(record) { heimdall.increment(recordArraysForRecord); record._recordArrays = record._recordArrays || OrderedSet.create(); return record._recordArrays; }, /** This method is invoked 
whenever data is loaded into the store by the adapter or updated by the adapter, or when a record has changed. It updates all record arrays that a record belongs to. To avoid thrashing, it only runs at most once per run loop. @method updateRecordArrays */ updateRecordArrays() { heimdall.increment(updateRecordArrays); this.changedRecords.forEach(internalModel => { if (internalModel.isDestroyed || internalModel.currentState.stateName === 'root.deleted.saved') { this._recordWasDeleted(internalModel); } else { this._recordWasChanged(internalModel); } }); this.changedRecords.length = 0; }, _recordWasDeleted(record) { heimdall.increment(_recordWasDeleted); let recordArrays = record._recordArrays; if (!recordArrays) { return; } recordArrays.forEach(array => array._removeInternalModels([record])); record._recordArrays = null; }, _recordWasChanged(record) { heimdall.increment(_recordWasChanged); let typeClass = record.type; let recordArrays = this.filteredRecordArrays.get(typeClass); let filter; recordArrays.forEach(array => { filter = get(array, 'filterFunction'); this.updateFilterRecordArray(array, filter, typeClass, record); }); }, //Need to update live arrays on loading recordWasLoaded(record) { heimdall.increment(recordWasLoaded); let typeClass = record.type; let recordArrays = this.filteredRecordArrays.get(typeClass); let filter; recordArrays.forEach(array => { filter = get(array, 'filterFunction'); this.updateFilterRecordArray(array, filter, typeClass, record); }); if (this.liveRecordArrays.has(typeClass)) { let liveRecordArray = this.liveRecordArrays.get(typeClass); this._addInternalModelToRecordArray(liveRecordArray, record); } }, /** Update an individual filter. 
@method updateFilterRecordArray @param {DS.FilteredRecordArray} array @param {Function} filter @param {DS.Model} modelClass @param {InternalModel} internalModel */ updateFilterRecordArray(array, filter, modelClass, internalModel) { heimdall.increment(updateFilterRecordArray); let shouldBeInArray = filter(internalModel.getRecord()); let recordArrays = this.recordArraysForRecord(internalModel); if (shouldBeInArray) { this._addInternalModelToRecordArray(array, internalModel); } else { recordArrays.delete(array); array._removeInternalModels([internalModel]); } }, _addInternalModelToRecordArray(array, internalModel) { heimdall.increment(_addRecordToRecordArray); let recordArrays = this.recordArraysForRecord(internalModel); if (!recordArrays.has(array)) { array._pushInternalModels([internalModel]); recordArrays.add(array); } }, syncLiveRecordArray(array, modelClass) { let hasNoPotentialDeletions = this.changedRecords.length === 0; let typeMap = this.store.typeMapFor(modelClass); let hasNoInsertionsOrRemovals = typeMap.records.length === array.length; /* Ideally the recordArrayManager has knowledge of the changes to be applied to liveRecordArrays, and is capable of strategically flushing those changes and applying small diffs if desired. However, until we've refactored recordArrayManager, this dirty check prevents us from unnecessarily wiping out live record arrays returned by peekAll. */ if (hasNoPotentialDeletions && hasNoInsertionsOrRemovals) { return; } this.populateLiveRecordArray(array, modelClass); }, populateLiveRecordArray(array, modelClass) { heimdall.increment(populateLiveRecordArray); let typeMap = this.store.typeMapFor(modelClass); let records = typeMap.records; let record; for (let i = 0; i < records.length; i++) { record = records[i]; if (!record.isDeleted() && !record.isEmpty()) { this._addInternalModelToRecordArray(array, record); } } }, /** This method is invoked if the `filterFunction` property is changed on a `DS.FilteredRecordArray`. 
It essentially re-runs the filter from scratch. This same method is invoked when the filter is created in th first place. @method updateFilter @param {Array} array @param {Class} modelClass @param {Function} filter */ updateFilter(array, modelClass, filter) { heimdall.increment(updateFilter); let typeMap = this.store.typeMapFor(modelClass); let records = typeMap.records; let record; for (let i = 0; i < records.length; i++) { record = records[i]; if (!record.isDeleted() && !record.isEmpty()) { this.updateFilterRecordArray(array, filter, modelClass, record); } } }, /** Get the `DS.RecordArray` for a type, which contains all loaded records of given type. @method liveRecordArrayFor @param {Class} typeClass @return {DS.RecordArray} */ liveRecordArrayFor(typeClass) { heimdall.increment(liveRecordArrayFor); return this.liveRecordArrays.get(typeClass); }, /** Create a `DS.RecordArray` for a type. @method createRecordArray @param {Class} modelClass @return {DS.RecordArray} */ createRecordArray(modelClass) { heimdall.increment(createRecordArray); return RecordArray.create({ type: modelClass, content: Ember.A(), store: this.store, isLoaded: true, manager: this }); }, /** Create a `DS.FilteredRecordArray` for a type and register it for updates. @method createFilteredRecordArray @param {DS.Model} typeClass @param {Function} filter @param {Object} query (optional @return {DS.FilteredRecordArray} */ createFilteredRecordArray(typeClass, filter, query) { heimdall.increment(createFilteredRecordArray); let array = FilteredRecordArray.create({ query: query, type: typeClass, content: Ember.A(), store: this.store, manager: this, filterFunction: filter }); this.registerFilteredRecordArray(array, typeClass, filter); return array; }, /** Create a `DS.AdapterPopulatedRecordArray` for a type with given query. 
@method createAdapterPopulatedRecordArray @param {DS.Model} typeClass @param {Object} query @return {DS.AdapterPopulatedRecordArray} */ createAdapterPopulatedRecordArray(typeClass, query) { heimdall.increment(createAdapterPopulatedRecordArray); let array = AdapterPopulatedRecordArray.create({ type: typeClass, query: query, content: Ember.A(), store: this.store, manager: this }); this._adapterPopulatedRecordArrays.push(array); return array; }, /** Register a RecordArray for a given type to be backed by a filter function. This will cause the array to update automatically when records of that type change attribute values or states. @method registerFilteredRecordArray @param {DS.RecordArray} array @param {DS.Model} typeClass @param {Function} filter */ registerFilteredRecordArray(array, typeClass, filter) { heimdall.increment(registerFilteredRecordArray); let recordArrays = this.filteredRecordArrays.get(typeClass); recordArrays.push(array); this.updateFilter(array, typeClass, filter); }, /** Unregister a RecordArray. So manager will not update this array. 
@method unregisterRecordArray @param {DS.RecordArray} array */ unregisterRecordArray(array) { heimdall.increment(unregisterRecordArray); let typeClass = array.type; // unregister filtered record array let recordArrays = this.filteredRecordArrays.get(typeClass); let removedFromFiltered = remove(recordArrays, array); // remove from adapter populated record array let removedFromAdapterPopulated = remove(this._adapterPopulatedRecordArrays, array); if (!removedFromFiltered && !removedFromAdapterPopulated) { // unregister live record array if (this.liveRecordArrays.has(typeClass)) { let liveRecordArrayForType = this.liveRecordArrayFor(typeClass); if (array === liveRecordArrayForType) { this.liveRecordArrays.delete(typeClass); } } } }, willDestroy() { this._super(...arguments); this.filteredRecordArrays.forEach(value => flatten(value).forEach(destroy)); this.liveRecordArrays.forEach(destroy); this._adapterPopulatedRecordArrays.forEach(destroy); } }); function destroy(entry) { entry.destroy(); } function flatten(list) { heimdall.increment(array_flatten); let length = list.length; let result = []; for (let i = 0; i < length; i++) { result = result.concat(list[i]); } return result; } function remove(array, item) { heimdall.increment(array_remove); let index = array.indexOf(item); if (index !== -1) { array.splice(index, 1); return true; } return false; }
{ "content_hash": "1f65da1114a86281a4e302cb954532cb", "timestamp": "", "source": "github", "line_count": 414, "max_line_length": 93, "avg_line_length": 27.00966183574879, "alnum_prop": 0.6986227866213558, "repo_name": "aldolipani/VisualPool", "id": "a1196df38a0437468113f3673efe452b9aa44c2d", "size": "11182", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "frontend/VisualPool/node_modules/ember-data/addon/-private/system/record-array-manager.js", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "1373" }, { "name": "CSS", "bytes": "179541" }, { "name": "HTML", "bytes": "32195" }, { "name": "Java", "bytes": "833" }, { "name": "JavaScript", "bytes": "5300458" }, { "name": "Scala", "bytes": "28402" }, { "name": "Shell", "bytes": "11894" }, { "name": "XSLT", "bytes": "62979" } ], "symlink_target": "" }
<div ng-controller="AboutCtrl"> <div class="main-container"> {{description}} </div> </div>
{ "content_hash": "f1daab1f9ee6fd5933b1753cb5ece6cd", "timestamp": "", "source": "github", "line_count": 5, "max_line_length": 32, "avg_line_length": 21.4, "alnum_prop": 0.5794392523364486, "repo_name": "makingsensetraining/angular-webapi", "id": "ac08fa48c44499a930abdafb10faa183801c43b7", "size": "109", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Source/Hiperion/Hiperion/App/views/about.html", "mode": "33188", "license": "mit", "language": [ { "name": "ASP", "bytes": "90" }, { "name": "C#", "bytes": "100891" }, { "name": "CSS", "bytes": "10324" }, { "name": "HTML", "bytes": "11491" }, { "name": "JavaScript", "bytes": "98988" } ], "symlink_target": "" }
describe ApplicationHelper::Button::StoragePerf do let(:view_context) { setup_view_context_with_sandbox({}) } let(:record) { FactoryBot.create(:storage) } let(:button) { described_class.new(view_context, {}, {'record' => record}, {}) } it_behaves_like 'a performance button', 'Datastore' end
{ "content_hash": "d06e29d4947c84405364b36df12761e7", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 82, "avg_line_length": 43, "alnum_prop": 0.6943521594684385, "repo_name": "ManageIQ/manageiq-ui-classic", "id": "356e200a7aba082ebd16852497737e5618b17bdd", "size": "301", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "spec/helpers/application_helper/buttons/storage_perf_spec.rb", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "181" }, { "name": "HTML", "bytes": "1292" }, { "name": "Haml", "bytes": "1139195" }, { "name": "JavaScript", "bytes": "1956794" }, { "name": "Ruby", "bytes": "5623827" }, { "name": "SCSS", "bytes": "114797" }, { "name": "Shell", "bytes": "1480" } ], "symlink_target": "" }
<div ng-controller="AuthenticationController"> <!--Seccion de encabezado --> <div class="text-center "><br><br> <div class="col-md-6 col-lg-6"> <img ng-src="/modules/core/client/img/logo_horizontal.jpg" width="100%" height="50%"> </div> <div class="col-md-6 col-lg-6"> <img ng-src="/modules/core/client/img/logo.png" width="90%" height="45%"> </div> </div> <br><br><br><br><br><br><br><br><br><br><br><br><br><br> <p class="hrline"> <!--Seccion de formulario --> <h3 class="col-md-12 text-center">Iniciar sesión</h3> <div class="col-xs-offset-2 col-xs-8 col-md-offset-4 col-md-4"> <form name="userForm" ng-submit="signin(userForm.$valid)" class="signin" novalidate autocomplete="off"> <fieldset> <div class="form-group" show-errors> <label for="username">Usuario</label> <input type="text" id="username" name="username" class="form-control" ng-model="credentials.username" placeholder="usuario" lowercase required> <div ng-messages="userForm.username.$error" role="alert"> <p class="help-block error-text" ng-message="required">Debe ingresar su nombre de usuario</p> </div> </div> <div class="form-group" show-errors> <label for="password">Contraseña</label> <input type="password" id="password" name="password" class="form-control" ng-model="credentials.password" placeholder="Contraseña" required> <div ng-messages="userForm.password.$error" role="alert"> <p class="help-block error-text" ng-message="required">Debe ingresar su contraseña</p> </div> </div> <div class="text-center form-group"> <button type="submit" class="btn btn-primary">Ingresar</button> </div> <alert type="danger" ng-show="error" class="text-center text-danger"> <span ng-bind="error"></span> </alert> </fieldset> </form> </div> </div>
{ "content_hash": "03d5545dd7616e378b93d9972e1707f3", "timestamp": "", "source": "github", "line_count": 42, "max_line_length": 153, "avg_line_length": 46.833333333333336, "alnum_prop": 0.6151499745805795, "repo_name": "jcmosqued/CDMI", "id": "efe56111bfb88470c1efd5c2659afac112a1a315", "size": "1971", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "modules/users/client/views/authentication/signin.client.view.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "1412" }, { "name": "HTML", "bytes": "58248" }, { "name": "JavaScript", "bytes": "298856" }, { "name": "Shell", "bytes": "685" } ], "symlink_target": "" }
export { LoaderComponent } from './loader.component';
{ "content_hash": "3110f0aafb91489e66eea959bdbb6a32", "timestamp": "", "source": "github", "line_count": 1, "max_line_length": 53, "avg_line_length": 54, "alnum_prop": 0.7407407407407407, "repo_name": "chantid/CovalentsShowcase", "id": "e8392a506d01c25e836afaacf4440997811fefef", "size": "54", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/app/trials/loader/index.ts", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "5970" }, { "name": "HTML", "bytes": "47808" }, { "name": "JavaScript", "bytes": "4227" }, { "name": "TypeScript", "bytes": "36135" } ], "symlink_target": "" }
from framework import * root.title("Normal/disabled outline color ($Revision: 1.3 $)") n = 200 for i in xrange(n): color = "blue" if randint(0, 1000) > 200: canv.create_line(coord(), coord(), coord(), coord(), fill=color) else: canv.create_line(coord(), coord(), coord(), coord(), fill=color, disabledfill="red", state=DISABLED) thread.start_new_thread(test, (canv, __file__, True)) root.mainloop()
{ "content_hash": "2aed9044f049929be5d5287c61f8bd29", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 62, "avg_line_length": 27.733333333333334, "alnum_prop": 0.65625, "repo_name": "WojciechMula/canvas2svg", "id": "b222483ff15ac8277419f2e0d8456301af6d4a9a", "size": "416", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/test-stroke.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Python", "bytes": "30920" } ], "symlink_target": "" }
// Copyright (c) 2010 Satoshi Nakamoto // Copyright (c) 2009-2012 The Bitcoin developers // Distributed under the MIT/X11 software license, see the accompanying // file COPYING or http://www.opensource.org/licenses/mit-license.php. #include "init.h" #include "util.h" #include "sync.h" #include "ui_interface.h" #include "base58.h" #include "bitcoinrpc.h" #include "db.h" #undef printf #include <boost/asio.hpp> #include <boost/asio/ip/v6_only.hpp> #include <boost/bind.hpp> #include <boost/filesystem.hpp> #include <boost/foreach.hpp> #include <boost/iostreams/concepts.hpp> #include <boost/iostreams/stream.hpp> #include <boost/algorithm/string.hpp> #include <boost/lexical_cast.hpp> #include <boost/asio/ssl.hpp> #include <boost/filesystem/fstream.hpp> #include <boost/shared_ptr.hpp> #include <list> #define printf OutputDebugStringF using namespace std; using namespace boost; using namespace boost::asio; using namespace json_spirit; void ThreadRPCServer2(void* parg); static std::string strRPCUserColonPass; const Object emptyobj; void ThreadRPCServer3(void* parg); static inline unsigned short GetDefaultRPCPort() { return GetBoolArg("-testnet", false) ? 
30802 : 12147; } Object JSONRPCError(int code, const string& message) { Object error; error.push_back(Pair("code", code)); error.push_back(Pair("message", message)); return error; } void RPCTypeCheck(const Array& params, const list<Value_type>& typesExpected, bool fAllowNull) { unsigned int i = 0; BOOST_FOREACH(Value_type t, typesExpected) { if (params.size() <= i) break; const Value& v = params[i]; if (!((v.type() == t) || (fAllowNull && (v.type() == null_type)))) { string err = strprintf("Expected type %s, got %s", Value_type_name[t], Value_type_name[v.type()]); throw JSONRPCError(RPC_TYPE_ERROR, err); } i++; } } void RPCTypeCheck(const Object& o, const map<string, Value_type>& typesExpected, bool fAllowNull) { BOOST_FOREACH(const PAIRTYPE(string, Value_type)& t, typesExpected) { const Value& v = find_value(o, t.first); if (!fAllowNull && v.type() == null_type) throw JSONRPCError(RPC_TYPE_ERROR, strprintf("Missing %s", t.first.c_str())); if (!((v.type() == t.second) || (fAllowNull && (v.type() == null_type)))) { string err = strprintf("Expected type %s for %s, got %s", Value_type_name[t.second], t.first.c_str(), Value_type_name[v.type()]); throw JSONRPCError(RPC_TYPE_ERROR, err); } } } int64 AmountFromValue(const Value& value) { double dAmount = value.get_real(); if (dAmount <= 0.0 || dAmount > MAX_MONEY) throw JSONRPCError(RPC_TYPE_ERROR, "Invalid amount"); int64 nAmount = roundint64(dAmount * COIN); if (!MoneyRange(nAmount)) throw JSONRPCError(RPC_TYPE_ERROR, "Invalid amount"); return nAmount; } Value ValueFromAmount(int64 amount) { return (double)amount / (double)COIN; } std::string HexBits(unsigned int nBits) { union { int32_t nBits; char cBits[4]; } uBits; uBits.nBits = htonl((int32_t)nBits); return HexStr(BEGIN(uBits.cBits), END(uBits.cBits)); } /// /// Note: This interface may still be subject to change. 
/// string CRPCTable::help(string strCommand) const { string strRet; set<rpcfn_type> setDone; for (map<string, const CRPCCommand*>::const_iterator mi = mapCommands.begin(); mi != mapCommands.end(); ++mi) { const CRPCCommand *pcmd = mi->second; string strMethod = mi->first; // We already filter duplicates, but these deprecated screw up the sort order if (strMethod.find("label") != string::npos) continue; if (strCommand != "" && strMethod != strCommand) continue; try { Array params; rpcfn_type pfn = pcmd->actor; if (setDone.insert(pfn).second) (*pfn)(params, true); } catch (std::exception& e) { // Help text is returned in an exception string strHelp = string(e.what()); if (strCommand == "") if (strHelp.find('\n') != string::npos) strHelp = strHelp.substr(0, strHelp.find('\n')); strRet += strHelp + "\n"; } } if (strRet == "") strRet = strprintf("help: unknown command: %s\n", strCommand.c_str()); strRet = strRet.substr(0,strRet.size()-1); return strRet; } Value help(const Array& params, bool fHelp) { if (fHelp || params.size() > 1) throw runtime_error( "help [command]\n" "List commands, or get help for a command."); string strCommand; if (params.size() > 0) strCommand = params[0].get_str(); return tableRPC.help(strCommand); } Value stop(const Array& params, bool fHelp) { if (fHelp || params.size() > 1) throw runtime_error( "stop <detach>\n" "<detach> is true or false to detach the database or not for this stop only\n" "Stop InformationCoin server (and possibly override the detachdb config value)."); // Shutdown will take long enough that the response should get back if (params.size() > 0) bitdb.SetDetach(params[0].get_bool()); StartShutdown(); return "InformationCoin server stopping"; } // // Call Table // static const CRPCCommand vRPCCommands[] = { // name function safemd unlocked // ------------------------ ----------------------- ------ -------- { "help", &help, true, true }, { "stop", &stop, true, true }, { "getblockcount", &getblockcount, true, false }, { 
"getconnectioncount", &getconnectioncount, true, false }, { "getpeerinfo", &getpeerinfo, true, false }, { "getdifficulty", &getdifficulty, true, false }, { "getgenerate", &getgenerate, true, false }, { "setgenerate", &setgenerate, true, false }, { "gethashespersec", &gethashespersec, true, false }, { "getinfo", &getinfo, true, false }, { "getmininginfo", &getmininginfo, true, false }, { "getnewaddress", &getnewaddress, true, false }, { "getnewpubkey", &getnewpubkey, true, false }, { "getaccountaddress", &getaccountaddress, true, false }, { "setaccount", &setaccount, true, false }, { "getaccount", &getaccount, false, false }, { "getaddressesbyaccount", &getaddressesbyaccount, true, false }, { "sendtoaddress", &sendtoaddress, false, false }, { "getreceivedbyaddress", &getreceivedbyaddress, false, false }, { "getreceivedbyaccount", &getreceivedbyaccount, false, false }, { "listreceivedbyaddress", &listreceivedbyaddress, false, false }, { "listreceivedbyaccount", &listreceivedbyaccount, false, false }, { "backupwallet", &backupwallet, true, false }, { "keypoolrefill", &keypoolrefill, true, false }, { "walletpassphrase", &walletpassphrase, true, false }, { "walletpassphrasechange", &walletpassphrasechange, false, false }, { "walletlock", &walletlock, true, false }, { "encryptwallet", &encryptwallet, false, false }, { "validateaddress", &validateaddress, true, false }, { "validatepubkey", &validatepubkey, true, false }, { "getbalance", &getbalance, false, false }, { "move", &movecmd, false, false }, { "sendfrom", &sendfrom, false, false }, { "sendmany", &sendmany, false, false }, { "addmultisigaddress", &addmultisigaddress, false, false }, { "getrawmempool", &getrawmempool, true, false }, { "getblock", &getblock, false, false }, { "getblockbynumber", &getblockbynumber, false, false }, { "getblockhash", &getblockhash, false, false }, { "gettransaction", &gettransaction, false, false }, { "listtransactions", &listtransactions, false, false }, { "listaddressgroupings", 
&listaddressgroupings, false, false }, { "signmessage", &signmessage, false, false }, { "verifymessage", &verifymessage, false, false }, { "getwork", &getwork, true, false }, { "getworkex", &getworkex, true, false }, { "listaccounts", &listaccounts, false, false }, { "settxfee", &settxfee, false, false }, { "getblocktemplate", &getblocktemplate, true, false }, { "submitblock", &submitblock, false, false }, { "listsinceblock", &listsinceblock, false, false }, { "dumpprivkey", &dumpprivkey, false, false }, { "importprivkey", &importprivkey, false, false }, { "listunspent", &listunspent, false, false }, { "getrawtransaction", &getrawtransaction, false, false }, { "createrawtransaction", &createrawtransaction, false, false }, { "decoderawtransaction", &decoderawtransaction, false, false }, { "signrawtransaction", &signrawtransaction, false, false }, { "sendrawtransaction", &sendrawtransaction, false, false }, { "getcheckpoint", &getcheckpoint, true, false }, { "reservebalance", &reservebalance, false, true}, { "checkwallet", &checkwallet, false, true}, { "repairwallet", &repairwallet, false, true}, { "resendtx", &resendtx, false, true}, { "makekeypair", &makekeypair, false, true}, { "sendalert", &sendalert, false, false}, }; CRPCTable::CRPCTable() { unsigned int vcidx; for (vcidx = 0; vcidx < (sizeof(vRPCCommands) / sizeof(vRPCCommands[0])); vcidx++) { const CRPCCommand *pcmd; pcmd = &vRPCCommands[vcidx]; mapCommands[pcmd->name] = pcmd; } } const CRPCCommand *CRPCTable::operator[](string name) const { map<string, const CRPCCommand*>::const_iterator it = mapCommands.find(name); if (it == mapCommands.end()) return NULL; return (*it).second; } // // HTTP protocol // // This ain't Apache. We're just using HTTP header for the length field // and to be compatible with other JSON-RPC implementations. 
//

// Build a complete HTTP/1.1 POST request (request line, headers, JSON body)
// for the RPC *client* side. Extra headers (e.g. Authorization) come in via
// mapRequestHeaders and are appended verbatim.
string HTTPPost(const string& strMsg, const map<string,string>& mapRequestHeaders)
{
    ostringstream s;
    s << "POST / HTTP/1.1\r\n"
      << "User-Agent: InformationCoin-json-rpc/" << FormatFullVersion() << "\r\n"
      << "Host: 127.0.0.1\r\n"
      << "Content-Type: application/json\r\n"
      << "Content-Length: " << strMsg.size() << "\r\n"
      << "Connection: close\r\n"
      << "Accept: application/json\r\n";
    BOOST_FOREACH(const PAIRTYPE(string, string)& item, mapRequestHeaders)
        s << item.first << ": " << item.second << "\r\n";
    s << "\r\n" << strMsg;
    return s.str();
}

// Current UTC time formatted per RFC 1123 ("Date:" header format).
// Temporarily forces the "C" locale so weekday/month names are POSIX English,
// then restores the caller's LC_TIME locale.
string rfc1123Time()
{
    char buffer[64];
    time_t now;
    time(&now);
    struct tm* now_gmt = gmtime(&now);
    string locale(setlocale(LC_TIME, NULL));
    setlocale(LC_TIME, "C"); // we want POSIX (aka "C") weekday/month strings
    strftime(buffer, sizeof(buffer), "%a, %d %b %Y %H:%M:%S +0000", now_gmt);
    setlocale(LC_TIME, locale.c_str());
    return string(buffer);
}

// Render a full HTTP response for the RPC *server* side.
// HTTP_UNAUTHORIZED gets a special canned HTML body; everything else carries
// strMsg as a JSON payload with the appropriate status line.
static string HTTPReply(int nStatus, const string& strMsg, bool keepalive)
{
    if (nStatus == HTTP_UNAUTHORIZED)
        return strprintf("HTTP/1.0 401 Authorization Required\r\n"
            "Date: %s\r\n"
            "Server: InformationCoin-json-rpc/%s\r\n"
            "WWW-Authenticate: Basic realm=\"jsonrpc\"\r\n"
            "Content-Type: text/html\r\n"
            // NOTE(review): hard-coded length must equal the byte count of the
            // HTML body below -- re-count if that body text ever changes.
            "Content-Length: 296\r\n"
            "\r\n"
            "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\"\r\n"
            "\"http://www.w3.org/TR/1999/REC-html401-19991224/loose.dtd\">\r\n"
            "<HTML>\r\n"
            "<HEAD>\r\n"
            "<TITLE>Error</TITLE>\r\n"
            "<META HTTP-EQUIV='Content-Type' CONTENT='text/html; charset=ISO-8859-1'>\r\n"
            "</HEAD>\r\n"
            "<BODY><H1>401 Unauthorized.</H1></BODY>\r\n"
            "</HTML>\r\n", rfc1123Time().c_str(), FormatFullVersion().c_str());
    const char *cStatus;
         if (nStatus == HTTP_OK) cStatus = "OK";
    else if (nStatus == HTTP_BAD_REQUEST) cStatus = "Bad Request";
    else if (nStatus == HTTP_FORBIDDEN) cStatus = "Forbidden";
    else if (nStatus == HTTP_NOT_FOUND) cStatus = "Not Found";
    else if (nStatus == HTTP_INTERNAL_SERVER_ERROR) cStatus = "Internal Server Error";
    else cStatus = "";
    return strprintf(
            "HTTP/1.1 %d %s\r\n"
            "Date: %s\r\n"
            "Connection: %s\r\n"
            // PRIszu expands to the platform's size_t printf specifier;
            // juxtaposed string literals (pre-C++11 style, no spaces).
            "Content-Length: %"PRIszu"\r\n"
            "Content-Type: application/json\r\n"
            "Server: InformationCoin-json-rpc/%s\r\n"
            "\r\n"
            "%s",
        nStatus,
        cStatus,
        rfc1123Time().c_str(),
        keepalive ? "keep-alive" : "close",
        strMsg.size(),
        FormatFullVersion().c_str(),
        strMsg.c_str());
}

// Parse the HTTP status line from `stream`.
// Returns the numeric status code; writes the minor protocol version
// ("HTTP/1.x" -> x) into `proto` (0 if no version string is found).
int ReadHTTPStatus(std::basic_istream<char>& stream, int &proto)
{
    string str;
    getline(stream, str);
    vector<string> vWords;
    boost::split(vWords, str, boost::is_any_of(" "));
    if (vWords.size() < 2)
        return HTTP_INTERNAL_SERVER_ERROR;
    proto = 0;
    const char *ver = strstr(str.c_str(), "HTTP/1.");
    if (ver != NULL)
        proto = atoi(ver+7); // character right after "HTTP/1."
    return atoi(vWords[1].c_str());
}

// Read HTTP headers until the blank line. Header names are lower-cased into
// mapHeadersRet. Returns the value of Content-Length (0 if absent).
int ReadHTTPHeader(std::basic_istream<char>& stream, map<string, string>& mapHeadersRet)
{
    int nLen = 0;
    loop
    {
        string str;
        std::getline(stream, str);
        if (str.empty() || str == "\r")
            break; // end of headers
        string::size_type nColon = str.find(":");
        if (nColon != string::npos)
        {
            string strHeader = str.substr(0, nColon);
            boost::trim(strHeader);
            boost::to_lower(strHeader);
            string strValue = str.substr(nColon+1);
            boost::trim(strValue);
            mapHeadersRet[strHeader] = strValue;
            if (strHeader == "content-length")
                nLen = atoi(strValue.c_str());
        }
    }
    return nLen;
}

// Read a complete HTTP message (status line, headers, body) from `stream`.
// Returns the status code; fills mapHeadersRet and strMessageRet.
// If the peer sent no Connection header, default to keep-alive for HTTP/1.1+
// and close for older protocols.
int ReadHTTP(std::basic_istream<char>& stream, map<string, string>& mapHeadersRet, string& strMessageRet)
{
    mapHeadersRet.clear();
    strMessageRet = "";

    // Read status
    int nProto = 0;
    int nStatus = ReadHTTPStatus(stream, nProto);

    // Read header
    int nLen = ReadHTTPHeader(stream, mapHeadersRet);
    if (nLen < 0 || nLen > (int)MAX_SIZE)
        return HTTP_INTERNAL_SERVER_ERROR; // refuse absurd Content-Length

    // Read message
    if (nLen > 0)
    {
        vector<char> vch(nLen);
        stream.read(&vch[0], nLen);
        strMessageRet = string(vch.begin(), vch.end());
    }

    string sConHdr = mapHeadersRet["connection"];

    if ((sConHdr != "close") && (sConHdr != "keep-alive"))
    {
        if (nProto >= 1)
            mapHeadersRet["connection"] = "keep-alive";
        else
            mapHeadersRet["connection"] = "close";
    }

    return nStatus;
}

// Check the HTTP Basic "Authorization" header against the configured
// rpcuser:rpcpassword pair.
// NOTE(review): operator== on std::string is not constant-time, so this
// comparison leaks timing information about the credential -- presumably
// acceptable here since -rpcallowip already restricts callers; verify.
bool HTTPAuthorized(map<string, string>& mapHeaders)
{
    string strAuth = mapHeaders["authorization"];
    if (strAuth.substr(0,6) != "Basic ")
        return false;
    string strUserPass64 = strAuth.substr(6); boost::trim(strUserPass64);
    string strUserPass = DecodeBase64(strUserPass64);
    return strUserPass == strRPCUserColonPass;
}

//
// JSON-RPC protocol.  Bitcoin speaks version 1.0 for maximum compatibility,
// but uses JSON-RPC 1.1/2.0 standards for parts of the 1.0 standard that were
// unspecified (HTTP errors and contents of 'error').
//
// 1.0 spec: http://json-rpc.org/wiki/specification
// 1.2 spec: http://groups.google.com/group/json-rpc/web/json-rpc-over-http
// http://www.codeproject.com/KB/recipes/JSON_Spirit.aspx
//

// Serialize one JSON-RPC request object: {"method":..., "params":..., "id":...}.
string JSONRPCRequest(const string& strMethod, const Array& params, const Value& id)
{
    Object request;
    request.push_back(Pair("method", strMethod));
    request.push_back(Pair("params", params));
    request.push_back(Pair("id", id));
    return write_string(Value(request), false) + "\n";
}

// Build a JSON-RPC reply object. Per the 1.0 spec, "result" is null whenever
// an error is present.
Object JSONRPCReplyObj(const Value& result, const Value& error, const Value& id)
{
    Object reply;
    if (error.type() != null_type)
        reply.push_back(Pair("result", Value::null));
    else
        reply.push_back(Pair("result", result));
    reply.push_back(Pair("error", error));
    reply.push_back(Pair("id", id));
    return reply;
}

// Serialized form of JSONRPCReplyObj, newline-terminated.
string JSONRPCReply(const Value& result, const Value& error, const Value& id)
{
    Object reply = JSONRPCReplyObj(result, error, id);
    return write_string(Value(reply), false) + "\n";
}

// Write an error reply (JSON body + matching HTTP status) to `stream`.
// Maps JSON-RPC error codes onto HTTP status codes; anything unrecognized
// becomes 500 Internal Server Error.
void ErrorReply(std::ostream& stream, const Object& objError, const Value& id)
{
    // Send error reply from json-rpc error object
    int nStatus = HTTP_INTERNAL_SERVER_ERROR;
    int code = find_value(objError, "code").get_int();
    if (code == RPC_INVALID_REQUEST) nStatus = HTTP_BAD_REQUEST;
    else if (code == RPC_METHOD_NOT_FOUND) nStatus = HTTP_NOT_FOUND;
    string strReply = JSONRPCReply(Value::null, objError, id);
    stream << HTTPReply(nStatus, strReply, false) << std::flush;
}

// Decide whether an incoming RPC connection from `address` is permitted:
// loopback always is; otherwise the address must wildcard-match one of the
// -rpcallowip patterns.
bool ClientAllowed(const boost::asio::ip::address& address)
{
    // Make sure that IPv4-compatible and IPv4-mapped IPv6 addresses are treated as IPv4 addresses
    if (address.is_v6()
     && (address.to_v6().is_v4_compatible()
      || address.to_v6().is_v4_mapped()))
        return ClientAllowed(address.to_v6().to_v4());

    std::string ipv4addr = address.to_string(); // (unused -- kept as-is)

    if (address == asio::ip::address_v4::loopback()
     || address == asio::ip::address_v6::loopback()
     || (address.is_v4()
         // Check whether IPv4 addresses match 127.0.0.0/8 (loopback subnet)
      && (address.to_v4().to_ulong() & 0xff000000) == 0x7f000000))
        return true;

    const string strAddress = address.to_string();
    const vector<string>& vAllow = mapMultiArgs["-rpcallowip"];
    BOOST_FOREACH(string strAllow, vAllow)
        if (WildcardMatch(strAddress, strAllow))
            return true;
    return false;
}

//
// IOStream device that speaks SSL but can also speak non-SSL
//
// Wraps an asio::ssl::stream so Boost.Iostreams can treat it as a plain
// bidirectional device. When fUseSSL is false, all I/O bypasses the TLS layer
// and goes straight to the underlying socket. The TLS handshake is performed
// lazily on first read (server role) or first write (client role).
template <typename Protocol>
class SSLIOStreamDevice : public iostreams::device<iostreams::bidirectional> {
public:
    SSLIOStreamDevice(asio::ssl::stream<typename Protocol::socket> &streamIn, bool fUseSSLIn) : stream(streamIn)
    {
        fUseSSL = fUseSSLIn;
        fNeedHandshake = fUseSSLIn;
    }

    // Perform the TLS handshake exactly once, in the given role.
    void handshake(ssl::stream_base::handshake_type role)
    {
        if (!fNeedHandshake) return;
        fNeedHandshake = false;
        stream.handshake(role);
    }
    std::streamsize read(char* s, std::streamsize n)
    {
        handshake(ssl::stream_base::server); // HTTPS servers read first
        if (fUseSSL) return stream.read_some(asio::buffer(s, n));
        return stream.next_layer().read_some(asio::buffer(s, n));
    }
    std::streamsize write(const char* s, std::streamsize n)
    {
        handshake(ssl::stream_base::client); // HTTPS clients write first
        if (fUseSSL) return asio::write(stream, asio::buffer(s, n));
        return asio::write(stream.next_layer(), asio::buffer(s, n));
    }
    // Resolve `server:port` and try each endpoint until one connects.
    bool connect(const std::string& server, const std::string& port)
    {
        ip::tcp::resolver resolver(stream.get_io_service());
        ip::tcp::resolver::query query(server.c_str(), port.c_str());
        ip::tcp::resolver::iterator endpoint_iterator = resolver.resolve(query);
        ip::tcp::resolver::iterator end;
        boost::system::error_code error = asio::error::host_not_found;
        while (error && endpoint_iterator != end)
        {
            stream.lowest_layer().close();
            stream.lowest_layer().connect(*endpoint_iterator++, error);
        }
        if (error)
            return false;
        return true;
    }

private:
    bool fNeedHandshake;
    bool fUseSSL;
    asio::ssl::stream<typename Protocol::socket>& stream;
};

// Abstract handle for one accepted RPC connection (protocol-agnostic).
class AcceptedConnection
{
public:
    virtual ~AcceptedConnection() {}

    virtual std::iostream& stream() = 0;
    virtual std::string peer_address_to_string() const = 0;
    virtual void close() = 0;
};

// Concrete accepted connection: owns the (optionally SSL) socket and exposes
// it as a std::iostream via SSLIOStreamDevice.
template <typename Protocol>
class AcceptedConnectionImpl : public AcceptedConnection
{
public:
    AcceptedConnectionImpl(
            asio::io_service& io_service,
            ssl::context &context,
            bool fUseSSL) :
        sslStream(io_service, context),
        _d(sslStream, fUseSSL),
        _stream(_d)
    {
    }

    virtual std::iostream& stream()
    {
        return _stream;
    }

    virtual std::string peer_address_to_string() const
    {
        return peer.address().to_string();
    }

    virtual void close()
    {
        _stream.close();
    }

    typename Protocol::endpoint peer;
    asio::ssl::stream<typename Protocol::socket> sslStream;

private:
    SSLIOStreamDevice<Protocol> _d;
    iostreams::stream< SSLIOStreamDevice<Protocol> > _stream;
};

// Thread entry point for the RPC listener: wraps ThreadRPCServer2 with the
// vnThreadsRunning bookkeeping and exception logging.
void ThreadRPCServer(void* parg)
{
    // Make this thread recognisable as the RPC listener
    RenameThread("bitcoin-rpclist");

    try
    {
        vnThreadsRunning[THREAD_RPCLISTENER]++;
        ThreadRPCServer2(parg);
        vnThreadsRunning[THREAD_RPCLISTENER]--;
    }
    catch (std::exception& e) {
        vnThreadsRunning[THREAD_RPCLISTENER]--;
        PrintException(&e, "ThreadRPCServer()");
    } catch (...) {
        vnThreadsRunning[THREAD_RPCLISTENER]--;
        PrintException(NULL, "ThreadRPCServer()");
    }
    printf("ThreadRPCServer exited\n");
}

// Forward declaration required for RPCListen
template <typename Protocol, typename SocketAcceptorService>
static void RPCAcceptHandler(boost::shared_ptr< basic_socket_acceptor<Protocol, SocketAcceptorService> > acceptor,
                             ssl::context& context,
                             bool fUseSSL,
                             AcceptedConnection* conn,
                             const boost::system::error_code& error);

/**
 * Sets up I/O resources to accept and handle a new connection.
 *
 * Allocates an AcceptedConnectionImpl (ownership passes to RPCAcceptHandler)
 * and posts an async accept on `acceptor`.
 */
template <typename Protocol, typename SocketAcceptorService>
static void RPCListen(boost::shared_ptr< basic_socket_acceptor<Protocol, SocketAcceptorService> > acceptor,
                   ssl::context& context,
                   const bool fUseSSL)
{
    // Accept connection
    AcceptedConnectionImpl<Protocol>* conn = new AcceptedConnectionImpl<Protocol>(acceptor->get_io_service(), context, fUseSSL);

    acceptor->async_accept(
            conn->sslStream.lowest_layer(),
            conn->peer,
            boost::bind(&RPCAcceptHandler<Protocol, SocketAcceptorService>,
                acceptor,
                boost::ref(context),
                fUseSSL,
                conn,
                boost::asio::placeholders::error));
}

/**
 * Accept and handle incoming connection.
 *
 * Re-arms the listener, applies the -rpcallowip filter, and hands accepted
 * connections off to a ThreadRPCServer3 worker thread. On any rejection path
 * this function deletes `conn`.
 */
template <typename Protocol, typename SocketAcceptorService>
static void RPCAcceptHandler(boost::shared_ptr< basic_socket_acceptor<Protocol, SocketAcceptorService> > acceptor,
                             ssl::context& context,
                             const bool fUseSSL,
                             AcceptedConnection* conn,
                             const boost::system::error_code& error)
{
    vnThreadsRunning[THREAD_RPCLISTENER]++;

    // Immediately start accepting new connections, except when we're cancelled or our socket is closed.
    if (error != asio::error::operation_aborted
     && acceptor->is_open())
        RPCListen(acceptor, context, fUseSSL);

    AcceptedConnectionImpl<ip::tcp>* tcp_conn = dynamic_cast< AcceptedConnectionImpl<ip::tcp>* >(conn);

    // TODO: Actually handle errors
    if (error)
    {
        delete conn;
    }

    // Restrict callers by IP.  It is important to
    // do this before starting client thread, to filter out
    // certain DoS and misbehaving clients.
    else if (tcp_conn
     && !ClientAllowed(tcp_conn->peer.address()))
    {
        // Only send a 403 if we're not using SSL to prevent a DoS during the SSL handshake.
        if (!fUseSSL)
            conn->stream() << HTTPReply(HTTP_FORBIDDEN, "", false) << std::flush;
        delete conn;
    }

    // start HTTP client thread
    else if (!NewThread(ThreadRPCServer3, conn)) {
        printf("Failed to create RPC server client thread\n");
        delete conn;
    }

    vnThreadsRunning[THREAD_RPCLISTENER]--;
}

// Main body of the RPC listener thread: validates rpcpassword configuration,
// sets up the (optionally SSL) acceptor(s) -- trying a dual IPv6/IPv4 socket
// first, then falling back to IPv4 -- and pumps the io_service until shutdown.
void ThreadRPCServer2(void* parg)
{
    printf("ThreadRPCServer started\n");

    strRPCUserColonPass = mapArgs["-rpcuser"] + ":" + mapArgs["-rpcpassword"];
    if (mapArgs["-rpcpassword"] == "")
    {
        // Refuse to run without a password; suggest a random one and shut down.
        unsigned char rand_pwd[32];
        RAND_bytes(rand_pwd, 32);
        string strWhatAmI = "To use InformationCoind";
        if (mapArgs.count("-server"))
            strWhatAmI = strprintf(_("To use the %s option"), "\"-server\"");
        else if (mapArgs.count("-daemon"))
            strWhatAmI = strprintf(_("To use the %s option"), "\"-daemon\"");
        uiInterface.ThreadSafeMessageBox(strprintf(
            _("%s, you must set a rpcpassword in the configuration file:\n %s\n"
              "It is recommended you use the following random password:\n"
              "rpcuser=bitcoinrpc\n"
              "rpcpassword=%s\n"
              "(you do not need to remember this password)\n"
              "If the file does not exist, create it with owner-readable-only file permissions.\n"),
                strWhatAmI.c_str(),
                GetConfigFile().string().c_str(),
                EncodeBase58(&rand_pwd[0],&rand_pwd[0]+32).c_str()),
            _("Error"), CClientUIInterface::OK | CClientUIInterface::MODAL);
        StartShutdown();
        return;
    }

    const bool fUseSSL = GetBoolArg("-rpcssl");

    asio::io_service io_service;

    ssl::context context(io_service, ssl::context::sslv23);
    if (fUseSSL)
    {
        // Load certificate chain and private key; relative paths resolve
        // against the data directory.
        context.set_options(ssl::context::no_sslv2);

        filesystem::path pathCertFile(GetArg("-rpcsslcertificatechainfile", "server.cert"));
        if (!pathCertFile.is_complete()) pathCertFile = filesystem::path(GetDataDir()) / pathCertFile;
        if (filesystem::exists(pathCertFile)) context.use_certificate_chain_file(pathCertFile.string());
        else printf("ThreadRPCServer ERROR: missing server certificate file %s\n", pathCertFile.string().c_str());

        filesystem::path pathPKFile(GetArg("-rpcsslprivatekeyfile", "server.pem"));
        if (!pathPKFile.is_complete()) pathPKFile = filesystem::path(GetDataDir()) / pathPKFile;
        if (filesystem::exists(pathPKFile)) context.use_private_key_file(pathPKFile.string(), ssl::context::pem);
        else printf("ThreadRPCServer ERROR: missing server private key file %s\n", pathPKFile.string().c_str());

        string strCiphers = GetArg("-rpcsslciphers", "TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH");
        SSL_CTX_set_cipher_list(context.impl(), strCiphers.c_str());
    }

    // Try a dual IPv6/IPv4 socket, falling back to separate IPv4 and IPv6 sockets
    const bool loopback = !mapArgs.count("-rpcallowip");
    asio::ip::address bindAddress = loopback ? asio::ip::address_v6::loopback() : asio::ip::address_v6::any();
    ip::tcp::endpoint endpoint(bindAddress, GetArg("-rpcport", GetDefaultRPCPort()));
    boost::system::error_code v6_only_error;
    boost::shared_ptr<ip::tcp::acceptor> acceptor(new ip::tcp::acceptor(io_service));

    boost::signals2::signal<void ()> StopRequests;

    bool fListening = false;
    std::string strerr;
    try
    {
        acceptor->open(endpoint.protocol());
        acceptor->set_option(boost::asio::ip::tcp::acceptor::reuse_address(true));

        // Try making the socket dual IPv6/IPv4 (if listening on the "any" address)
        acceptor->set_option(boost::asio::ip::v6_only(loopback), v6_only_error);

        acceptor->bind(endpoint);
        acceptor->listen(socket_base::max_connections);

        RPCListen(acceptor, context, fUseSSL);
        // Cancel outstanding listen-requests for this acceptor when shutting down
        StopRequests.connect(signals2::slot<void ()>(
                    static_cast<void (ip::tcp::acceptor::*)()>(&ip::tcp::acceptor::close), acceptor.get())
                .track(acceptor));

        fListening = true;
    }
    catch(boost::system::system_error &e)
    {
        strerr = strprintf(_("An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s"), endpoint.port(), e.what());
    }

    try {
        // If dual IPv6/IPv4 failed (or we're opening loopback interfaces only), open IPv4 separately
        if (!fListening || loopback || v6_only_error)
        {
            bindAddress = loopback ? asio::ip::address_v4::loopback() : asio::ip::address_v4::any();
            endpoint.address(bindAddress);

            acceptor.reset(new ip::tcp::acceptor(io_service));
            acceptor->open(endpoint.protocol());
            acceptor->set_option(boost::asio::ip::tcp::acceptor::reuse_address(true));
            acceptor->bind(endpoint);
            acceptor->listen(socket_base::max_connections);

            RPCListen(acceptor, context, fUseSSL);
            // Cancel outstanding listen-requests for this acceptor when shutting down
            StopRequests.connect(signals2::slot<void ()>(
                        static_cast<void (ip::tcp::acceptor::*)()>(&ip::tcp::acceptor::close), acceptor.get())
                    .track(acceptor));

            fListening = true;
        }
    }
    catch(boost::system::system_error &e)
    {
        strerr = strprintf(_("An error occurred while setting up the RPC port %u for listening on IPv4: %s"), endpoint.port(), e.what());
    }

    if (!fListening) {
        uiInterface.ThreadSafeMessageBox(strerr, _("Error"), CClientUIInterface::OK | CClientUIInterface::MODAL);
        StartShutdown();
        return;
    }

    // Pump async accept handlers until global shutdown is requested.
    vnThreadsRunning[THREAD_RPCLISTENER]--;
    while (!fShutdown)
        io_service.run_one();
    vnThreadsRunning[THREAD_RPCLISTENER]++;
    StopRequests();
}

// One parsed JSON-RPC request: id, method name, and parameter array.
class JSONRequest
{
public:
    Value id;        // request id (echoed back in replies, including errors)
    string strMethod;
    Array params;

    JSONRequest() { id = Value::null; }
    void parse(const Value& valRequest);
};

// Validate and unpack a raw JSON value into this request.
// Throws JSONRPCError(RPC_INVALID_REQUEST, ...) on malformed input.
void JSONRequest::parse(const Value& valRequest)
{
    // Parse request
    if (valRequest.type() != obj_type)
        throw JSONRPCError(RPC_INVALID_REQUEST, "Invalid Request object");
    const Object& request = valRequest.get_obj();

    // Parse id now so errors from here on will have the id
    id = find_value(request, "id");

    // Parse method
    Value valMethod = find_value(request, "method");
    if (valMethod.type() == null_type)
        throw JSONRPCError(RPC_INVALID_REQUEST, "Missing method");
    if (valMethod.type() != str_type)
        throw JSONRPCError(RPC_INVALID_REQUEST, "Method must be a string");
    strMethod = valMethod.get_str();
    // getwork/getblocktemplate are polled constantly by miners; don't spam the log.
    if (strMethod != "getwork" && strMethod != "getblocktemplate")
        printf("ThreadRPCServer method=%s\n", strMethod.c_str());

    // Parse params
    Value valParams = find_value(request, "params");
    if (valParams.type() == array_type)
        params = valParams.get_array();
    else if (valParams.type() == null_type)
        params = Array();
    else
        throw JSONRPCError(RPC_INVALID_REQUEST, "Params must be an array");
}

// Execute one request from a JSON-RPC batch; never throws -- all failures are
// folded into the reply object's "error" field.
static Object JSONRPCExecOne(const Value& req)
{
    Object rpc_result;

    JSONRequest jreq;
    try {
        jreq.parse(req);

        Value result = tableRPC.execute(jreq.strMethod, jreq.params);
        rpc_result = JSONRPCReplyObj(result, Value::null, jreq.id);
    }
    catch (Object& objError)
    {
        rpc_result = JSONRPCReplyObj(Value::null, objError, jreq.id);
    }
    catch (std::exception& e)
    {
        rpc_result = JSONRPCReplyObj(Value::null,
                                     JSONRPCError(RPC_PARSE_ERROR, e.what()), jreq.id);
    }

    return rpc_result;
}

// Execute a JSON-RPC batch (array of requests); returns the serialized array
// of individual replies.
static string JSONRPCExecBatch(const Array& vReq)
{
    Array ret;
    for (unsigned int reqIdx = 0; reqIdx < vReq.size(); reqIdx++)
        ret.push_back(JSONRPCExecOne(vReq[reqIdx]));

    return write_string(Value(ret), false) + "\n";
}

static CCriticalSection cs_THREAD_RPCHANDLER;

// Per-connection worker thread: reads HTTP requests off the connection,
// enforces Basic auth, dispatches JSON-RPC calls, and writes replies.
// Takes ownership of the AcceptedConnection* passed in parg (deletes it on
// every exit path).
void ThreadRPCServer3(void* parg)
{
    // Make this thread recognisable as the RPC handler
    RenameThread("bitcoin-rpchand");

    {
        LOCK(cs_THREAD_RPCHANDLER);
        vnThreadsRunning[THREAD_RPCHANDLER]++;
    }
    AcceptedConnection *conn = (AcceptedConnection *) parg;

    bool fRun = true;
    loop {
        if (fShutdown || !fRun)
        {
            conn->close();
            delete conn;
            {
                LOCK(cs_THREAD_RPCHANDLER);
                --vnThreadsRunning[THREAD_RPCHANDLER];
            }
            return;
        }
        map<string, string> mapHeaders;
        string strRequest;

        ReadHTTP(conn->stream(), mapHeaders, strRequest);

        // Check authorization
        if (mapHeaders.count("authorization") == 0)
        {
            conn->stream() << HTTPReply(HTTP_UNAUTHORIZED, "", false) << std::flush;
            break;
        }
        if (!HTTPAuthorized(mapHeaders))
        {
            printf("ThreadRPCServer incorrect password attempt from %s\n", conn->peer_address_to_string().c_str());
            /* Deter brute-forcing short passwords.
               If this results in a DOS the user really
               shouldn't have their RPC port exposed.*/
            if (mapArgs["-rpcpassword"].size() < 20)
                Sleep(250);

            conn->stream() << HTTPReply(HTTP_UNAUTHORIZED, "", false) << std::flush;
            break;
        }
        if (mapHeaders["connection"] == "close")
            fRun = false; // honor client's close request after this reply

        JSONRequest jreq;
        try
        {
            // Parse request
            Value valRequest;
            if (!read_string(strRequest, valRequest))
                throw JSONRPCError(RPC_PARSE_ERROR, "Parse error");

            string strReply;

            // singleton request
            if (valRequest.type() == obj_type) {
                jreq.parse(valRequest);

                Value result = tableRPC.execute(jreq.strMethod, jreq.params);

                // Send reply
                strReply = JSONRPCReply(result, Value::null, jreq.id);

            // array of requests
            } else if (valRequest.type() == array_type)
                strReply = JSONRPCExecBatch(valRequest.get_array());
            else
                throw JSONRPCError(RPC_PARSE_ERROR, "Top-level object parse error");

            conn->stream() << HTTPReply(HTTP_OK, strReply, fRun) << std::flush;
        }
        catch (Object& objError)
        {
            ErrorReply(conn->stream(), objError, jreq.id);
            break;
        }
        catch (std::exception& e)
        {
            ErrorReply(conn->stream(), JSONRPCError(RPC_PARSE_ERROR, e.what()), jreq.id);
            break;
        }
    }

    delete conn;
    {
        LOCK(cs_THREAD_RPCHANDLER);
        vnThreadsRunning[THREAD_RPCHANDLER]--;
    }
}

// Look up and run an RPC method. Enforces safe mode, and -- unless the
// command is flagged `unlocked` -- serializes execution under cs_main and the
// wallet lock. std::exceptions from handlers become RPC_MISC_ERROR replies.
json_spirit::Value CRPCTable::execute(const std::string &strMethod, const json_spirit::Array &params) const
{
    // Find method
    const CRPCCommand *pcmd = tableRPC[strMethod];
    if (!pcmd)
        throw JSONRPCError(RPC_METHOD_NOT_FOUND, "Method not found");

    // Observe safe mode
    string strWarning = GetWarnings("rpc");
    if (strWarning != "" && !GetBoolArg("-disablesafemode") &&
        !pcmd->okSafeMode)
        throw JSONRPCError(RPC_FORBIDDEN_BY_SAFE_MODE, string("Safe mode: ") + strWarning);

    try
    {
        // Execute
        Value result;
        {
            if (pcmd->unlocked)
                result = pcmd->actor(params, false);
            else {
                LOCK2(cs_main, pwalletMain->cs_wallet);
                result = pcmd->actor(params, false);
            }
        }
        return result;
    }
    catch (std::exception& e)
    {
        throw JSONRPCError(RPC_MISC_ERROR, e.what());
    }
}

// Client side: connect to the local daemon's RPC port, send one JSON-RPC
// request, and return the parsed reply object. Throws runtime_error on
// configuration, connection, HTTP, or parse failures.
Object CallRPC(const string& strMethod, const Array& params)
{
    if (mapArgs["-rpcuser"] == "" && mapArgs["-rpcpassword"] == "")
        throw runtime_error(strprintf(
            _("You must set rpcpassword=<password> in the configuration file:\n%s\n"
              "If the file does not exist, create it with owner-readable-only file permissions."),
                GetConfigFile().string().c_str()));

    // Connect to localhost
    bool fUseSSL = GetBoolArg("-rpcssl");
    asio::io_service io_service;
    ssl::context context(io_service, ssl::context::sslv23);
    context.set_options(ssl::context::no_sslv2);
    asio::ssl::stream<asio::ip::tcp::socket> sslStream(io_service, context);
    SSLIOStreamDevice<asio::ip::tcp> d(sslStream, fUseSSL);
    iostreams::stream< SSLIOStreamDevice<asio::ip::tcp> > stream(d);
    if (!d.connect(GetArg("-rpcconnect", "127.0.0.1"), GetArg("-rpcport", itostr(GetDefaultRPCPort()))))
        throw runtime_error("couldn't connect to server");

    // HTTP basic authentication
    string strUserPass64 = EncodeBase64(mapArgs["-rpcuser"] + ":" + mapArgs["-rpcpassword"]);
    map<string, string> mapRequestHeaders;
    mapRequestHeaders["Authorization"] = string("Basic ") + strUserPass64;

    // Send request
    string strRequest = JSONRPCRequest(strMethod, params, 1);
    string strPost = HTTPPost(strRequest, mapRequestHeaders);
    stream << strPost << std::flush;

    // Receive reply
    map<string, string> mapHeaders;
    string strReply;
    int nStatus = ReadHTTP(stream, mapHeaders, strReply);
    if (nStatus == HTTP_UNAUTHORIZED)
        throw runtime_error("incorrect rpcuser or rpcpassword (authorization failed)");
    // 400/404/500 still carry a JSON-RPC error body, so fall through and parse it.
    else if (nStatus >= 400 && nStatus != HTTP_BAD_REQUEST && nStatus != HTTP_NOT_FOUND && nStatus != HTTP_INTERNAL_SERVER_ERROR)
        throw runtime_error(strprintf("server returned HTTP error %d", nStatus));
    else if (strReply.empty())
        throw runtime_error("no response from server");

    // Parse reply
    Value valReply;
    if (!read_string(strReply, valReply))
        throw runtime_error("couldn't parse reply from server");
    const Object& reply = valReply.get_obj();
    if (reply.empty())
        throw runtime_error("expected reply to have result, error and id properties");

    return reply;
}

// Coerce a JSON value (possibly given as a string on the command line) into
// type T in place. With fAllowNull, a null value passes through untouched.
// Throws runtime_error if a string cannot be parsed as JSON.
template<typename T>
void ConvertTo(Value& value, bool fAllowNull=false)
{
    if (fAllowNull && value.type() == null_type)
        return;
    if (value.type() == str_type)
    {
        // reinterpret string as unquoted json value
        Value value2;
        string strJSON = value.get_str();
        if (!read_string(strJSON, value2))
            throw runtime_error(string("Error parsing JSON:")+strJSON);
        ConvertTo<T>(value2, fAllowNull);
        value = value2;
    }
    else
    {
        value = value.get_value<T>();
    }
}

// Convert strings to command-specific RPC representation
// (command-line arguments arrive as strings; this table-of-special-cases
// converts the positional parameters each method expects to be typed).
Array RPCConvertValues(const std::string &strMethod, const std::vector<std::string> &strParams)
{
    Array params;
    BOOST_FOREACH(const std::string &param, strParams)
        params.push_back(param);

    int n = params.size();

    //
    // Special case non-string parameter types
    //
    if (strMethod == "stop"                   && n > 0) ConvertTo<bool>(params[0]);
    if (strMethod == "setgenerate"            && n > 0) ConvertTo<bool>(params[0]);
    if (strMethod == "setgenerate"            && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "sendtoaddress"          && n > 1) ConvertTo<double>(params[1]);
    if (strMethod == "settxfee"               && n > 0) ConvertTo<double>(params[0]);
    if (strMethod == "getreceivedbyaddress"   && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "getreceivedbyaccount"   && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "listreceivedbyaddress"  && n > 0) ConvertTo<boost::int64_t>(params[0]);
    if (strMethod == "listreceivedbyaddress"  && n > 1) ConvertTo<bool>(params[1]);
    if (strMethod == "listreceivedbyaccount"  && n > 0) ConvertTo<boost::int64_t>(params[0]);
    if (strMethod == "listreceivedbyaccount"  && n > 1) ConvertTo<bool>(params[1]);
    if (strMethod == "getbalance"             && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "getblock"               && n > 1) ConvertTo<bool>(params[1]);
    if (strMethod == "getblockbynumber"       && n > 0) ConvertTo<boost::int64_t>(params[0]);
    if (strMethod == "getblockbynumber"       && n > 1) ConvertTo<bool>(params[1]);
    if (strMethod == "getblockhash"           && n > 0) ConvertTo<boost::int64_t>(params[0]);
    if (strMethod == "move"                   && n > 2) ConvertTo<double>(params[2]);
    if (strMethod == "move"                   && n > 3) ConvertTo<boost::int64_t>(params[3]);
    if (strMethod == "sendfrom"               && n > 2) ConvertTo<double>(params[2]);
    if (strMethod == "sendfrom"               && n > 3) ConvertTo<boost::int64_t>(params[3]);
    if (strMethod == "listtransactions"       && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "listtransactions"       && n > 2) ConvertTo<boost::int64_t>(params[2]);
    if (strMethod == "listaccounts"           && n > 0) ConvertTo<boost::int64_t>(params[0]);
    if (strMethod == "walletpassphrase"       && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "walletpassphrase"       && n > 2) ConvertTo<bool>(params[2]);
    if (strMethod == "getblocktemplate"       && n > 0) ConvertTo<Object>(params[0]);
    if (strMethod == "listsinceblock"         && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "sendmany"               && n > 1) ConvertTo<Object>(params[1]);
    if (strMethod == "sendmany"               && n > 2) ConvertTo<boost::int64_t>(params[2]);
    if (strMethod == "reservebalance"         && n > 0) ConvertTo<bool>(params[0]);
    if (strMethod == "reservebalance"         && n > 1) ConvertTo<double>(params[1]);
    if (strMethod == "addmultisigaddress"     && n > 0) ConvertTo<boost::int64_t>(params[0]);
    if (strMethod == "addmultisigaddress"     && n > 1) ConvertTo<Array>(params[1]);
    if (strMethod == "listunspent"            && n > 0) ConvertTo<boost::int64_t>(params[0]);
    if (strMethod == "listunspent"            && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "listunspent"            && n > 2) ConvertTo<Array>(params[2]);
    if (strMethod == "getrawtransaction"      && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "createrawtransaction"   && n > 0) ConvertTo<Array>(params[0]);
    if (strMethod == "createrawtransaction"   && n > 1) ConvertTo<Object>(params[1]);
    if (strMethod == "signrawtransaction"     && n > 1) ConvertTo<Array>(params[1], true);
    if (strMethod == "signrawtransaction"     && n > 2) ConvertTo<Array>(params[2], true);

    return params;
}

// Entry point for command-line RPC invocation (`daemon <method> [args...]`).
// Prints the result (or error) and returns a process exit code: 0 on success,
// abs(json-rpc error code) on an RPC error, 87 on a local exception.
int CommandLineRPC(int argc, char *argv[])
{
    string strPrint;
    int nRet = 0;
    try
    {
        // Skip switches
        while (argc > 1 && IsSwitchChar(argv[1][0]))
        {
            argc--;
            argv++;
        }

        // Method
        if (argc < 2)
            throw runtime_error("too few parameters");
        string strMethod = argv[1];

        // Parameters default to strings
        std::vector<std::string> strParams(&argv[2], &argv[argc]);
        Array params = RPCConvertValues(strMethod, strParams);

        // Execute
        Object reply = CallRPC(strMethod, params);

        // Parse reply
        const Value& result = find_value(reply, "result");
        const Value& error  = find_value(reply, "error");

        if (error.type() != null_type)
        {
            // Error
            strPrint = "error: " + write_string(error, false);
            int code = find_value(error.get_obj(), "code").get_int();
            nRet = abs(code);
        }
        else
        {
            // Result
            if (result.type() == null_type)
                strPrint = "";
            else if (result.type() == str_type)
                strPrint = result.get_str();
            else
                strPrint = write_string(result, true);
        }
    }
    catch (std::exception& e)
    {
        strPrint = string("error: ") + e.what();
        nRet = 87;
    }
    catch (...)
    {
        PrintException(NULL, "CommandLineRPC()");
    }

    if (strPrint != "")
    {
        fprintf((nRet == 0 ? stdout : stderr), "%s\n", strPrint.c_str());
    }
    return nRet;
}

#ifdef TEST
// Standalone test harness: `-server` runs the RPC listener inline, otherwise
// arguments are forwarded to CommandLineRPC.
int main(int argc, char *argv[])
{
#ifdef _MSC_VER
    // Turn off Microsoft heap dump noise
    _CrtSetReportMode(_CRT_WARN, _CRTDBG_MODE_FILE);
    _CrtSetReportFile(_CRT_WARN, CreateFile("NUL", GENERIC_WRITE, 0, NULL, OPEN_EXISTING, 0, 0));
#endif
    setbuf(stdin, NULL);
    setbuf(stdout, NULL);
    setbuf(stderr, NULL);

    try
    {
        if (argc >= 2 && string(argv[1]) == "-server")
        {
            printf("server ready\n");
            ThreadRPCServer(NULL);
        }
        else
        {
            return CommandLineRPC(argc, argv);
        }
    }
    catch (std::exception& e) {
        PrintException(&e, "main()");
    } catch (...) {
        PrintException(NULL, "main()");
    }
    return 0;
}
#endif

// Global dispatch table; const after static initialization.
const CRPCTable tableRPC;
{ "content_hash": "b1d9caf7fea7c356a1bcaa426b83fec5", "timestamp": "", "source": "github", "line_count": 1292, "max_line_length": 159, "avg_line_length": 35.856037151702786, "alnum_prop": 0.5850278461339204, "repo_name": "information-coin/informationcoin", "id": "6d9400a4ec2807baf93824b241e4755dc34ad3d4", "size": "46326", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/bitcoinrpc.cpp", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "61562" }, { "name": "C", "bytes": "65871" }, { "name": "C++", "bytes": "1569238" }, { "name": "IDL", "bytes": "12145" }, { "name": "Objective-C", "bytes": "2451" }, { "name": "Python", "bytes": "11646" }, { "name": "Shell", "bytes": "1026" }, { "name": "TypeScript", "bytes": "105954" } ], "symlink_target": "" }
""" All layers just related to the detection neural network. """ import paddle from .layer_function_generator import templatedoc from ..layer_helper import LayerHelper from ..framework import Variable, _non_static_mode, static_only, in_dygraph_mode from .. import core from .loss import softmax_with_cross_entropy from . import tensor from . import nn from ..data_feeder import check_variable_and_dtype, check_type, check_dtype import math import numpy as np from functools import reduce from ..data_feeder import ( convert_dtype, check_variable_and_dtype, check_type, check_dtype, ) from paddle.utils import deprecated from paddle import _C_ops, _legacy_C_ops from ..framework import in_dygraph_mode __all__ = [ 'prior_box', 'density_prior_box', 'multi_box_head', 'bipartite_match', 'target_assign', 'detection_output', 'ssd_loss', 'rpn_target_assign', 'retinanet_target_assign', 'sigmoid_focal_loss', 'anchor_generator', 'roi_perspective_transform', 'generate_proposal_labels', 'generate_proposals', 'generate_mask_labels', 'iou_similarity', 'box_coder', 'polygon_box_transform', 'yolov3_loss', 'yolo_box', 'box_clip', 'multiclass_nms', 'locality_aware_nms', 'matrix_nms', 'retinanet_detection_output', 'distribute_fpn_proposals', 'box_decoder_and_assign', 'collect_fpn_proposals', ] def retinanet_target_assign( bbox_pred, cls_logits, anchor_box, anchor_var, gt_boxes, gt_labels, is_crowd, im_info, num_classes=1, positive_overlap=0.5, negative_overlap=0.4, ): r""" **Target Assign Layer for the detector RetinaNet.** This OP finds out positive and negative samples from all anchors for training the detector `RetinaNet <https://arxiv.org/abs/1708.02002>`_ , and assigns target labels for classification along with target locations for regression to each sample, then takes out the part belonging to positive and negative samples from category prediction( :attr:`cls_logits`) and location prediction( :attr:`bbox_pred`) which belong to all anchors. 
The searching principles for positive and negative samples are as followed: 1. Anchors are assigned to ground-truth boxes when it has the highest IoU overlap with a ground-truth box. 2. Anchors are assigned to ground-truth boxes when it has an IoU overlap higher than :attr:`positive_overlap` with any ground-truth box. 3. Anchors are assigned to background when its IoU overlap is lower than :attr:`negative_overlap` for all ground-truth boxes. 4. Anchors which do not meet the above conditions do not participate in the training process. Retinanet predicts a :math:`C`-vector for classification and a 4-vector for box regression for each anchor, hence the target label for each positive(or negative) sample is a :math:`C`-vector and the target locations for each positive sample is a 4-vector. As for a positive sample, if the category of its assigned ground-truth box is class :math:`i`, the corresponding entry in its length :math:`C` label vector is set to 1 and all other entries is set to 0, its box regression targets are computed as the offset between itself and its assigned ground-truth box. As for a negative sample, all entries in its length :math:`C` label vector are set to 0 and box regression targets are omitted because negative samples do not participate in the training process of location regression. After the assignment, the part belonging to positive and negative samples is taken out from category prediction( :attr:`cls_logits` ), and the part belonging to positive samples is taken out from location prediction( :attr:`bbox_pred` ). Args: bbox_pred(Variable): A 3-D Tensor with shape :math:`[N, M, 4]` represents the predicted locations of all anchors. :math:`N` is the batch size( the number of images in a mini-batch), :math:`M` is the number of all anchors of one image, and each anchor has 4 coordinate values. The data type of :attr:`bbox_pred` is float32 or float64. 
cls_logits(Variable): A 3-D Tensor with shape :math:`[N, M, C]` represents the predicted categories of all anchors. :math:`N` is the batch size, :math:`M` is the number of all anchors of one image, and :math:`C` is the number of categories (**Notice: excluding background**). The data type of :attr:`cls_logits` is float32 or float64. anchor_box(Variable): A 2-D Tensor with shape :math:`[M, 4]` represents the locations of all anchors. :math:`M` is the number of all anchors of one image, each anchor is represented as :math:`[xmin, ymin, xmax, ymax]`, :math:`[xmin, ymin]` is the left top coordinate of the anchor box, :math:`[xmax, ymax]` is the right bottom coordinate of the anchor box. The data type of :attr:`anchor_box` is float32 or float64. Please refer to the OP :ref:`api_fluid_layers_anchor_generator` for the generation of :attr:`anchor_box`. anchor_var(Variable): A 2-D Tensor with shape :math:`[M,4]` represents the expanded factors of anchor locations used in loss function. :math:`M` is number of all anchors of one image, each anchor possesses a 4-vector expanded factor. The data type of :attr:`anchor_var` is float32 or float64. Please refer to the OP :ref:`api_fluid_layers_anchor_generator` for the generation of :attr:`anchor_var`. gt_boxes(Variable): A 1-level 2-D LoDTensor with shape :math:`[G, 4]` represents locations of all ground-truth boxes. :math:`G` is the total number of all ground-truth boxes in a mini-batch, and each ground-truth box has 4 coordinate values. The data type of :attr:`gt_boxes` is float32 or float64. gt_labels(variable): A 1-level 2-D LoDTensor with shape :math:`[G, 1]` represents categories of all ground-truth boxes, and the values are in the range of :math:`[1, C]`. :math:`G` is the total number of all ground-truth boxes in a mini-batch, and each ground-truth box has one category. The data type of :attr:`gt_labels` is int32. 
is_crowd(Variable): A 1-level 1-D LoDTensor with shape :math:`[G]` which indicates whether a ground-truth box is a crowd. If the value is 1, the corresponding box is a crowd, it is ignored during training. :math:`G` is the total number of all ground-truth boxes in a mini-batch. The data type of :attr:`is_crowd` is int32. im_info(Variable): A 2-D Tensor with shape [N, 3] represents the size information of input images. :math:`N` is the batch size, the size information of each image is a 3-vector which are the height and width of the network input along with the factor scaling the origin image to the network input. The data type of :attr:`im_info` is float32. num_classes(int32): The number of categories for classification, the default value is 1. positive_overlap(float32): Minimum overlap required between an anchor and ground-truth box for the anchor to be a positive sample, the default value is 0.5. negative_overlap(float32): Maximum overlap allowed between an anchor and ground-truth box for the anchor to be a negative sample, the default value is 0.4. :attr:`negative_overlap` should be less than or equal to :attr:`positive_overlap`, if not, the actual value of :attr:`positive_overlap` is :attr:`negative_overlap`. Returns: A tuple with 6 Variables: **predict_scores** (Variable): A 2-D Tensor with shape :math:`[F+B, C]` represents category prediction belonging to positive and negative samples. :math:`F` is the number of positive samples in a mini-batch, :math:`B` is the number of negative samples, and :math:`C` is the number of categories (**Notice: excluding background**). The data type of :attr:`predict_scores` is float32 or float64. **predict_location** (Variable): A 2-D Tensor with shape :math:`[F, 4]` represents location prediction belonging to positive samples. :math:`F` is the number of positive samples. :math:`F` is the number of positive samples, and each sample has 4 coordinate values. The data type of :attr:`predict_location` is float32 or float64. 
**target_label** (Variable): A 2-D Tensor with shape :math:`[F+B, 1]` represents target labels for classification belonging to positive and negative samples. :math:`F` is the number of positive samples, :math:`B` is the number of negative, and each sample has one target category. The data type of :attr:`target_label` is int32. **target_bbox** (Variable): A 2-D Tensor with shape :math:`[F, 4]` represents target locations for box regression belonging to positive samples. :math:`F` is the number of positive samples, and each sample has 4 coordinate values. The data type of :attr:`target_bbox` is float32 or float64. **bbox_inside_weight** (Variable): A 2-D Tensor with shape :math:`[F, 4]` represents whether a positive sample is fake positive, if a positive sample is false positive, the corresponding entries in :attr:`bbox_inside_weight` are set 0, otherwise 1. :math:`F` is the number of total positive samples in a mini-batch, and each sample has 4 coordinate values. The data type of :attr:`bbox_inside_weight` is float32 or float64. **fg_num** (Variable): A 2-D Tensor with shape :math:`[N, 1]` represents the number of positive samples. :math:`N` is the batch size. **Notice: The number of positive samples is used as the denominator of later loss function, to avoid the condition that the denominator is zero, this OP has added 1 to the actual number of positive samples of each image.** The data type of :attr:`fg_num` is int32. Examples: .. 
code-block:: python import paddle.fluid as fluid bbox_pred = fluid.data(name='bbox_pred', shape=[1, 100, 4], dtype='float32') cls_logits = fluid.data(name='cls_logits', shape=[1, 100, 10], dtype='float32') anchor_box = fluid.data(name='anchor_box', shape=[100, 4], dtype='float32') anchor_var = fluid.data(name='anchor_var', shape=[100, 4], dtype='float32') gt_boxes = fluid.data(name='gt_boxes', shape=[10, 4], dtype='float32') gt_labels = fluid.data(name='gt_labels', shape=[10, 1], dtype='int32') is_crowd = fluid.data(name='is_crowd', shape=[1], dtype='int32') im_info = fluid.data(name='im_info', shape=[1, 3], dtype='float32') score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \\ fluid.layers.retinanet_target_assign(bbox_pred, cls_logits, anchor_box, anchor_var, gt_boxes, gt_labels, is_crowd, im_info, 10) """ check_variable_and_dtype( bbox_pred, 'bbox_pred', ['float32', 'float64'], 'retinanet_target_assign', ) check_variable_and_dtype( cls_logits, 'cls_logits', ['float32', 'float64'], 'retinanet_target_assign', ) check_variable_and_dtype( anchor_box, 'anchor_box', ['float32', 'float64'], 'retinanet_target_assign', ) check_variable_and_dtype( anchor_var, 'anchor_var', ['float32', 'float64'], 'retinanet_target_assign', ) check_variable_and_dtype( gt_boxes, 'gt_boxes', ['float32', 'float64'], 'retinanet_target_assign' ) check_variable_and_dtype( gt_labels, 'gt_labels', ['int32'], 'retinanet_target_assign' ) check_variable_and_dtype( is_crowd, 'is_crowd', ['int32'], 'retinanet_target_assign' ) check_variable_and_dtype( im_info, 'im_info', ['float32', 'float64'], 'retinanet_target_assign' ) helper = LayerHelper('retinanet_target_assign', **locals()) # Assign target label to anchors loc_index = helper.create_variable_for_type_inference(dtype='int32') score_index = helper.create_variable_for_type_inference(dtype='int32') target_label = helper.create_variable_for_type_inference(dtype='int32') target_bbox = helper.create_variable_for_type_inference( 
        dtype=anchor_box.dtype
    )
    bbox_inside_weight = helper.create_variable_for_type_inference(
        dtype=anchor_box.dtype
    )
    fg_num = helper.create_variable_for_type_inference(dtype='int32')
    helper.append_op(
        type="retinanet_target_assign",
        inputs={
            'Anchor': anchor_box,
            'GtBoxes': gt_boxes,
            'GtLabels': gt_labels,
            'IsCrowd': is_crowd,
            'ImInfo': im_info,
        },
        outputs={
            'LocationIndex': loc_index,
            'ScoreIndex': score_index,
            'TargetLabel': target_label,
            'TargetBBox': target_bbox,
            'BBoxInsideWeight': bbox_inside_weight,
            'ForegroundNumber': fg_num,
        },
        attrs={
            'positive_overlap': positive_overlap,
            'negative_overlap': negative_overlap,
        },
    )
    # The assignment outputs are pure targets/indices; no gradient flows
    # through them.
    loc_index.stop_gradient = True
    score_index.stop_gradient = True
    target_label.stop_gradient = True
    target_bbox.stop_gradient = True
    bbox_inside_weight.stop_gradient = True
    fg_num.stop_gradient = True
    # Gather only the sampled (positive/negative) predictions so the loss is
    # computed on those anchors alone.
    cls_logits = nn.reshape(x=cls_logits, shape=(-1, num_classes))
    bbox_pred = nn.reshape(x=bbox_pred, shape=(-1, 4))
    predicted_cls_logits = paddle.gather(cls_logits, score_index)
    predicted_bbox_pred = paddle.gather(bbox_pred, loc_index)
    return (
        predicted_cls_logits,
        predicted_bbox_pred,
        target_label,
        target_bbox,
        bbox_inside_weight,
        fg_num,
    )


def rpn_target_assign(
    bbox_pred,
    cls_logits,
    anchor_box,
    anchor_var,
    gt_boxes,
    is_crowd,
    im_info,
    rpn_batch_size_per_im=256,
    rpn_straddle_thresh=0.0,
    rpn_fg_fraction=0.5,
    rpn_positive_overlap=0.7,
    rpn_negative_overlap=0.3,
    use_random=True,
):
    """
    **Target Assign Layer for region proposal network (RPN) in Faster-RCNN detection.**

    Given the Intersection-over-Union (IoU) overlap between anchors and
    ground truth boxes, this layer assigns classification and regression
    targets to each anchor; these target labels are used to train the RPN.
    The classification target is a binary class label (of being an object or
    not).
    Following the paper of Faster-RCNN, the positive labels are two kinds of
    anchors: (i) the anchor/anchors with the highest IoU overlap with a
    ground-truth box, or (ii) an anchor that has an IoU overlap higher than
    rpn_positive_overlap(0.7) with any ground-truth box. Note that a single
    ground-truth box may assign positive labels to multiple anchors. A
    non-positive anchor is when its IoU ratio is lower than
    rpn_negative_overlap (0.3) for all ground-truth boxes. Anchors that are
    neither positive nor negative do not contribute to the training objective.
    The regression targets are the encoded ground-truth boxes associated with
    the positive anchors.

    Args:
        bbox_pred(Variable): A 3-D Tensor with shape [N, M, 4] represents the
            predicted locations of M bounding bboxes. N is the batch size,
            and each bounding box has four coordinate values and the layout
            is [xmin, ymin, xmax, ymax]. The data type can be float32 or
            float64.
        cls_logits(Variable): A 3-D Tensor with shape [N, M, 1] represents the
            predicted confidence predictions. N is the batch size, 1 is the
            foreground and background sigmoid, M is number of bounding boxes.
            The data type can be float32 or float64.
        anchor_box(Variable): A 2-D Tensor with shape [M, 4] holds M boxes,
            each box is represented as [xmin, ymin, xmax, ymax],
            [xmin, ymin] is the left top coordinate of the anchor box,
            if the input is image feature map, they are close to the origin
            of the coordinate system. [xmax, ymax] is the right bottom
            coordinate of the anchor box. The data type can be float32 or
            float64.
        anchor_var(Variable): A 2-D Tensor with shape [M,4] holds expanded
            variances of anchors. The data type can be float32 or float64.
        gt_boxes (Variable): The ground-truth bounding boxes (bboxes) are a 2D
            LoDTensor with shape [Ng, 4], Ng is the total number of
            ground-truth bboxes of mini-batch input. The data type can be
            float32 or float64.
        is_crowd (Variable): A 1-D LoDTensor which indicates whether the
            ground-truth box is crowd. The data type must be int32.
im_info (Variable): A 2-D LoDTensor with shape [N, 3]. N is the batch size, 3 is the height, width and scale. rpn_batch_size_per_im(int): Total number of RPN examples per image. The data type must be int32. rpn_straddle_thresh(float): Remove RPN anchors that go outside the image by straddle_thresh pixels. The data type must be float32. rpn_fg_fraction(float): Target fraction of RoI minibatch that is labeled foreground (i.e. class > 0), 0-th class is background. The data type must be float32. rpn_positive_overlap(float): Minimum overlap required between an anchor and ground-truth box for the (anchor, gt box) pair to be a positive example. The data type must be float32. rpn_negative_overlap(float): Maximum overlap allowed between an anchor and ground-truth box for the (anchor, gt box) pair to be a negative examples. The data type must be float32. Returns: tuple: A tuple(predicted_scores, predicted_location, target_label, target_bbox, bbox_inside_weight) is returned. The predicted_scores and predicted_location is the predicted result of the RPN. The target_label and target_bbox is the ground truth, respectively. The predicted_location is a 2D Tensor with shape [F, 4], and the shape of target_bbox is same as the shape of the predicted_location, F is the number of the foreground anchors. The predicted_scores is a 2D Tensor with shape [F + B, 1], and the shape of target_label is same as the shape of the predicted_scores, B is the number of the background anchors, the F and B is depends on the input of this operator. Bbox_inside_weight represents whether the predicted loc is fake_fg or not and the shape is [F, 4]. Examples: .. 
code-block:: python import paddle.fluid as fluid bbox_pred = fluid.data(name='bbox_pred', shape=[None, 4], dtype='float32') cls_logits = fluid.data(name='cls_logits', shape=[None, 1], dtype='float32') anchor_box = fluid.data(name='anchor_box', shape=[None, 4], dtype='float32') anchor_var = fluid.data(name='anchor_var', shape=[None, 4], dtype='float32') gt_boxes = fluid.data(name='gt_boxes', shape=[None, 4], dtype='float32') is_crowd = fluid.data(name='is_crowd', shape=[None], dtype='float32') im_info = fluid.data(name='im_infoss', shape=[None, 3], dtype='float32') loc, score, loc_target, score_target, inside_weight = fluid.layers.rpn_target_assign( bbox_pred, cls_logits, anchor_box, anchor_var, gt_boxes, is_crowd, im_info) """ helper = LayerHelper('rpn_target_assign', **locals()) check_variable_and_dtype( bbox_pred, 'bbox_pred', ['float32', 'float64'], 'rpn_target_assign' ) check_variable_and_dtype( cls_logits, 'cls_logits', ['float32', 'float64'], 'rpn_target_assign' ) check_variable_and_dtype( anchor_box, 'anchor_box', ['float32', 'float64'], 'rpn_target_assign' ) check_variable_and_dtype( anchor_var, 'anchor_var', ['float32', 'float64'], 'rpn_target_assign' ) check_variable_and_dtype( gt_boxes, 'gt_boxes', ['float32', 'float64'], 'rpn_target_assign' ) check_variable_and_dtype( is_crowd, 'is_crowd', ['int32'], 'rpn_target_assign' ) check_variable_and_dtype( im_info, 'im_info', ['float32', 'float64'], 'rpn_target_assign' ) # Assign target label to anchors loc_index = helper.create_variable_for_type_inference(dtype='int32') score_index = helper.create_variable_for_type_inference(dtype='int32') target_label = helper.create_variable_for_type_inference(dtype='int32') target_bbox = helper.create_variable_for_type_inference( dtype=anchor_box.dtype ) bbox_inside_weight = helper.create_variable_for_type_inference( dtype=anchor_box.dtype ) helper.append_op( type="rpn_target_assign", inputs={ 'Anchor': anchor_box, 'GtBoxes': gt_boxes, 'IsCrowd': is_crowd, 'ImInfo': im_info, 
}, outputs={ 'LocationIndex': loc_index, 'ScoreIndex': score_index, 'TargetLabel': target_label, 'TargetBBox': target_bbox, 'BBoxInsideWeight': bbox_inside_weight, }, attrs={ 'rpn_batch_size_per_im': rpn_batch_size_per_im, 'rpn_straddle_thresh': rpn_straddle_thresh, 'rpn_positive_overlap': rpn_positive_overlap, 'rpn_negative_overlap': rpn_negative_overlap, 'rpn_fg_fraction': rpn_fg_fraction, 'use_random': use_random, }, ) loc_index.stop_gradient = True score_index.stop_gradient = True target_label.stop_gradient = True target_bbox.stop_gradient = True bbox_inside_weight.stop_gradient = True cls_logits = nn.reshape(x=cls_logits, shape=(-1, 1)) bbox_pred = nn.reshape(x=bbox_pred, shape=(-1, 4)) predicted_cls_logits = paddle.gather(cls_logits, score_index) predicted_bbox_pred = paddle.gather(bbox_pred, loc_index) return ( predicted_cls_logits, predicted_bbox_pred, target_label, target_bbox, bbox_inside_weight, ) def sigmoid_focal_loss(x, label, fg_num, gamma=2.0, alpha=0.25): r""" :alias_main: paddle.nn.functional.sigmoid_focal_loss :alias: paddle.nn.functional.sigmoid_focal_loss,paddle.nn.functional.loss.sigmoid_focal_loss :old_api: paddle.fluid.layers.sigmoid_focal_loss **Sigmoid Focal Loss Operator.** `Focal Loss <https://arxiv.org/abs/1708.02002>`_ is used to address the foreground-background class imbalance existed on the training phase of many computer vision tasks. This OP computes the sigmoid value for each element in the input tensor :attr:`x`, after which focal loss is measured between the sigmoid value and target label. The focal loss is given as followed: .. 
math:: \\mathop{loss_{i,\\,j}}\\limits_{i\\in\\mathbb{[0,\\,N-1]},\\,j\\in\\mathbb{[0,\\,C-1]}}=\\left\\{ \\begin{array}{rcl} - \\frac{1}{fg\_num} * \\alpha * {(1 - \\sigma(x_{i,\\,j}))}^{\\gamma} * \\log(\\sigma(x_{i,\\,j})) & & {(j +1) = label_{i,\\,0}} \\\\ - \\frac{1}{fg\_num} * (1 - \\alpha) * {\sigma(x_{i,\\,j})}^{ \\gamma} * \\log(1 - \\sigma(x_{i,\\,j})) & & {(j +1)!= label_{i,\\,0}} \\end{array} \\right. We know that .. math:: \\sigma(x_j) = \\frac{1}{1 + \\exp(-x_j)} Args: x(Variable): A 2-D tensor with shape :math:`[N, C]` represents the predicted categories of all samples. :math:`N` is the number of all samples responsible for optimization in a mini-batch, for example, samples are anchor boxes for object detection and :math:`N` is the total number of positive and negative samples in a mini-batch; Samples are images for image classification and :math:`N` is the number of images in a mini-batch. :math:`C` is the number of classes (**Notice: excluding background**). The data type of :attr:`x` is float32 or float64. label(Variable): A 2-D tensor with shape :math:`[N, 1]` represents the target labels for classification. :math:`N` is the number of all samples responsible for optimization in a mini-batch, each sample has one target category. The values for positive samples are in the range of :math:`[1, C]`, and the values for negative samples are 0. The data type of :attr:`label` is int32. fg_num(Variable): A 1-D tensor with shape [1] represents the number of positive samples in a mini-batch, which should be obtained before this OP. The data type of :attr:`fg_num` is int32. gamma(int|float): Hyper-parameter to balance the easy and hard examples. Default value is set to 2.0. alpha(int|float): Hyper-parameter to balance the positive and negative example. Default value is set to 0.25. Returns: Variable(the data type is float32 or float64): A 2-D tensor with shape :math:`[N, C]`, which is the focal loss of each element in the input tensor :attr:`x`. Examples: .. 
code-block:: python import numpy as np import paddle.fluid as fluid num_classes = 10 # exclude background image_width = 16 image_height = 16 batch_size = 32 max_iter = 20 def gen_train_data(): x_data = np.random.uniform(0, 255, (batch_size, 3, image_height, image_width)).astype('float64') label_data = np.random.randint(0, num_classes, (batch_size, 1)).astype('int32') return {"x": x_data, "label": label_data} def get_focal_loss(pred, label, fg_num, num_classes): pred = fluid.layers.reshape(pred, [-1, num_classes]) label = fluid.layers.reshape(label, [-1, 1]) label.stop_gradient = True loss = fluid.layers.sigmoid_focal_loss( pred, label, fg_num, gamma=2.0, alpha=0.25) loss = fluid.layers.reduce_sum(loss) return loss def build_model(mode='train'): x = fluid.data(name="x", shape=[-1, 3, -1, -1], dtype='float64') output = fluid.layers.pool2d(input=x, pool_type='avg', global_pooling=True) output = fluid.layers.fc( input=output, size=num_classes, # Notice: size is set to be the number of target classes (excluding backgorund) # because sigmoid activation will be done in the sigmoid_focal_loss op. act=None) if mode == 'train': label = fluid.data(name="label", shape=[-1, 1], dtype='int32') # Obtain the fg_num needed by the sigmoid_focal_loss op: # 0 in label represents background, >=1 in label represents foreground, # find the elements in label which are greater or equal than 1, then # computed the numbers of these elements. data = fluid.layers.fill_constant(shape=[1], value=1, dtype='int32') fg_label = fluid.layers.greater_equal(label, data) fg_label = fluid.layers.cast(fg_label, dtype='int32') fg_num = fluid.layers.reduce_sum(fg_label) fg_num.stop_gradient = True avg_loss = get_focal_loss(output, label, fg_num, num_classes) return avg_loss else: # During evaluating or testing phase, # output of the final fc layer should be connected to a sigmoid layer. 
pred = fluid.layers.sigmoid(output) return pred loss = build_model('train') moment_optimizer = fluid.optimizer.MomentumOptimizer( learning_rate=0.001, momentum=0.9) moment_optimizer.minimize(loss) place = fluid.CPUPlace() exe = fluid.Executor(place) exe.run(fluid.default_startup_program()) for i in range(max_iter): outs = exe.run(feed=gen_train_data(), fetch_list=[loss.name]) print(outs) """ check_variable_and_dtype( x, 'x', ['float32', 'float64'], 'sigmoid_focal_loss' ) check_variable_and_dtype(label, 'label', ['int32'], 'sigmoid_focal_loss') check_variable_and_dtype(fg_num, 'fg_num', ['int32'], 'sigmoid_focal_loss') helper = LayerHelper("sigmoid_focal_loss", **locals()) out = helper.create_variable_for_type_inference(dtype=x.dtype) helper.append_op( type="sigmoid_focal_loss", inputs={"X": x, "Label": label, "FgNum": fg_num}, attrs={"gamma": gamma, 'alpha': alpha}, outputs={"Out": out}, ) return out def detection_output( loc, scores, prior_box, prior_box_var, background_label=0, nms_threshold=0.3, nms_top_k=400, keep_top_k=200, score_threshold=0.01, nms_eta=1.0, return_index=False, ): """ Given the regression locations, classification confidences and prior boxes, calculate the detection outputs by performing following steps: 1. Decode input bounding box predictions according to the prior boxes and regression locations. 2. Get the final detection results by applying multi-class non maximum suppression (NMS). Please note, this operation doesn't clip the final output bounding boxes to the image window. Args: loc(Variable): A 3-D Tensor with shape [N, M, 4] represents the predicted locations of M bounding bboxes. Data type should be float32 or float64. N is the batch size, and each bounding box has four coordinate values and the layout is [xmin, ymin, xmax, ymax]. scores(Variable): A 3-D Tensor with shape [N, M, C] represents the predicted confidence predictions. Data type should be float32 or float64. 
N is the batch size, C is the class number, M is number of bounding boxes. prior_box(Variable): A 2-D Tensor with shape [M, 4] holds M boxes, each box is represented as [xmin, ymin, xmax, ymax]. Data type should be float32 or float64. prior_box_var(Variable): A 2-D Tensor with shape [M, 4] holds M group of variance. Data type should be float32 or float64. background_label(int): The index of background label, the background label will be ignored. If set to -1, then all categories will be considered. Default: 0. nms_threshold(float): The threshold to be used in NMS. Default: 0.3. nms_top_k(int): Maximum number of detections to be kept according to the confidences after filtering detections based on score_threshold and before NMS. Default: 400. keep_top_k(int): Number of total bboxes to be kept per image after NMS step. -1 means keeping all bboxes after NMS step. Default: 200. score_threshold(float): Threshold to filter out bounding boxes with low confidence score. If not provided, consider all boxes. Default: 0.01. nms_eta(float): The parameter for adaptive NMS. It works only when the value is less than 1.0. Default: 1.0. return_index(bool): Whether return selected index. Default: False Returns: A tuple with two Variables: (Out, Index) if return_index is True, otherwise, a tuple with one Variable(Out) is returned. Out (Variable): The detection outputs is a LoDTensor with shape [No, 6]. Data type is the same as input (loc). Each row has six values: [label, confidence, xmin, ymin, xmax, ymax]. `No` is the total number of detections in this mini-batch. For each instance, the offsets in first dimension are called LoD, the offset number is N + 1, N is the batch size. The i-th image has `LoD[i + 1] - LoD[i]` detected results, if it is 0, the i-th image has no detected results. Index (Variable): Only return when return_index is True. A 2-D LoDTensor with shape [No, 1] represents the selected index which type is Integer. The index is the absolute value cross batches. 
No is the same number as Out. If the index is used to gather other attribute such as age, one needs to reshape the input(N, M, 1) to (N * M, 1) as first, where N is the batch size and M is the number of boxes. Examples: .. code-block:: python import paddle.fluid as fluid import paddle paddle.enable_static() pb = fluid.data(name='prior_box', shape=[10, 4], dtype='float32') pbv = fluid.data(name='prior_box_var', shape=[10, 4], dtype='float32') loc = fluid.data(name='target_box', shape=[2, 21, 4], dtype='float32') scores = fluid.data(name='scores', shape=[2, 21, 10], dtype='float32') nmsed_outs, index = fluid.layers.detection_output(scores=scores, loc=loc, prior_box=pb, prior_box_var=pbv, return_index=True) """ helper = LayerHelper("detection_output", **locals()) decoded_box = box_coder( prior_box=prior_box, prior_box_var=prior_box_var, target_box=loc, code_type='decode_center_size', ) scores = nn.softmax(input=scores) scores = nn.transpose(scores, perm=[0, 2, 1]) scores.stop_gradient = True nmsed_outs = helper.create_variable_for_type_inference( dtype=decoded_box.dtype ) if return_index: index = helper.create_variable_for_type_inference(dtype='int') helper.append_op( type="multiclass_nms2", inputs={'Scores': scores, 'BBoxes': decoded_box}, outputs={'Out': nmsed_outs, 'Index': index}, attrs={ 'background_label': 0, 'nms_threshold': nms_threshold, 'nms_top_k': nms_top_k, 'keep_top_k': keep_top_k, 'score_threshold': score_threshold, 'nms_eta': 1.0, }, ) index.stop_gradient = True else: helper.append_op( type="multiclass_nms", inputs={'Scores': scores, 'BBoxes': decoded_box}, outputs={'Out': nmsed_outs}, attrs={ 'background_label': 0, 'nms_threshold': nms_threshold, 'nms_top_k': nms_top_k, 'keep_top_k': keep_top_k, 'score_threshold': score_threshold, 'nms_eta': 1.0, }, ) nmsed_outs.stop_gradient = True if return_index: return nmsed_outs, index return nmsed_outs @templatedoc() def iou_similarity(x, y, box_normalized=True, name=None): """ :alias_main: 
paddle.nn.functional.iou_similarity :alias: paddle.nn.functional.iou_similarity,paddle.nn.functional.loss.iou_similarity :old_api: paddle.fluid.layers.iou_similarity ${comment} Args: x (Variable): ${x_comment}.The data type is float32 or float64. y (Variable): ${y_comment}.The data type is float32 or float64. box_normalized(bool): Whether treat the priorbox as a normalized box. Set true by default. Returns: Variable: ${out_comment}.The data type is same with x. Examples: .. code-block:: python import numpy as np import paddle.fluid as fluid use_gpu = False place = fluid.CUDAPlace(0) if use_gpu else fluid.CPUPlace() exe = fluid.Executor(place) x = fluid.data(name='x', shape=[None, 4], dtype='float32') y = fluid.data(name='y', shape=[None, 4], dtype='float32') iou = fluid.layers.iou_similarity(x=x, y=y) exe.run(fluid.default_startup_program()) test_program = fluid.default_main_program().clone(for_test=True) [out_iou] = exe.run(test_program, fetch_list=iou, feed={'x': np.array([[0.5, 0.5, 2.0, 2.0], [0., 0., 1.0, 1.0]]).astype('float32'), 'y': np.array([[1.0, 1.0, 2.5, 2.5]]).astype('float32')}) # out_iou is [[0.2857143], # [0. ]] with shape: [2, 1] """ helper = LayerHelper("iou_similarity", **locals()) out = helper.create_variable_for_type_inference(dtype=x.dtype) helper.append_op( type="iou_similarity", inputs={"X": x, "Y": y}, attrs={"box_normalized": box_normalized}, outputs={"Out": out}, ) return out @templatedoc() def box_coder( prior_box, prior_box_var, target_box, code_type="encode_center_size", box_normalized=True, name=None, axis=0, ): r""" **Box Coder Layer** Encode/Decode the target bounding box with the priorbox information. The Encoding schema described below: .. math:: ox = (tx - px) / pw / pxv oy = (ty - py) / ph / pyv ow = \log(\abs(tw / pw)) / pwv oh = \log(\abs(th / ph)) / phv The Decoding schema described below: .. 
    math::

        ox = (pw * pxv * tx + px) - tw / 2

        oy = (ph * pyv * ty + py) - th / 2

        ow = \exp(pwv * tw) * pw + tw / 2

        oh = \exp(phv * th) * ph + th / 2

    where `tx`, `ty`, `tw`, `th` denote the target box's center coordinates,
    width and height respectively. Similarly, `px`, `py`, `pw`, `ph` denote
    the priorbox's (anchor) center coordinates, width and height. `pxv`,
    `pyv`, `pwv`, `phv` denote the variance of the priorbox and `ox`, `oy`,
    `ow`, `oh` denote the encoded/decoded coordinates, width and height.

    During Box Decoding, two modes for broadcast are supported. Say target
    box has shape [N, M, 4], and the shape of prior box can be [N, 4] or
    [M, 4]. Then prior box will broadcast to target box along the
    assigned axis.

    Args:
        prior_box(Variable): Box list prior_box is a 2-D Tensor with shape
            [M, 4] holds M boxes and data type is float32 or float64. Each
            box is represented as [xmin, ymin, xmax, ymax], [xmin, ymin] is
            the left top coordinate of the anchor box, if the input is image
            feature map, they are close to the origin of the coordinate
            system. [xmax, ymax] is the right bottom coordinate of the
            anchor box.
        prior_box_var(List|Variable|None): prior_box_var supports three types
            of input. One is variable with shape [M, 4] which holds M group
            and data type is float32 or float64. The second is list consist
            of 4 elements shared by all boxes and data type is float32 or
            float64. Other is None and not involved in calculation.
        target_box(Variable): This input can be a 2-D LoDTensor with shape
            [N, 4] when code_type is 'encode_center_size'. This input also
            can be a 3-D Tensor with shape [N, M, 4] when code_type is
            'decode_center_size'. Each box is represented as
            [xmin, ymin, xmax, ymax]. The data type is float32 or float64.
            This tensor can contain LoD information to represent a batch
            of inputs.
        code_type(str): The code type used with the target box. It can be
            `encode_center_size` or `decode_center_size`. `encode_center_size`
            by default.
        box_normalized(bool): Whether treat the priorbox as a normalized box.
            Set true by default.
        name(str, optional): For detailed information, please refer
            to :ref:`api_guide_Name`. Usually name is no need to set and
            None by default.
        axis(int): Which axis in PriorBox to broadcast for box decode,
            for example, if axis is 0 and TargetBox has shape [N, M, 4] and
            PriorBox has shape [M, 4], then PriorBox will broadcast to
            [N, M, 4] for decoding. It is only valid when code type is
            `decode_center_size`. Set 0 by default.

    Returns:
        Variable:

        output_box(Variable): When code_type is 'encode_center_size', the
        output tensor of box_coder_op with shape [N, M, 4] representing the
        result of N target boxes encoded with M Prior boxes and variances.
        When code_type is 'decode_center_size', N represents the batch size
        and M represents the number of decoded boxes.

    Examples:

        .. code-block:: python

            import paddle.fluid as fluid
            import paddle
            paddle.enable_static()
            # For encode
            prior_box_encode = fluid.data(name='prior_box_encode',
                                          shape=[512, 4],
                                          dtype='float32')
            target_box_encode = fluid.data(name='target_box_encode',
                                           shape=[81, 4],
                                           dtype='float32')
            output_encode = fluid.layers.box_coder(prior_box=prior_box_encode,
                                                   prior_box_var=[0.1,0.1,0.2,0.2],
                                                   target_box=target_box_encode,
                                                   code_type="encode_center_size")
            # For decode
            prior_box_decode = fluid.data(name='prior_box_decode',
                                          shape=[512, 4],
                                          dtype='float32')
            target_box_decode = fluid.data(name='target_box_decode',
                                           shape=[512, 81, 4],
                                           dtype='float32')
            output_decode = fluid.layers.box_coder(prior_box=prior_box_decode,
                                                   prior_box_var=[0.1,0.1,0.2,0.2],
                                                   target_box=target_box_decode,
                                                   code_type="decode_center_size",
                                                   box_normalized=False,
                                                   axis=1)
    """
    # Thin compatibility wrapper: the actual implementation lives in
    # paddle.vision.ops.box_coder.
    return paddle.vision.ops.box_coder(
        prior_box=prior_box,
        prior_box_var=prior_box_var,
        target_box=target_box,
        code_type=code_type,
        box_normalized=box_normalized,
        axis=axis,
        name=name,
    )


@templatedoc()
def polygon_box_transform(input, name=None):
    """
    ${comment}

    Args:
        input(Variable): The input with shape [batch_size, geometry_channels,
            height, width]. A Tensor with type float32, float64.
        name(str, Optional): For details, please refer to :ref:`api_guide_Name`.
            Generally, no setting is required. Default: None.

    Returns:
        Variable: The output with the same shape as input. A Tensor with type
            float32, float64.

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid
            input = fluid.data(name='input', shape=[4, 10, 5, 5], dtype='float32')
            out = fluid.layers.polygon_box_transform(input)
    """
    check_variable_and_dtype(
        input, "input", ['float32', 'float64'], 'polygon_box_transform'
    )
    helper = LayerHelper("polygon_box_transform", **locals())
    output = helper.create_variable_for_type_inference(dtype=input.dtype)

    helper.append_op(
        type="polygon_box_transform",
        inputs={"Input": input},
        attrs={},
        outputs={"Output": output},
    )
    return output


@deprecated(since="2.0.0", update_to="paddle.vision.ops.yolo_loss")
@templatedoc(op_type="yolov3_loss")
def yolov3_loss(
    x,
    gt_box,
    gt_label,
    anchors,
    anchor_mask,
    class_num,
    ignore_thresh,
    downsample_ratio,
    gt_score=None,
    use_label_smooth=True,
    name=None,
    scale_x_y=1.0,
):
    """
    ${comment}

    Args:
        x (Variable): ${x_comment}The data type is float32 or float64.
        gt_box (Variable): ground truth boxes, should be in shape of [N, B, 4],
            in the third dimension, x, y, w, h should be stored. x,y is the
            center coordinate of boxes, w, h are the width and height, and
            x, y, w, h should be divided by input image height to scale to
            [0, 1]. N is the batch number and B is the max box number in an
            image. The data type is float32 or float64.
        gt_label (Variable): class id of ground truth boxes, should be in shape
            of [N, B]. The data type is int32.
        anchors (list|tuple): ${anchors_comment}
        anchor_mask (list|tuple): ${anchor_mask_comment}
        class_num (int): ${class_num_comment}
        ignore_thresh (float): ${ignore_thresh_comment}
        downsample_ratio (int): ${downsample_ratio_comment}
        name (string): The default value is None. Normally there is no need
            for user to set this property. For more information, please refer
            to :ref:`api_guide_Name`
        gt_score (Variable): mixup score of ground truth boxes, should be in
            shape of [N, B]. Default None.
        use_label_smooth (bool): ${use_label_smooth_comment}
        scale_x_y (float): ${scale_x_y_comment}

    Returns:
        Variable: A 1-D tensor with shape [N], the value of yolov3 loss

    Raises:
        TypeError: Input x of yolov3_loss must be Variable
        TypeError: Input gtbox of yolov3_loss must be Variable
        TypeError: Input gtlabel of yolov3_loss must be Variable
        TypeError: Input gtscore of yolov3_loss must be None or Variable
        TypeError: Attr anchors of yolov3_loss must be list or tuple
        TypeError: Attr class_num of yolov3_loss must be an integer
        TypeError: Attr ignore_thresh of yolov3_loss must be a float number
        TypeError: Attr use_label_smooth of yolov3_loss must be a bool value

    Examples:
      .. code-block:: python

          import paddle.fluid as fluid
          import paddle
          paddle.enable_static()
          x = fluid.data(name='x', shape=[None, 255, 13, 13], dtype='float32')
          gt_box = fluid.data(name='gt_box', shape=[None, 6, 4], dtype='float32')
          gt_label = fluid.data(name='gt_label', shape=[None, 6], dtype='int32')
          gt_score = fluid.data(name='gt_score', shape=[None, 6], dtype='float32')
          anchors = [10, 13, 16, 30, 33, 23, 30, 61, 62, 45, 59, 119, 116, 90,
                     156, 198, 373, 326]
          anchor_mask = [0, 1, 2]
          loss = fluid.layers.yolov3_loss(x=x, gt_box=gt_box, gt_label=gt_label,
                                          gt_score=gt_score, anchors=anchors,
                                          anchor_mask=anchor_mask, class_num=80,
                                          ignore_thresh=0.7,
                                          downsample_ratio=32)
    """
    # Validate input kinds eagerly so users get a clear TypeError instead of
    # a cryptic op-level failure later.
    if not isinstance(x, Variable):
        raise TypeError("Input x of yolov3_loss must be Variable")
    if not isinstance(gt_box, Variable):
        raise TypeError("Input gtbox of yolov3_loss must be Variable")
    if not isinstance(gt_label, Variable):
        raise TypeError("Input gtlabel of yolov3_loss must be Variable")
    if gt_score is not None and not isinstance(gt_score, Variable):
        raise TypeError("Input gtscore of yolov3_loss must be Variable")
    if not isinstance(anchors, list) and not isinstance(anchors, tuple):
        raise TypeError("Attr anchors of yolov3_loss must be list or tuple")
    if not isinstance(anchor_mask, list) and not isinstance(anchor_mask, tuple):
        raise TypeError("Attr anchor_mask of yolov3_loss must be list or tuple")
    if not isinstance(class_num, int):
        raise TypeError("Attr class_num of yolov3_loss must be an integer")
    if not isinstance(ignore_thresh, float):
        raise TypeError(
            "Attr ignore_thresh of yolov3_loss must be a float number"
        )
    if not isinstance(use_label_smooth, bool):
        raise TypeError(
            "Attr use_label_smooth of yolov3_loss must be a bool value"
        )

    if _non_static_mode():
        # Dygraph fast path: call the legacy C++ op directly with a flat
        # (name, value, name, value, ...) attribute tuple.
        attrs = (
            "anchors",
            anchors,
            "anchor_mask",
            anchor_mask,
            "class_num",
            class_num,
            "ignore_thresh",
            ignore_thresh,
            "downsample_ratio",
            downsample_ratio,
            "use_label_smooth",
            use_label_smooth,
            "scale_x_y",
            scale_x_y,
        )
        loss, _, _ = _legacy_C_ops.yolov3_loss(
            x, gt_box, gt_label, gt_score, *attrs
        )
        return loss

    helper = LayerHelper('yolov3_loss', **locals())
    loss = helper.create_variable_for_type_inference(dtype=x.dtype)
    # Intermediate outputs required by the op definition; not returned.
    objectness_mask = helper.create_variable_for_type_inference(dtype='int32')
    gt_match_mask = helper.create_variable_for_type_inference(dtype='int32')

    inputs = {
        "X": x,
        "GTBox": gt_box,
        "GTLabel": gt_label,
    }
    # GTScore is an optional op input; only wire it when provided.
    if gt_score is not None:
        inputs["GTScore"] = gt_score

    attrs = {
        "anchors": anchors,
        "anchor_mask": anchor_mask,
        "class_num": class_num,
        "ignore_thresh": ignore_thresh,
        "downsample_ratio": downsample_ratio,
        "use_label_smooth": use_label_smooth,
        "scale_x_y": scale_x_y,
    }

    helper.append_op(
        type='yolov3_loss',
        inputs=inputs,
        outputs={
            'Loss': loss,
            'ObjectnessMask': objectness_mask,
            'GTMatchMask': gt_match_mask,
        },
        attrs=attrs,
    )
    return loss


@deprecated(since="2.0.0", update_to="paddle.vision.ops.yolo_box")
@templatedoc(op_type="yolo_box")
def yolo_box(
    x,
    img_size,
    anchors,
    class_num,
    conf_thresh,
    downsample_ratio,
    clip_bbox=True,
    name=None,
    scale_x_y=1.0,
    iou_aware=False,
    iou_aware_factor=0.5,
):
    """
    ${comment}

    Args:
        x (Variable): ${x_comment} The data type is float32 or
            float64.
        img_size (Variable): ${img_size_comment} The data type is int32.
        anchors (list|tuple): ${anchors_comment}
        class_num (int): ${class_num_comment}
        conf_thresh (float): ${conf_thresh_comment}
        downsample_ratio (int): ${downsample_ratio_comment}
        clip_bbox (bool): ${clip_bbox_comment}
        scale_x_y (float): ${scale_x_y_comment}
        name (string): The default value is None. Normally there is no need
            for user to set this property. For more information, please refer
            to :ref:`api_guide_Name`
        iou_aware (bool): ${iou_aware_comment}
        iou_aware_factor (float): ${iou_aware_factor_comment}

    Returns:
        Variable: A 3-D tensor with shape [N, M, 4], the coordinates of boxes,
        and a 3-D tensor with shape [N, M, :attr:`class_num`], the
        classification scores of boxes.

    Raises:
        TypeError: Input x of yolov_box must be Variable
        TypeError: Attr anchors of yolo box must be list or tuple
        TypeError: Attr class_num of yolo box must be an integer
        TypeError: Attr conf_thresh of yolo box must be a float number

    Examples:

    .. code-block:: python

        import paddle.fluid as fluid
        import paddle
        paddle.enable_static()
        x = fluid.data(name='x', shape=[None, 255, 13, 13], dtype='float32')
        img_size = fluid.data(name='img_size',shape=[None, 2],dtype='int64')
        anchors = [10, 13, 16, 30, 33, 23]
        boxes,scores = fluid.layers.yolo_box(x=x, img_size=img_size,
                                             class_num=80, anchors=anchors,
                                             conf_thresh=0.01,
                                             downsample_ratio=32)
    """
    helper = LayerHelper('yolo_box', **locals())

    # Validate inputs and attributes up front for clear error messages.
    if not isinstance(x, Variable):
        raise TypeError("Input x of yolo_box must be Variable")
    if not isinstance(img_size, Variable):
        raise TypeError("Input img_size of yolo_box must be Variable")
    if not isinstance(anchors, list) and not isinstance(anchors, tuple):
        raise TypeError("Attr anchors of yolo_box must be list or tuple")
    if not isinstance(class_num, int):
        raise TypeError("Attr class_num of yolo_box must be an integer")
    if not isinstance(conf_thresh, float):
        raise TypeError("Attr ignore_thresh of yolo_box must be a float number")

    boxes = helper.create_variable_for_type_inference(dtype=x.dtype)
    scores = helper.create_variable_for_type_inference(dtype=x.dtype)

    attrs = {
        "anchors": anchors,
        "class_num": class_num,
        "conf_thresh": conf_thresh,
        "downsample_ratio": downsample_ratio,
        "clip_bbox": clip_bbox,
        "scale_x_y": scale_x_y,
        "iou_aware": iou_aware,
        "iou_aware_factor": iou_aware_factor,
    }

    helper.append_op(
        type='yolo_box',
        inputs={
            "X": x,
            "ImgSize": img_size,
        },
        outputs={
            'Boxes': boxes,
            'Scores': scores,
        },
        attrs=attrs,
    )
    return boxes, scores


@templatedoc()
def detection_map(
    detect_res,
    label,
    class_num,
    background_label=0,
    overlap_threshold=0.3,
    evaluate_difficult=True,
    has_state=None,
    input_states=None,
    out_states=None,
    ap_version='integral',
):
    """
    ${comment}

    Args:
        detect_res: ${detect_res_comment}
        label: ${label_comment}
        class_num: ${class_num_comment}
        background_label: ${background_label_comment}
        overlap_threshold: ${overlap_threshold_comment}
        evaluate_difficult: ${evaluate_difficult_comment}
        has_state: ${has_state_comment}
        input_states: (tuple|None) If not None, It contains 3 elements:
            (1) pos_count ${pos_count_comment}.
            (2) true_pos ${true_pos_comment}.
            (3) false_pos ${false_pos_comment}.
        out_states: (tuple|None) If not None, it contains 3 elements.
            (1) accum_pos_count ${accum_pos_count_comment}.
            (2) accum_true_pos ${accum_true_pos_comment}.
            (3) accum_false_pos ${accum_false_pos_comment}.
        ap_version: ${ap_type_comment}

    Returns:
        ${map_comment}


    Examples:
        ..
          code-block:: python

            import paddle.fluid as fluid
            from fluid.layers import detection
            detect_res = fluid.data(
                name='detect_res',
                shape=[10, 6],
                dtype='float32')
            label = fluid.data(
                name='label',
                shape=[10, 6],
                dtype='float32')

            map_out = detection.detection_map(detect_res, label, 21)
    """
    helper = LayerHelper("detection_map", **locals())

    def __create_var(type):
        # Helper to create a fresh output variable of the given dtype.
        return helper.create_variable_for_type_inference(dtype=type)

    map_out = __create_var('float32')
    # Accumulated states are written into user-provided variables when given,
    # otherwise into freshly created ones.
    accum_pos_count_out = (
        out_states[0] if out_states is not None else __create_var('int32')
    )
    accum_true_pos_out = (
        out_states[1] if out_states is not None else __create_var('float32')
    )
    accum_false_pos_out = (
        out_states[2] if out_states is not None else __create_var('float32')
    )

    pos_count = input_states[0] if input_states is not None else None
    true_pos = input_states[1] if input_states is not None else None
    false_pos = input_states[2] if input_states is not None else None

    helper.append_op(
        type="detection_map",
        inputs={
            'Label': label,
            'DetectRes': detect_res,
            'HasState': has_state,
            'PosCount': pos_count,
            'TruePos': true_pos,
            'FalsePos': false_pos,
        },
        outputs={
            'MAP': map_out,
            'AccumPosCount': accum_pos_count_out,
            'AccumTruePos': accum_true_pos_out,
            'AccumFalsePos': accum_false_pos_out,
        },
        attrs={
            'overlap_threshold': overlap_threshold,
            'evaluate_difficult': evaluate_difficult,
            'ap_type': ap_version,
            'class_num': class_num,
        },
    )
    return map_out


def bipartite_match(
    dist_matrix, match_type=None, dist_threshold=None, name=None
):
    """

    This operator implements a greedy bipartite matching algorithm, which is
    used to obtain the matching with the maximum distance based on the input
    distance matrix. For input 2D matrix, the bipartite matching algorithm can
    find the matched column for each row (matched means the largest distance),
    also can find the matched row for each column. And this operator only
    calculate matched indices from column to row. For each instance, the number
    of matched indices is the column number of the input distance matrix.
    **The OP only supports CPU**.

    There are two outputs, matched indices and distance.
    A simple description, this algorithm matched the best (maximum distance)
    row entity to the column entity and the matched indices are not duplicated
    in each row of ColToRowMatchIndices. If the column entity is not matched
    any row entity, set -1 in ColToRowMatchIndices.

    NOTE: the input DistMat can be LoDTensor (with LoD) or Tensor.
    If LoDTensor with LoD, the height of ColToRowMatchIndices is batch size.
    If Tensor, the height of ColToRowMatchIndices is 1.

    NOTE: This API is a very low level API. It is used by :code:`ssd_loss`
    layer. Please consider to use :code:`ssd_loss` instead.

    Args:
        dist_matrix(Variable): This input is a 2-D LoDTensor with shape
            [K, M]. The data type is float32 or float64. It is pair-wise
            distance matrix between the entities represented by each row and
            each column. For example, assumed one entity is A with shape [K],
            another entity is B with shape [M]. The dist_matrix[i][j] is the
            distance between A[i] and B[j]. The bigger the distance is, the
            better matching the pairs are. NOTE: This tensor can contain LoD
            information to represent a batch of inputs. One instance of this
            batch can contain different numbers of entities.
        match_type(str, optional): The type of matching method, should be
            'bipartite' or 'per_prediction'. None ('bipartite') by default.
        dist_threshold(float32, optional): If `match_type` is 'per_prediction',
            this threshold is to determine the extra matching bboxes based
            on the maximum distance, 0.5 by default.
        name(str, optional): For detailed information, please refer
            to :ref:`api_guide_Name`. Usually name is no need to set and
            None by default.

    Returns:
        Tuple:

        matched_indices(Variable): A 2-D Tensor with shape [N, M]. The data
        type is int32. N is the batch size. If match_indices[i][j] is -1, it
        means B[j] does not match any entity in i-th instance.
        Otherwise, it means B[j] is matched to row match_indices[i][j]
        in i-th instance. The row number of i-th instance is saved in
        match_indices[i][j].

        matched_distance(Variable): A 2-D Tensor with shape [N, M]. The data
        type is float32. N is batch size. If match_indices[i][j] is -1,
        match_distance[i][j] is also -1.0. Otherwise, assumed
        match_distance[i][j] = d, and the row offsets of each instance are
        called LoD. Then match_distance[i][j] = dist_matrix[d+LoD[i]][j].

    Examples:

        >>> import paddle.fluid as fluid
        >>> x = fluid.data(name='x', shape=[None, 4], dtype='float32')
        >>> y = fluid.data(name='y', shape=[None, 4], dtype='float32')
        >>> iou = fluid.layers.iou_similarity(x=x, y=y)
        >>> matched_indices, matched_dist = fluid.layers.bipartite_match(iou)
    """
    helper = LayerHelper('bipartite_match', **locals())
    match_indices = helper.create_variable_for_type_inference(dtype='int32')
    # Distance output keeps the dtype of the input distance matrix.
    match_distance = helper.create_variable_for_type_inference(
        dtype=dist_matrix.dtype
    )
    helper.append_op(
        type='bipartite_match',
        inputs={'DistMat': dist_matrix},
        attrs={
            'match_type': match_type,
            'dist_threshold': dist_threshold,
        },
        outputs={
            'ColToRowMatchIndices': match_indices,
            'ColToRowMatchDist': match_distance,
        },
    )
    return match_indices, match_distance


def target_assign(
    input,
    matched_indices,
    negative_indices=None,
    mismatch_value=None,
    name=None,
):
    """

    This operator can be, for given the target bounding boxes or labels,
    to assign classification and regression targets to each prediction as well
    as weights to prediction. The weights is used to specify which prediction
    would not contribute to training loss.

    For each instance, the output `out` and `out_weight` are assigned based on
    `match_indices` and `negative_indices`.
    Assumed that the row offset for each instance in `input` is called lod,
    this operator assigns classification/regression targets by performing the
    following steps:

    1. Assigning all outputs based on `match_indices`:

    ..
        code-block:: text

            If id = match_indices[i][j] > 0,

                out[i][j][0 : K] = X[lod[i] + id][j % P][0 : K]
                out_weight[i][j] = 1.

            Otherwise,

                out[j][j][0 : K] = {mismatch_value, mismatch_value, ...}
                out_weight[i][j] = 0.

    2. Assigning outputs based on `neg_indices` if `neg_indices` is provided:

       Assumed that i-th instance in `neg_indices` is called `neg_indice`,
       for i-th instance:

    .. code-block:: text

            for id in neg_indice:
                out[i][id][0 : K] = {mismatch_value, mismatch_value, ...}
                out_weight[i][id] = 1.0

    Args:
       input (Variable): This input is a 3D LoDTensor with shape [M, P, K].
           Data type should be int32 or float32.
       matched_indices (Variable): The input matched indices
           is 2D Tensor<int32> with shape [N, P], If MatchIndices[i][j] is -1,
           the j-th entity of column is not matched to any entity of row in
           i-th instance.
       negative_indices (Variable, optional): The input negative example
           indices are an optional input with shape [Neg, 1] and int32 type,
           where Neg is the total number of negative example indices.
       mismatch_value (float32, optional): Fill this value to the mismatched
           location.
       name (string): The default value is None. Normally there is no need for
           user to set this property. For more information, please refer
           to :ref:`api_guide_Name`.

    Returns:
        tuple: A tuple(out, out_weight) is returned.

        out (Variable): a 3D Tensor with shape [N, P, K] and same data type
        with `input`, N and P is the same as they are in `matched_indices`,
        K is the same as it in input of X.

        out_weight (Variable): the weight for output with the shape of
        [N, P, 1]. Data type is float32.

    Examples:

        .. code-block:: python

            import paddle.fluid as fluid
            import paddle
            paddle.enable_static()
            x = fluid.data(
                name='x',
                shape=[4, 20, 4],
                dtype='float',
                lod_level=1)
            matched_id = fluid.data(
                name='indices',
                shape=[8, 20],
                dtype='int32')
            trg, trg_weight = fluid.layers.target_assign(
                x,
                matched_id,
                mismatch_value=0)
    """
    helper = LayerHelper('target_assign', **locals())
    out = helper.create_variable_for_type_inference(dtype=input.dtype)
    out_weight = helper.create_variable_for_type_inference(dtype='float32')
    helper.append_op(
        type='target_assign',
        inputs={
            'X': input,
            'MatchIndices': matched_indices,
            'NegIndices': negative_indices,
        },
        outputs={'Out': out, 'OutWeight': out_weight},
        attrs={'mismatch_value': mismatch_value},
    )
    return out, out_weight


def ssd_loss(
    location,
    confidence,
    gt_box,
    gt_label,
    prior_box,
    prior_box_var=None,
    background_label=0,
    overlap_threshold=0.5,
    neg_pos_ratio=3.0,
    neg_overlap=0.5,
    loc_loss_weight=1.0,
    conf_loss_weight=1.0,
    match_type='per_prediction',
    mining_type='max_negative',
    normalize=True,
    sample_size=None,
):
    r"""
    :alias_main: paddle.nn.functional.ssd_loss
    :alias: paddle.nn.functional.ssd_loss,paddle.nn.functional.loss.ssd_loss
    :old_api: paddle.fluid.layers.ssd_loss

    **Multi-box loss layer for object detection algorithm of SSD**

    This layer is to compute detection loss for SSD given the location offset
    predictions, confidence predictions, prior boxes and ground-truth bounding
    boxes and labels, and the type of hard example mining. The returned loss
    is a weighted sum of the localization loss (or regression loss) and
    confidence loss (or classification loss) by performing the following steps:

    1. Find matched bounding box by bipartite matching algorithm.

      1.1 Compute IOU similarity between ground-truth boxes and prior boxes.

      1.2 Compute matched bounding box by bipartite matching algorithm.

    2. Compute confidence for mining hard examples

      2.1. Get the target label based on matched indices.

      2.2. Compute confidence loss.

    3. Apply hard example mining to get the negative example indices and
       update the matched indices.

    4. Assign classification and regression targets

      4.1. Encoded bbox according to the prior boxes.

      4.2. Assign regression targets.

      4.3. Assign classification targets.

    5. Compute the overall objective loss.

      5.1 Compute confidence loss.

      5.2 Compute localization loss.

      5.3 Compute the overall weighted loss.

    Args:
        location (Variable): The location predictions are a 3D Tensor with
            shape [N, Np, 4], N is the batch size, Np is total number of
            predictions for each instance. 4 is the number of coordinate
            values, the layout is [xmin, ymin, xmax, ymax]. The data type is
            float32 or float64.
        confidence (Variable): The confidence predictions are a 3D Tensor
            with shape [N, Np, C], N and Np are the same as they are in
            `location`, C is the class number. The data type is float32 or
            float64.
        gt_box (Variable): The ground-truth bounding boxes (bboxes) are a 2D
            LoDTensor with shape [Ng, 4], Ng is the total number of
            ground-truth bboxes of mini-batch input. The data type is float32
            or float64.
        gt_label (Variable): The ground-truth labels are a 2D LoDTensor
            with shape [Ng, 1]. Ng is the total number of ground-truth bboxes
            of mini-batch input, 1 is the number of class. The data type is
            float32 or float64.
        prior_box (Variable): The prior boxes are a 2D Tensor with shape
            [Np, 4]. Np and 4 are the same as they are in `location`.
            The data type is float32 or float64.
        prior_box_var (Variable): The variance of prior boxes are a 2D Tensor
            with shape [Np, 4]. Np and 4 are the same as they are in
            `prior_box`
        background_label (int): The index of background label, 0 by default.
        overlap_threshold (float): If match_type is 'per_prediction', use
            'overlap_threshold' to determine the extra matching bboxes when
            finding matched boxes. 0.5 by default.
        neg_pos_ratio (float): The ratio of the negative boxes to the positive
            boxes, used only when mining_type is 'max_negative', 3.0 by
            default.
        neg_overlap (float): The negative overlap upper bound for the unmatched
            predictions. Use only when mining_type is 'max_negative',
            0.5 by default.
        loc_loss_weight (float): Weight for localization loss, 1.0 by default.
        conf_loss_weight (float): Weight for confidence loss, 1.0 by default.
        match_type (str): The type of matching method during training, should
            be 'bipartite' or 'per_prediction', 'per_prediction' by default.
        mining_type (str): The hard example mining type, should be
            'hard_example' or 'max_negative', now only support `max_negative`.
        normalize (bool): Whether to normalize the SSD loss by the total number
            of output locations, True by default.
        sample_size (int): The max sample size of negative box, used only when
            mining_type is 'hard_example'.

    Returns:
        Variable(Tensor): The weighted sum of the localization loss and
        confidence loss, with shape [N * Np, 1], N and Np are the same as they
        are in `location`. The data type is float32 or float64.

    Raises:
        ValueError: If mining_type is 'hard_example', now only support mining \
        type of `max_negative`.

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid
            pb = fluid.data(
                           name='prior_box',
                           shape=[10, 4],
                           dtype='float32')
            pbv = fluid.data(
                           name='prior_box_var',
                           shape=[10, 4],
                           dtype='float32')
            loc = fluid.data(name='target_box', shape=[10, 4], dtype='float32')
            scores = fluid.data(name='scores', shape=[10, 21], dtype='float32')
            gt_box = fluid.data(
                 name='gt_box', shape=[4], lod_level=1, dtype='float32')
            gt_label = fluid.data(
                name='gt_label', shape=[1], lod_level=1, dtype='float32')
            loss = fluid.layers.ssd_loss(loc, scores, gt_box, gt_label, pb, pbv)
    """
    helper = LayerHelper('ssd_loss', **locals())
    # Only max-negative hard-example mining is implemented by the backing op.
    if mining_type != 'max_negative':
        raise ValueError("Only support mining_type == max_negative now.")

    num, num_prior, num_class = confidence.shape
    conf_shape = nn.shape(confidence)

    def __reshape_to_2d(var):
        # Flatten all trailing dims after the second axis into one:
        # [d0, d1, d2, ...] -> [d0 * d1, rest].
        return nn.flatten(x=var, axis=2)

    # 1. Find matched bounding box by prior box.
# 1.1 Compute IOU similarity between ground-truth boxes and prior boxes. iou = iou_similarity(x=gt_box, y=prior_box) # 1.2 Compute matched bounding box by bipartite matching algorithm. matched_indices, matched_dist = bipartite_match( iou, match_type, overlap_threshold ) # 2. Compute confidence for mining hard examples # 2.1. Get the target label based on matched indices gt_label = nn.reshape( x=gt_label, shape=(len(gt_label.shape) - 1) * (0,) + (-1, 1) ) gt_label.stop_gradient = True target_label, _ = target_assign( gt_label, matched_indices, mismatch_value=background_label ) # 2.2. Compute confidence loss. # Reshape confidence to 2D tensor. confidence = __reshape_to_2d(confidence) target_label = tensor.cast(x=target_label, dtype='int64') target_label = __reshape_to_2d(target_label) target_label.stop_gradient = True conf_loss = softmax_with_cross_entropy(confidence, target_label) # 3. Mining hard examples actual_shape = nn.slice(conf_shape, axes=[0], starts=[0], ends=[2]) actual_shape.stop_gradient = True # shape=(-1, 0) is set for compile-time, the correct shape is set by # actual_shape in runtime. conf_loss = nn.reshape( x=conf_loss, shape=(-1, 0), actual_shape=actual_shape ) conf_loss.stop_gradient = True neg_indices = helper.create_variable_for_type_inference(dtype='int32') dtype = matched_indices.dtype updated_matched_indices = helper.create_variable_for_type_inference( dtype=dtype ) helper.append_op( type='mine_hard_examples', inputs={ 'ClsLoss': conf_loss, 'LocLoss': None, 'MatchIndices': matched_indices, 'MatchDist': matched_dist, }, outputs={ 'NegIndices': neg_indices, 'UpdatedMatchIndices': updated_matched_indices, }, attrs={ 'neg_pos_ratio': neg_pos_ratio, 'neg_dist_threshold': neg_overlap, 'mining_type': mining_type, 'sample_size': sample_size, }, ) # 4. Assign classification and regression targets # 4.1. Encoded bbox according to the prior boxes. 
encoded_bbox = box_coder( prior_box=prior_box, prior_box_var=prior_box_var, target_box=gt_box, code_type='encode_center_size', ) # 4.2. Assign regression targets target_bbox, target_loc_weight = target_assign( encoded_bbox, updated_matched_indices, mismatch_value=background_label ) # 4.3. Assign classification targets target_label, target_conf_weight = target_assign( gt_label, updated_matched_indices, negative_indices=neg_indices, mismatch_value=background_label, ) # 5. Compute loss. # 5.1 Compute confidence loss. target_label = __reshape_to_2d(target_label) target_label = tensor.cast(x=target_label, dtype='int64') conf_loss = softmax_with_cross_entropy(confidence, target_label) target_conf_weight = __reshape_to_2d(target_conf_weight) conf_loss = conf_loss * target_conf_weight # the target_label and target_conf_weight do not have gradient. target_label.stop_gradient = True target_conf_weight.stop_gradient = True # 5.2 Compute regression loss. location = __reshape_to_2d(location) target_bbox = __reshape_to_2d(target_bbox) loc_loss = nn.smooth_l1(location, target_bbox) target_loc_weight = __reshape_to_2d(target_loc_weight) loc_loss = loc_loss * target_loc_weight # the target_bbox and target_loc_weight do not have gradient. target_bbox.stop_gradient = True target_loc_weight.stop_gradient = True # 5.3 Compute overall weighted loss. loss = conf_loss_weight * conf_loss + loc_loss_weight * loc_loss # reshape to [N, Np], N is the batch size and Np is the prior box number. # shape=(-1, 0) is set for compile-time, the correct shape is set by # actual_shape in runtime. 
    loss = nn.reshape(x=loss, shape=(-1, 0), actual_shape=actual_shape)
    loss = nn.reduce_sum(loss, dim=1, keep_dim=True)
    if normalize:
        # Normalize by the number of matched (positive) locations.
        normalizer = nn.reduce_sum(target_loc_weight)
        loss = loss / normalizer

    return loss


def prior_box(
    input,
    image,
    min_sizes,
    max_sizes=None,
    aspect_ratios=[1.0],
    variance=[0.1, 0.1, 0.2, 0.2],
    flip=False,
    clip=False,
    steps=[0.0, 0.0],
    offset=0.5,
    name=None,
    min_max_aspect_ratios_order=False,
):
    """
    This op generates prior boxes for SSD(Single Shot MultiBox Detector)
    algorithm. Each position of the input produce N prior boxes, N is
    determined by the count of min_sizes, max_sizes and aspect_ratios, The
    size of the box is in range(min_size, max_size) interval, which is
    generated in sequence according to the aspect_ratios.

    Parameters:
       input(Variable): 4-D tensor(NCHW), the data type should be float32 or
            float64.
       image(Variable): 4-D tensor(NCHW), the input image data of PriorBoxOp,
            the data type should be float32 or float64.
       min_sizes(list|tuple|float): the min sizes of generated prior boxes.
       max_sizes(list|tuple|None): the max sizes of generated prior boxes.
            Default: None.
       aspect_ratios(list|tuple|float): the aspect ratios of generated
            prior boxes. Default: [1.].
       variance(list|tuple): the variances to be encoded in prior boxes.
            Default:[0.1, 0.1, 0.2, 0.2].
       flip(bool): Whether to flip aspect ratios. Default:False.
       clip(bool): Whether to clip out-of-boundary boxes. Default: False.
       steps(list|tuple): Prior boxes step across width and height, If
            steps[0] equals to 0.0 or steps[1] equals to 0.0, the prior boxes
            step across height or weight of the input will be automatically
            calculated. Default: [0., 0.]
       offset(float): Prior boxes center offset. Default: 0.5
       min_max_aspect_ratios_order(bool): If set True, the output prior box is
            in order of [min, max, aspect_ratios], which is consistent with
            Caffe. Please note, this order affects the weights order of
            convolution layer followed by and does not affect the final
            detection results. Default: False.
       name(str, optional): The default value is None. Normally there is no
            need for user to set this property. For more information, please
            refer to :ref:`api_guide_Name`

    Returns:
        Tuple: A tuple with two Variable (boxes, variances)

        boxes(Variable): the output prior boxes of PriorBox.
        4-D tensor, the layout is [H, W, num_priors, 4].
        H is the height of input, W is the width of input,
        num_priors is the total box count of each position of input.

        variances(Variable): the expanded variances of PriorBox.
        4-D tensor, the layout is [H, W, num_priors, 4].
        H is the height of input, W is the width of input
        num_priors is the total box count of each position of input

    Examples:
        .. code-block:: python

            #declarative mode
            import paddle.fluid as fluid
            import numpy as np
            import paddle
            paddle.enable_static()
            input = fluid.data(name="input", shape=[None,3,6,9])
            image = fluid.data(name="image", shape=[None,3,9,12])
            box, var = fluid.layers.prior_box(
                 input=input,
                 image=image,
                 min_sizes=[100.],
                 clip=True,
                 flip=True)

            place = fluid.CPUPlace()
            exe = fluid.Executor(place)
            exe.run(fluid.default_startup_program())

            # prepare a batch of data
            input_data = np.random.rand(1,3,6,9).astype("float32")
            image_data = np.random.rand(1,3,9,12).astype("float32")

            box_out, var_out = exe.run(fluid.default_main_program(),
                feed={"input":input_data,"image":image_data},
                fetch_list=[box,var],
                return_numpy=True)

            # print(box_out.shape)
            # (6, 9, 1, 4)
            # print(var_out.shape)
            # (6, 9, 1, 4)

            # imperative mode
            import paddle.fluid.dygraph as dg

            with dg.guard(place) as g:
                input = dg.to_variable(input_data)
                image = dg.to_variable(image_data)
                box, var = fluid.layers.prior_box(
                    input=input,
                    image=image,
                    min_sizes=[100.],
                    clip=True,
                    flip=True)
                # print(box.shape)
                # [6L, 9L, 1L, 4L]
                # print(var.shape)
                # [6L, 9L, 1L, 4L]
    """
    # Thin compatibility wrapper around the paddle.vision implementation.
    return paddle.vision.ops.prior_box(
        input=input,
        image=image,
        min_sizes=min_sizes,
        max_sizes=max_sizes,
        aspect_ratios=aspect_ratios,
        variance=variance,
        flip=flip,
        clip=clip,
        steps=steps,
        offset=offset,
        min_max_aspect_ratios_order=min_max_aspect_ratios_order,
        name=name,
    )


def density_prior_box(
    input,
    image,
    densities=None,
    fixed_sizes=None,
    fixed_ratios=None,
    variance=[0.1, 0.1, 0.2, 0.2],
    clip=False,
    steps=[0.0, 0.0],
    offset=0.5,
    flatten_to_2d=False,
    name=None,
):
    r"""

    This op generates density prior boxes for SSD(Single Shot MultiBox
    Detector) algorithm. Each position of the input produce N prior boxes, N
    is determined by the count of densities, fixed_sizes and fixed_ratios.
    Boxes center at grid points around each input position is generated by
    this operator, and the grid points is determined by densities and
    the count of density prior box is determined by fixed_sizes and
    fixed_ratios. Obviously, the number of fixed_sizes is equal to the number
    of densities.

    For densities_i in densities:

    .. math::

        N\_density_prior\_box = SUM(N\_fixed\_ratios * densities\_i^2)

    N_density_prior_box is the number of density_prior_box and N_fixed_ratios
    is the number of fixed_ratios.

    Parameters:
       input(Variable): 4-D tensor(NCHW), the data type should be float32 of
            float64.
       image(Variable): 4-D tensor(NCHW), the input image data of PriorBoxOp,
            the data type should be float32 or float64. the layout is NCHW.
       densities(list|tuple|None): The densities of generated density prior
            boxes, this attribute should be a list or tuple of integers.
            Default: None.
       fixed_sizes(list|tuple|None): The fixed sizes of generated density
            prior boxes, this attribute should a list or tuple of same
            length with :attr:`densities`. Default: None.
       fixed_ratios(list|tuple|None): The fixed ratios of generated density
            prior boxes, if this attribute is not set and :attr:`densities`
            and :attr:`fix_sizes` is set, :attr:`aspect_ratios` will be used
            to generate density prior boxes.
       variance(list|tuple): The variances to be encoded in density prior
            boxes. Default:[0.1, 0.1, 0.2, 0.2].
       clip(bool): Whether to clip out of boundary boxes. Default: False.
       steps(list|tuple): Prior boxes step across width and height, If
            steps[0] equals 0.0 or steps[1] equals 0.0, the density prior
            boxes step across height or weight of the input will be
            automatically calculated. Default: [0., 0.]
       offset(float): Prior boxes center offset. Default: 0.5
       flatten_to_2d(bool): Whether to flatten output prior boxes and variance
           to 2D shape, the second dim is 4. Default: False.
       name(str, optional): The default value is None. Normally there is no
           need for user to set this property. For more information, please
           refer to :ref:`api_guide_Name`

    Returns:
        Tuple: A tuple with two Variable (boxes, variances)

        boxes: the output density prior boxes of PriorBox.
            4-D tensor, the layout is [H, W, num_priors, 4] when
            flatten_to_2d is False.
            2-D tensor, the layout is [H * W * num_priors, 4] when
            flatten_to_2d is True.
            H is the height of input, W is the width of input,
            and num_priors is the total box count of each position of input.

        variances: the expanded variances of PriorBox.
            4-D tensor, the layout is [H, W, num_priors, 4] when
            flatten_to_2d is False.
            2-D tensor, the layout is [H * W * num_priors, 4] when
            flatten_to_2d is True.
            H is the height of input, W is the width of input,
            and num_priors is the total box count of each position of input.

    Examples:

        ..
        code-block:: python

            #declarative mode

            import paddle.fluid as fluid
            import numpy as np
            import paddle
            paddle.enable_static()

            input = fluid.data(name="input", shape=[None,3,6,9])
            image = fluid.data(name="image", shape=[None,3,9,12])
            box, var = fluid.layers.density_prior_box(
                 input=input,
                 image=image,
                 densities=[4, 2, 1],
                 fixed_sizes=[32.0, 64.0, 128.0],
                 fixed_ratios=[1.],
                 clip=True,
                 flatten_to_2d=True)

            place = fluid.CPUPlace()
            exe = fluid.Executor(place)
            exe.run(fluid.default_startup_program())

            # prepare a batch of data
            input_data = np.random.rand(1,3,6,9).astype("float32")
            image_data = np.random.rand(1,3,9,12).astype("float32")

            box_out, var_out = exe.run(
                fluid.default_main_program(),
                feed={"input":input_data,
                      "image":image_data},
                fetch_list=[box,var],
                return_numpy=True)

            # print(box_out.shape)
            # (1134, 4)
            # print(var_out.shape)
            # (1134, 4)

            #imperative mode
            import paddle.fluid.dygraph as dg

            with dg.guard(place) as g:
                input = dg.to_variable(input_data)
                image = dg.to_variable(image_data)
                box, var = fluid.layers.density_prior_box(
                    input=input,
                    image=image,
                    densities=[4, 2, 1],
                    fixed_sizes=[32.0, 64.0, 128.0],
                    fixed_ratios=[1.],
                    clip=True)

                # print(box.shape)
                # [6L, 9L, 21L, 4L]
                # print(var.shape)
                # [6L, 9L, 21L, 4L]
    """
    helper = LayerHelper("density_prior_box", **locals())
    dtype = helper.input_dtype()
    check_variable_and_dtype(
        input, 'input', ['float32', 'float64'], 'density_prior_box'
    )

    def _is_list_or_tuple_(data):
        # True for list/tuple attribute values accepted by the op.
        return isinstance(data, list) or isinstance(data, tuple)

    check_type(densities, 'densities', (list, tuple), 'density_prior_box')
    check_type(fixed_sizes, 'fixed_sizes', (list, tuple), 'density_prior_box')
    check_type(fixed_ratios, 'fixed_ratios', (list, tuple), 'density_prior_box')
    if len(densities) != len(fixed_sizes):
        raise ValueError('densities and fixed_sizes length should be euqal.')

    if not (_is_list_or_tuple_(steps) and len(steps) == 2):
        raise ValueError(
            'steps should be a list or tuple ',
            'with length 2, (step_width, step_height).',
        )

    # Normalize attribute element types to what the C++ op expects.
    densities = list(map(int, densities))
    fixed_sizes = list(map(float, fixed_sizes))
    fixed_ratios = list(map(float, fixed_ratios))
    steps = list(map(float, steps))

    attrs = {
        'variances': variance,
        'clip': clip,
        'step_w': steps[0],
        'step_h': steps[1],
        'offset': offset,
        'densities': densities,
        'fixed_sizes': fixed_sizes,
        'fixed_ratios': fixed_ratios,
        'flatten_to_2d': flatten_to_2d,
    }

    box = helper.create_variable_for_type_inference(dtype)
    var = helper.create_variable_for_type_inference(dtype)

    helper.append_op(
        type="density_prior_box",
        inputs={"Input": input, "Image": image},
        outputs={"Boxes": box, "Variances": var},
        attrs=attrs,
    )
    # Prior boxes are constants w.r.t. training; no gradient flows into them.
    box.stop_gradient = True
    var.stop_gradient = True
    return box, var


@static_only
def multi_box_head(
    inputs,
    image,
    base_size,
    num_classes,
    aspect_ratios,
    min_ratio=None,
    max_ratio=None,
    min_sizes=None,
    max_sizes=None,
    steps=None,
    step_w=None,
    step_h=None,
    offset=0.5,
    variance=[0.1, 0.1, 0.2, 0.2],
    flip=True,
    clip=False,
    kernel_size=1,
    pad=0,
    stride=1,
    name=None,
    min_max_aspect_ratios_order=False,
):
    """
    :api_attr: Static Graph

    Base on SSD ((Single Shot MultiBox Detector) algorithm, generate prior
    boxes, regression location and classification confidence on multiple
    input feature maps, then output the concatenate results. The details of
    this algorithm, please refer the section 2.2 of SSD paper
    `SSD: Single Shot MultiBox Detector <https://arxiv.org/abs/1512.02325>`_ .

    Args:
        inputs (list(Variable)|tuple(Variable)): The list of input variables,
            the format of all Variables are 4-D Tensor, layout is NCHW.
            Data type should be float32 or float64.
        image (Variable): The input image, layout is NCHW. Data type should be
            the same as inputs.
        base_size(int): the base_size is input image size. When len(inputs) > 2
            and `min_size` and `max_size` are None, the `min_size` and
            `max_size` are calculated by `baze_size`, 'min_ratio' and
            `max_ratio`. The formula is as follows:

        ..
code-block:: text min_sizes = [] max_sizes = [] step = int(math.floor(((max_ratio - min_ratio)) / (num_layer - 2))) for ratio in range(min_ratio, max_ratio + 1, step): min_sizes.append(base_size * ratio / 100.) max_sizes.append(base_size * (ratio + step) / 100.) min_sizes = [base_size * .10] + min_sizes max_sizes = [base_size * .20] + max_sizes num_classes(int): The number of classes. aspect_ratios(list(float) | tuple(float)): the aspect ratios of generated prior boxes. The length of input and aspect_ratios must be equal. min_ratio(int): the min ratio of generated prior boxes. max_ratio(int): the max ratio of generated prior boxes. min_sizes(list|tuple|None): If `len(inputs) <=2`, min_sizes must be set up, and the length of min_sizes should equal to the length of inputs. Default: None. max_sizes(list|tuple|None): If `len(inputs) <=2`, max_sizes must be set up, and the length of min_sizes should equal to the length of inputs. Default: None. steps(list|tuple): If step_w and step_h are the same, step_w and step_h can be replaced by steps. step_w(list|tuple): Prior boxes step across width. If step_w[i] == 0.0, the prior boxes step across width of the inputs[i] will be automatically calculated. Default: None. step_h(list|tuple): Prior boxes step across height, If step_h[i] == 0.0, the prior boxes step across height of the inputs[i] will be automatically calculated. Default: None. offset(float): Prior boxes center offset. Default: 0.5 variance(list|tuple): the variances to be encoded in prior boxes. Default:[0.1, 0.1, 0.2, 0.2]. flip(bool): Whether to flip aspect ratios. Default:False. clip(bool): Whether to clip out-of-boundary boxes. Default: False. kernel_size(int): The kernel size of conv2d. Default: 1. pad(int|list|tuple): The padding of conv2d. Default:0. stride(int|list|tuple): The stride of conv2d. Default:1, name(str): The default value is None. Normally there is no need for user to set this property. For more information, please refer to :ref:`api_guide_Name`. 
min_max_aspect_ratios_order(bool): If set True, the output prior box is in order of [min, max, aspect_ratios], which is consistent with Caffe. Please note, this order affects the weights order of convolution layer followed by and does not affect the final detection results. Default: False. Returns: tuple: A tuple with four Variables. (mbox_loc, mbox_conf, boxes, variances) mbox_loc (Variable): The predicted boxes' location of the inputs. The layout is [N, num_priors, 4], where N is batch size, ``num_priors`` is the number of prior boxes. Data type is the same as input. mbox_conf (Variable): The predicted boxes' confidence of the inputs. The layout is [N, num_priors, C], where ``N`` and ``num_priors`` has the same meaning as above. C is the number of Classes. Data type is the same as input. boxes (Variable): the output prior boxes. The layout is [num_priors, 4]. The meaning of num_priors is the same as above. Data type is the same as input. variances (Variable): the expanded variances for prior boxes. The layout is [num_priors, 4]. Data type is the same as input. Examples 1: set min_ratio and max_ratio: .. 
code-block:: python import paddle paddle.enable_static() images = paddle.static.data(name='data', shape=[None, 3, 300, 300], dtype='float32') conv1 = paddle.static.data(name='conv1', shape=[None, 512, 19, 19], dtype='float32') conv2 = paddle.static.data(name='conv2', shape=[None, 1024, 10, 10], dtype='float32') conv3 = paddle.static.data(name='conv3', shape=[None, 512, 5, 5], dtype='float32') conv4 = paddle.static.data(name='conv4', shape=[None, 256, 3, 3], dtype='float32') conv5 = paddle.static.data(name='conv5', shape=[None, 256, 2, 2], dtype='float32') conv6 = paddle.static.data(name='conv6', shape=[None, 128, 1, 1], dtype='float32') mbox_locs, mbox_confs, box, var = paddle.static.nn.multi_box_head( inputs=[conv1, conv2, conv3, conv4, conv5, conv6], image=images, num_classes=21, min_ratio=20, max_ratio=90, aspect_ratios=[[2.], [2., 3.], [2., 3.], [2., 3.], [2.], [2.]], base_size=300, offset=0.5, flip=True, clip=True) Examples 2: set min_sizes and max_sizes: .. code-block:: python import paddle paddle.enable_static() images = paddle.static.data(name='data', shape=[None, 3, 300, 300], dtype='float32') conv1 = paddle.static.data(name='conv1', shape=[None, 512, 19, 19], dtype='float32') conv2 = paddle.static.data(name='conv2', shape=[None, 1024, 10, 10], dtype='float32') conv3 = paddle.static.data(name='conv3', shape=[None, 512, 5, 5], dtype='float32') conv4 = paddle.static.data(name='conv4', shape=[None, 256, 3, 3], dtype='float32') conv5 = paddle.static.data(name='conv5', shape=[None, 256, 2, 2], dtype='float32') conv6 = paddle.static.data(name='conv6', shape=[None, 128, 1, 1], dtype='float32') mbox_locs, mbox_confs, box, var = paddle.static.nn.multi_box_head( inputs=[conv1, conv2, conv3, conv4, conv5, conv6], image=images, num_classes=21, min_sizes=[60.0, 105.0, 150.0, 195.0, 240.0, 285.0], max_sizes=[[], 150.0, 195.0, 240.0, 285.0, 300.0], aspect_ratios=[[2.], [2., 3.], [2., 3.], [2., 3.], [2.], [2.]], base_size=300, offset=0.5, flip=True, clip=True) """ def 
_reshape_with_axis_(input, axis=1): out = nn.flatten(x=input, axis=axis) return out def _is_list_or_tuple_(data): return isinstance(data, list) or isinstance(data, tuple) def _is_list_or_tuple_and_equal(data, length, err_info): if not (_is_list_or_tuple_(data) and len(data) == length): raise ValueError(err_info) if not _is_list_or_tuple_(inputs): raise ValueError('inputs should be a list or tuple.') num_layer = len(inputs) if num_layer <= 2: assert min_sizes is not None and max_sizes is not None assert len(min_sizes) == num_layer and len(max_sizes) == num_layer elif min_sizes is None and max_sizes is None: min_sizes = [] max_sizes = [] step = int(math.floor(((max_ratio - min_ratio)) / (num_layer - 2))) for ratio in range(min_ratio, max_ratio + 1, step): min_sizes.append(base_size * ratio / 100.0) max_sizes.append(base_size * (ratio + step) / 100.0) min_sizes = [base_size * 0.10] + min_sizes max_sizes = [base_size * 0.20] + max_sizes if aspect_ratios: _is_list_or_tuple_and_equal( aspect_ratios, num_layer, 'aspect_ratios should be list or tuple, and the length of inputs ' 'and aspect_ratios should be the same.', ) if step_h is not None: _is_list_or_tuple_and_equal( step_h, num_layer, 'step_h should be list or tuple, and the length of inputs and ' 'step_h should be the same.', ) if step_w is not None: _is_list_or_tuple_and_equal( step_w, num_layer, 'step_w should be list or tuple, and the length of inputs and ' 'step_w should be the same.', ) if steps is not None: _is_list_or_tuple_and_equal( steps, num_layer, 'steps should be list or tuple, and the length of inputs and ' 'step_w should be the same.', ) step_w = steps step_h = steps mbox_locs = [] mbox_confs = [] box_results = [] var_results = [] for i, input in enumerate(inputs): min_size = min_sizes[i] max_size = max_sizes[i] if not _is_list_or_tuple_(min_size): min_size = [min_size] if not _is_list_or_tuple_(max_size): max_size = [max_size] aspect_ratio = [] if aspect_ratios is not None: aspect_ratio = 
aspect_ratios[i] if not _is_list_or_tuple_(aspect_ratio): aspect_ratio = [aspect_ratio] step = [step_w[i] if step_w else 0.0, step_h[i] if step_w else 0.0] box, var = prior_box( input, image, min_size, max_size, aspect_ratio, variance, flip, clip, step, offset, None, min_max_aspect_ratios_order, ) box_results.append(box) var_results.append(var) num_boxes = box.shape[2] # get loc num_loc_output = num_boxes * 4 mbox_loc = nn.conv2d( input=input, num_filters=num_loc_output, filter_size=kernel_size, padding=pad, stride=stride, ) mbox_loc = nn.transpose(mbox_loc, perm=[0, 2, 3, 1]) mbox_loc_flatten = nn.flatten(mbox_loc, axis=1) mbox_locs.append(mbox_loc_flatten) # get conf num_conf_output = num_boxes * num_classes conf_loc = nn.conv2d( input=input, num_filters=num_conf_output, filter_size=kernel_size, padding=pad, stride=stride, ) conf_loc = nn.transpose(conf_loc, perm=[0, 2, 3, 1]) conf_loc_flatten = nn.flatten(conf_loc, axis=1) mbox_confs.append(conf_loc_flatten) if len(box_results) == 1: box = box_results[0] var = var_results[0] mbox_locs_concat = mbox_locs[0] mbox_confs_concat = mbox_confs[0] else: reshaped_boxes = [] reshaped_vars = [] for i in range(len(box_results)): reshaped_boxes.append(_reshape_with_axis_(box_results[i], axis=3)) reshaped_vars.append(_reshape_with_axis_(var_results[i], axis=3)) box = tensor.concat(reshaped_boxes) var = tensor.concat(reshaped_vars) mbox_locs_concat = tensor.concat(mbox_locs, axis=1) mbox_locs_concat = nn.reshape(mbox_locs_concat, shape=[0, -1, 4]) mbox_confs_concat = tensor.concat(mbox_confs, axis=1) mbox_confs_concat = nn.reshape( mbox_confs_concat, shape=[0, -1, num_classes] ) box.stop_gradient = True var.stop_gradient = True return mbox_locs_concat, mbox_confs_concat, box, var def anchor_generator( input, anchor_sizes=None, aspect_ratios=None, variance=[0.1, 0.1, 0.2, 0.2], stride=None, offset=0.5, name=None, ): """ **Anchor generator operator** Generate anchors for Faster RCNN algorithm. 
Each position of the input produce N anchors, N =
    size(anchor_sizes) * size(aspect_ratios). The order of generated anchors
    is firstly aspect_ratios loop then anchor_sizes loop.

    Args:
       input(Variable): 4-D Tensor with shape [N,C,H,W]. The input feature map.
       anchor_sizes(float32|list|tuple, optional): The anchor sizes of generated
          anchors, given in absolute pixels e.g. [64., 128., 256., 512.].
          For instance, the anchor size of 64 means the area of this anchor
          equals to 64**2. None by default.
       aspect_ratios(float32|list|tuple, optional): The height / width ratios
           of generated anchors, e.g. [0.5, 1.0, 2.0]. None by default.
       variance(list|tuple, optional): The variances to be used in box
           regression deltas. The data type is float32, [0.1, 0.1, 0.2, 0.2] by
           default.
       stride(list|tuple, optional): The anchors stride across width and height.
           The data type is float32. e.g. [16.0, 16.0]. None by default.
       offset(float32, optional): Prior boxes center offset. 0.5 by default.
       name(str, optional): For detailed information, please refer
           to :ref:`api_guide_Name`. Usually name is no need to set and None
           by default.

    Returns:
        Tuple:

        Anchors(Variable): The output anchors with a layout of
        [H, W, num_anchors, 4]. H is the height of input, W is the width of
        input, num_anchors is the box count of each position. Each anchor is
        in (xmin, ymin, xmax, ymax) format an unnormalized.

        Variances(Variable): The expanded variances of anchors with a layout
        of [H, W, num_priors, 4]. H is the height of input, W is the width of
        input num_anchors is the box count of each position. Each variance is
        in (xcenter, ycenter, w, h) format.

    Examples:

        .. code-block:: python

            import paddle.fluid as fluid
            import paddle
            paddle.enable_static()
            conv1 = fluid.data(name='conv1', shape=[None, 48, 16, 16], dtype='float32')
            anchor, var = fluid.layers.anchor_generator(
                input=conv1,
                anchor_sizes=[64, 128, 256, 512],
                aspect_ratios=[0.5, 1.0, 2.0],
                variance=[0.1, 0.1, 0.2, 0.2],
                stride=[16.0, 16.0],
                offset=0.5)
    """
    helper = LayerHelper("anchor_generator", **locals())
    dtype = helper.input_dtype()

    def _is_list_or_tuple_(data):
        return isinstance(data, list) or isinstance(data, tuple)

    # Scalars are promoted to one-element lists so the op always receives
    # sequences for sizes and ratios.
    if not _is_list_or_tuple_(anchor_sizes):
        anchor_sizes = [anchor_sizes]
    if not _is_list_or_tuple_(aspect_ratios):
        aspect_ratios = [aspect_ratios]
    if not (_is_list_or_tuple_(stride) and len(stride) == 2):
        # NOTE(review): ValueError is given two arguments here, so the message
        # renders as a tuple rather than a single string.
        raise ValueError(
            'stride should be a list or tuple ',
            'with length 2, (stride_width, stride_height).',
        )

    anchor_sizes = list(map(float, anchor_sizes))
    aspect_ratios = list(map(float, aspect_ratios))
    stride = list(map(float, stride))

    attrs = {
        'anchor_sizes': anchor_sizes,
        'aspect_ratios': aspect_ratios,
        'variances': variance,
        'stride': stride,
        'offset': offset,
    }

    anchor = helper.create_variable_for_type_inference(dtype)
    var = helper.create_variable_for_type_inference(dtype)
    helper.append_op(
        type="anchor_generator",
        inputs={"Input": input},
        outputs={"Anchors": anchor, "Variances": var},
        attrs=attrs,
    )
    # Anchors are fixed geometry; no gradients flow through these outputs.
    anchor.stop_gradient = True
    var.stop_gradient = True
    return anchor, var


def roi_perspective_transform(
    input,
    rois,
    transformed_height,
    transformed_width,
    spatial_scale=1.0,
    name=None,
):
    """
    **The** `rois` **of this op should be a LoDTensor.**

    ROI perspective transform op applies perspective transform to map each roi
    into an rectangular region. Perspective transform is a type of
    transformation in linear algebra.

    Parameters:
        input (Variable): 4-D Tensor, input of ROIPerspectiveTransformOp. The
            format of input tensor is NCHW. Where N is batch size, C is the
            number of input channels, H is the height of the feature, and W is
            the width of the feature. The data type is float32.
        rois (Variable): 2-D LoDTensor, ROIs (Regions of Interest) to be
            transformed. It should be a 2-D LoDTensor of shape (num_rois, 8).
            Given as [[x1, y1, x2, y2, x3, y3, x4, y4], ...], (x1, y1) is the
            top left coordinates, and (x2, y2) is the top right coordinates,
            and (x3, y3) is the bottom right coordinates, and (x4, y4) is the
            bottom left coordinates. The data type is the same as `input`
        transformed_height (int): The height of transformed output.
        transformed_width (int): The width of transformed output.
        spatial_scale (float): Spatial scale factor to scale ROI coords.
            Default: 1.0
        name(str, optional): The default value is None. Normally there is no
            need for user to set this property. For more information, please
            refer to :ref:`api_guide_Name`

    Returns:
        A tuple with three Variables. (out, mask, transform_matrix)

        out: The output of ROIPerspectiveTransformOp which is a 4-D tensor with
        shape (num_rois, channels, transformed_h, transformed_w). The data type
        is the same as `input`

        mask: The mask of ROIPerspectiveTransformOp which is a 4-D tensor with
        shape (num_rois, 1, transformed_h, transformed_w). The data type is
        int32

        transform_matrix: The transform matrix of ROIPerspectiveTransformOp
        which is a 2-D tensor with shape (num_rois, 9). The data type is the
        same as `input`

    Return Type:
        tuple

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid

            x = fluid.data(name='x', shape=[100, 256, 28, 28], dtype='float32')
            rois = fluid.data(name='rois', shape=[None, 8], lod_level=1, dtype='float32')
            out, mask, transform_matrix = fluid.layers.roi_perspective_transform(x, rois, 7, 7, 1.0)
    """
    # Validate tensor dtypes and python-side scalar types up front so errors
    # surface at graph-construction time rather than at op execution.
    check_variable_and_dtype(
        input, 'input', ['float32'], 'roi_perspective_transform'
    )
    check_variable_and_dtype(
        rois, 'rois', ['float32'], 'roi_perspective_transform'
    )
    check_type(
        transformed_height,
        'transformed_height',
        int,
        'roi_perspective_transform',
    )
    check_type(
        transformed_width, 'transformed_width', int, 'roi_perspective_transform'
    )
    check_type(
        spatial_scale, 'spatial_scale', float, 'roi_perspective_transform'
    )

    helper = LayerHelper('roi_perspective_transform', **locals())
    dtype = helper.input_dtype()
    out = helper.create_variable_for_type_inference(dtype)
    mask = helper.create_variable_for_type_inference(dtype="int32")
    transform_matrix = helper.create_variable_for_type_inference(dtype)
    # Internal outputs used by the backward pass; not returned to the caller.
    out2in_idx = helper.create_variable_for_type_inference(dtype="int32")
    out2in_w = helper.create_variable_for_type_inference(dtype)
    helper.append_op(
        type="roi_perspective_transform",
        inputs={"X": input, "ROIs": rois},
        outputs={
            "Out": out,
            "Out2InIdx": out2in_idx,
            "Out2InWeights": out2in_w,
            "Mask": mask,
            "TransformMatrix": transform_matrix,
        },
        attrs={
            "transformed_height": transformed_height,
            "transformed_width": transformed_width,
            "spatial_scale": spatial_scale,
        },
    )
    return out, mask, transform_matrix


def generate_proposal_labels(
    rpn_rois,
    gt_classes,
    is_crowd,
    gt_boxes,
    im_info,
    batch_size_per_im=256,
    fg_fraction=0.25,
    fg_thresh=0.25,
    bg_thresh_hi=0.5,
    bg_thresh_lo=0.0,
    bbox_reg_weights=[0.1, 0.1, 0.2, 0.2],
    class_nums=None,
    use_random=True,
    is_cls_agnostic=False,
    is_cascade_rcnn=False,
    max_overlap=None,
    return_max_overlap=False,
):
    """
    **Generate Proposal Labels of Faster-RCNN**

    This operator can be, for given the GenerateProposalOp output bounding
    boxes and groundtruth, to sample
foreground boxes and background boxes, and compute loss target.

    RpnRois is the output boxes of RPN and was processed by
    generate_proposal_op, these boxes were combined with groundtruth boxes and
    sampled according to batch_size_per_im and fg_fraction, If an instance with
    a groundtruth overlap greater than fg_thresh, then it was considered as a
    foreground sample. If an instance with a groundtruth overlap greater than
    bg_thresh_lo and lower than bg_thresh_hi, then it was considered as a
    background sample. After all foreground and background boxes are chosen
    (so called Rois), then we apply random sampling to make sure the number of
    foreground boxes is no more than batch_size_per_im * fg_fraction.

    For each box in Rois, we assign the classification (class label) and
    regression targets (box label) to it. Finally BboxInsideWeights and
    BboxOutsideWeights are used to specify whether it would contribute to
    training loss.

    Args:
        rpn_rois(Variable): A 2-D LoDTensor with shape [N, 4]. N is the number
            of the GenerateProposalOp's output, each element is a bounding box
            with [xmin, ymin, xmax, ymax] format. The data type can be float32
            or float64.
        gt_classes(Variable): A 2-D LoDTensor with shape [M, 1]. M is the
            number of groundtruth, each element is a class label of
            groundtruth. The data type must be int32.
        is_crowd(Variable): A 2-D LoDTensor with shape [M, 1]. M is the number
            of groundtruth, each element is a flag indicates whether a
            groundtruth is crowd. The data type must be int32.
        gt_boxes(Variable): A 2-D LoDTensor with shape [M, 4]. M is the number
            of groundtruth, each element is a bounding box with
            [xmin, ymin, xmax, ymax] format.
        im_info(Variable): A 2-D LoDTensor with shape [B, 3]. B is the number
            of input images, each element consists of im_height, im_width,
            im_scale.
        batch_size_per_im(int): Batch size of rois per images. The data type
            must be int32.
        fg_fraction(float): Foreground fraction in total batch_size_per_im.
            The data type must be float32.
        fg_thresh(float): Overlap threshold which is used to chose foreground
            sample. The data type must be float32.
        bg_thresh_hi(float): Overlap threshold upper bound which is used to
            chose background sample. The data type must be float32.
        bg_thresh_lo(float): Overlap threshold lower bound which is used to
            chose background sample. The data type must be float32.
        bbox_reg_weights(list|tuple): Box regression weights. The data type
            must be float32.
        class_nums(int): Class number. The data type must be int32.
        use_random(bool): Use random sampling to choose foreground and
            background boxes.
        is_cls_agnostic(bool): bbox regression use class agnostic simply which
            only represent fg and bg boxes.
        is_cascade_rcnn(bool): it will filter some bbox crossing the image's
            boundary when setting True.
        max_overlap(Variable): Maximum overlap between each proposal box and
            ground-truth.
        return_max_overlap(bool): Whether return the maximum overlap between
            each sampled RoI and ground-truth.

    Returns:
        tuple: A tuple with format``(rois, labels_int32, bbox_targets, bbox_inside_weights, bbox_outside_weights, max_overlap)``.

        - **rois**: 2-D LoDTensor with shape ``[batch_size_per_im * batch_size, 4]``. The data type is the same as ``rpn_rois``.
        - **labels_int32**: 2-D LoDTensor with shape ``[batch_size_per_im * batch_size, 1]``. The data type must be int32.
        - **bbox_targets**: 2-D LoDTensor with shape ``[batch_size_per_im * batch_size, 4 * class_num]``. The regression targets of all RoIs. The data type is the same as ``rpn_rois``.
        - **bbox_inside_weights**: 2-D LoDTensor with shape ``[batch_size_per_im * batch_size, 4 * class_num]``. The weights of foreground boxes' regression loss. The data type is the same as ``rpn_rois``.
        - **bbox_outside_weights**: 2-D LoDTensor with shape ``[batch_size_per_im * batch_size, 4 * class_num]``. The weights of regression loss. The data type is the same as ``rpn_rois``.
        - **max_overlap**: 1-D LoDTensor with shape ``[P]``. P is the number of output ``rois``. The maximum overlap between each sampled RoI and ground-truth.

    Examples:
        .. code-block:: python

            import paddle
            import paddle.fluid as fluid
            paddle.enable_static()
            rpn_rois = fluid.data(name='rpn_rois', shape=[None, 4], dtype='float32')
            gt_classes = fluid.data(name='gt_classes', shape=[None, 1], dtype='int32')
            is_crowd = fluid.data(name='is_crowd', shape=[None, 1], dtype='int32')
            gt_boxes = fluid.data(name='gt_boxes', shape=[None, 4], dtype='float32')
            im_info = fluid.data(name='im_info', shape=[None, 3], dtype='float32')
            rois, labels, bbox, inside_weights, outside_weights = fluid.layers.generate_proposal_labels(
                           rpn_rois, gt_classes, is_crowd, gt_boxes, im_info,
                           class_nums=10)
    """
    helper = LayerHelper('generate_proposal_labels', **locals())

    check_variable_and_dtype(
        rpn_rois, 'rpn_rois', ['float32', 'float64'], 'generate_proposal_labels'
    )
    check_variable_and_dtype(
        gt_classes, 'gt_classes', ['int32'], 'generate_proposal_labels'
    )
    check_variable_and_dtype(
        is_crowd, 'is_crowd', ['int32'], 'generate_proposal_labels'
    )
    if is_cascade_rcnn:
        # Cascade R-CNN resamples using the previous stage's overlaps, so the
        # caller must provide them.
        assert (
            max_overlap is not None
        ), "Input max_overlap of generate_proposal_labels should not be None if is_cascade_rcnn is True"

    rois = helper.create_variable_for_type_inference(dtype=rpn_rois.dtype)
    labels_int32 = helper.create_variable_for_type_inference(
        dtype=gt_classes.dtype
    )
    bbox_targets = helper.create_variable_for_type_inference(
        dtype=rpn_rois.dtype
    )
    bbox_inside_weights = helper.create_variable_for_type_inference(
        dtype=rpn_rois.dtype
    )
    bbox_outside_weights = helper.create_variable_for_type_inference(
        dtype=rpn_rois.dtype
    )
    max_overlap_with_gt = helper.create_variable_for_type_inference(
        dtype=rpn_rois.dtype
    )

    inputs = {
        'RpnRois': rpn_rois,
        'GtClasses': gt_classes,
        'IsCrowd': is_crowd,
        'GtBoxes': gt_boxes,
        'ImInfo': im_info,
    }
    # MaxOverlap is an optional op input; only wire it when supplied.
    if max_overlap is not None:
        inputs['MaxOverlap'] = max_overlap

    helper.append_op(
        type="generate_proposal_labels",
        inputs=inputs,
        outputs={
            'Rois': rois,
            'LabelsInt32': labels_int32,
            'BboxTargets': bbox_targets,
            'BboxInsideWeights': bbox_inside_weights,
            'BboxOutsideWeights': bbox_outside_weights,
            'MaxOverlapWithGT': max_overlap_with_gt,
        },
        attrs={
            'batch_size_per_im': batch_size_per_im,
            'fg_fraction': fg_fraction,
            'fg_thresh': fg_thresh,
            'bg_thresh_hi': bg_thresh_hi,
            'bg_thresh_lo': bg_thresh_lo,
            'bbox_reg_weights': bbox_reg_weights,
            'class_nums': class_nums,
            'use_random': use_random,
            'is_cls_agnostic': is_cls_agnostic,
            'is_cascade_rcnn': is_cascade_rcnn,
        },
    )

    # Sampled labels/targets are training data, not learnable values.
    rois.stop_gradient = True
    labels_int32.stop_gradient = True
    bbox_targets.stop_gradient = True
    bbox_inside_weights.stop_gradient = True
    bbox_outside_weights.stop_gradient = True
    max_overlap_with_gt.stop_gradient = True

    if return_max_overlap:
        return (
            rois,
            labels_int32,
            bbox_targets,
            bbox_inside_weights,
            bbox_outside_weights,
            max_overlap_with_gt,
        )
    return (
        rois,
        labels_int32,
        bbox_targets,
        bbox_inside_weights,
        bbox_outside_weights,
    )


def generate_mask_labels(
    im_info,
    gt_classes,
    is_crowd,
    gt_segms,
    rois,
    labels_int32,
    num_classes,
    resolution,
):
    r"""
    **Generate Mask Labels for Mask-RCNN**

    This operator can be, for given the RoIs and corresponding labels,
    to sample foreground RoIs. This mask branch also has
    a :math: `K \\times M^{2}` dimensional output targets for each foreground
    RoI, which encodes K binary masks of resolution M x M, one for each of the
    K classes. This mask targets are used to compute loss of mask branch.

    Please note, the data format of groud-truth segmentation, assumed the
    segmentations are as follows. The first instance has two gt objects.
    The second instance has one gt object, this object has two gt
    segmentations.

    ..
code-block:: python

        #[
        #  [[[229.14, 370.9, 229.14, 370.9, ...]],
        #   [[343.7, 139.85, 349.01, 138.46, ...]]], # 0-th instance
        #  [[[500.0, 390.62, ...],[115.48, 187.86, ...]]] # 1-th instance
        #]

        batch_masks = []
        for semgs in batch_semgs:
            gt_masks = []
            for semg in semgs:
                gt_segm = []
                for polys in semg:
                    gt_segm.append(np.array(polys).reshape(-1, 2))
                gt_masks.append(gt_segm)
            batch_masks.append(gt_masks)

        place = fluid.CPUPlace()
        feeder = fluid.DataFeeder(place=place, feed_list=feeds)
        feeder.feed(batch_masks)

    Args:
        im_info (Variable): A 2-D Tensor with shape [N, 3] and float32
            data type. N is the batch size, each element is
            [height, width, scale] of image. Image scale is
            target_size / original_size, target_size is the size after resize,
            original_size is the original image size.
        gt_classes (Variable): A 2-D LoDTensor with shape [M, 1]. Data type
            should be int. M is the total number of ground-truth, each
            element is a class label.
        is_crowd (Variable): A 2-D LoDTensor with same shape and same data type
            as gt_classes, each element is a flag indicating whether a
            groundtruth is crowd.
        gt_segms (Variable): This input is a 2D LoDTensor with shape [S, 2] and
            float32 data type, it's LoD level is 3.
            Usually users do not needs to understand LoD,
            The users should return correct data format in reader.
            The LoD[0] represents the ground-truth objects number of
            each instance. LoD[1] represents the segmentation counts of each
            objects. LoD[2] represents the polygons number of each segmentation.
            S the total number of polygons coordinate points. Each element is
            (x, y) coordinate points.
        rois (Variable): A 2-D LoDTensor with shape [R, 4] and float32 data
            type float32. R is the total number of RoIs, each element is a
            bounding box with (xmin, ymin, xmax, ymax) format in the range of
            original image.
        labels_int32 (Variable): A 2-D LoDTensor in shape of [R, 1] with type
            of int32. R is the same as it in `rois`. Each element represents
            a class label of a RoI.
        num_classes (int): Class number.
        resolution (int): Resolution of mask predictions.

    Returns:
        mask_rois (Variable):  A 2D LoDTensor with shape [P, 4] and same data
        type as `rois`. P is the total number of sampled RoIs. Each element
        is a bounding box with [xmin, ymin, xmax, ymax] format in range of
        original image size.

        mask_rois_has_mask_int32 (Variable): A 2D LoDTensor with shape [P, 1]
        and int data type, each element represents the output mask RoI
        index with regard to input RoIs.

        mask_int32 (Variable): A 2D LoDTensor with shape [P, K * M * M] and int
        data type, K is the classes number and M is the resolution of mask
        predictions. Each element represents the binary mask targets.

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid

            im_info = fluid.data(name="im_info", shape=[None, 3],
                dtype="float32")
            gt_classes = fluid.data(name="gt_classes", shape=[None, 1],
                dtype="float32", lod_level=1)
            is_crowd = fluid.data(name="is_crowd", shape=[None, 1],
                dtype="float32", lod_level=1)
            gt_masks = fluid.data(name="gt_masks", shape=[None, 2],
                dtype="float32", lod_level=3)
            # rois, roi_labels can be the output of
            # fluid.layers.generate_proposal_labels.
            rois = fluid.data(name="rois", shape=[None, 4],
                dtype="float32", lod_level=1)
            roi_labels = fluid.data(name="roi_labels", shape=[None, 1],
                dtype="int32", lod_level=1)
            mask_rois, mask_index, mask_int32 = fluid.layers.generate_mask_labels(
                im_info=im_info,
                gt_classes=gt_classes,
                is_crowd=is_crowd,
                gt_segms=gt_masks,
                rois=rois,
                labels_int32=roi_labels,
                num_classes=81,
                resolution=14)
    """
    helper = LayerHelper('generate_mask_labels', **locals())

    mask_rois = helper.create_variable_for_type_inference(dtype=rois.dtype)
    roi_has_mask_int32 = helper.create_variable_for_type_inference(
        dtype=gt_classes.dtype
    )
    mask_int32 = helper.create_variable_for_type_inference(
        dtype=gt_classes.dtype
    )

    helper.append_op(
        type="generate_mask_labels",
        inputs={
            'ImInfo': im_info,
            'GtClasses': gt_classes,
            'IsCrowd': is_crowd,
            'GtSegms': gt_segms,
            'Rois': rois,
            'LabelsInt32': labels_int32,
        },
        outputs={
            'MaskRois': mask_rois,
            'RoiHasMaskInt32': roi_has_mask_int32,
            'MaskInt32': mask_int32,
        },
        attrs={'num_classes': num_classes, 'resolution': resolution},
    )

    # Mask targets are training labels; exclude them from backprop.
    mask_rois.stop_gradient = True
    roi_has_mask_int32.stop_gradient = True
    mask_int32.stop_gradient = True
    return mask_rois, roi_has_mask_int32, mask_int32


def generate_proposals(
    scores,
    bbox_deltas,
    im_info,
    anchors,
    variances,
    pre_nms_top_n=6000,
    post_nms_top_n=1000,
    nms_thresh=0.5,
    min_size=0.1,
    eta=1.0,
    return_rois_num=False,
    name=None,
):
    """
    **Generate proposal Faster-RCNN**

    This operation proposes RoIs according to each box with their
    probability to be a foreground object and
    the box can be calculated by anchors. Bbox_deltais and scores
    to be an object are the output of RPN. Final proposals
    could be used to train detection net.

    For generating proposals, this operation performs following steps:

    1. Transposes and resizes scores and bbox_deltas in size of
       (H*W*A, 1) and (H*W*A, 4)
    2. Calculate box locations as proposals candidates.
    3. Clip boxes to image
    4. Remove predicted boxes with small area.
    5. Apply NMS to get final proposals as output.
Args:
        scores(Variable): A 4-D Tensor with shape [N, A, H, W] represents
            the probability for each box to be an object.
            N is batch size, A is number of anchors, H and W are height and
            width of the feature map. The data type must be float32.
        bbox_deltas(Variable): A 4-D Tensor with shape [N, 4*A, H, W]
            represents the difference between predicted box location and
            anchor location. The data type must be float32.
        im_info(Variable): A 2-D Tensor with shape [N, 3] represents origin
            image information for N batch. Height and width are the input
            sizes and scale is the ratio of network input size and original
            size. The data type can be float32 or float64.
        anchors(Variable): A 4-D Tensor represents the anchors with a layout
            of [H, W, A, 4]. H and W are height and width of the feature map,
            num_anchors is the box count of each position. Each anchor is
            in (xmin, ymin, xmax, ymax) format an unnormalized. The data type
            must be float32.
        variances(Variable): A 4-D Tensor. The expanded variances of anchors
            with a layout of [H, W, num_priors, 4]. Each variance is in
            (xcenter, ycenter, w, h) format. The data type must be float32.
        pre_nms_top_n(float): Number of total bboxes to be kept per image
            before NMS. The data type must be float32. `6000` by default.
        post_nms_top_n(float): Number of total bboxes to be kept per image
            after NMS. The data type must be float32. `1000` by default.
        nms_thresh(float): Threshold in NMS. The data type must be float32.
            `0.5` by default.
        min_size(float): Remove predicted boxes with either height or width
            < min_size. The data type must be float32. `0.1` by default.
        eta(float): Apply in adaptive NMS, if adaptive `threshold > 0.5`,
            `adaptive_threshold = adaptive_threshold * eta` in each iteration.
        return_rois_num(bool): When setting True, it will return a 1D Tensor
            with shape [N, ] that includes Rois's num of each image in one
            batch. The N is the image's num. For example, the tensor has
            values [4,5] that represents the first image has 4 Rois, the
            second image has 5 Rois. It only used in rcnn model. 'False' by
            default.
        name(str, optional): For detailed information, please refer
            to :ref:`api_guide_Name`. Usually name is no need to set and None
            by default.

    Returns:
        tuple:
        A tuple with format ``(rpn_rois, rpn_roi_probs)``.

        - **rpn_rois**: The generated RoIs. 2-D Tensor with shape ``[N, 4]``
          while ``N`` is the number of RoIs. The data type is the same as
          ``scores``.
        - **rpn_roi_probs**: The scores of generated RoIs. 2-D Tensor with
          shape ``[N, 1]`` while ``N`` is the number of RoIs. The data type is
          the same as ``scores``.

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid
            import paddle
            paddle.enable_static()
            scores = fluid.data(name='scores', shape=[None, 4, 5, 5], dtype='float32')
            bbox_deltas = fluid.data(name='bbox_deltas', shape=[None, 16, 5, 5], dtype='float32')
            im_info = fluid.data(name='im_info', shape=[None, 3], dtype='float32')
            anchors = fluid.data(name='anchors', shape=[None, 5, 4, 4], dtype='float32')
            variances = fluid.data(name='variances', shape=[None, 5, 10, 4], dtype='float32')
            rois, roi_probs = fluid.layers.generate_proposals(scores, bbox_deltas,
                         im_info, anchors, variances)
    """
    # Thin compatibility shim over the maintained implementation.
    # NOTE(review): `im_info[:2]` slices the first two ROWS of the [N, 3]
    # im_info tensor, not the (height, width) columns; the target API expects
    # `img_size` of shape [N, 2] — confirm against
    # paddle.vision.ops.generate_proposals before relying on batched input.
    return paddle.vision.ops.generate_proposals(
        scores=scores,
        bbox_deltas=bbox_deltas,
        img_size=im_info[:2],
        anchors=anchors,
        variances=variances,
        pre_nms_top_n=pre_nms_top_n,
        post_nms_top_n=post_nms_top_n,
        nms_thresh=nms_thresh,
        min_size=min_size,
        eta=eta,
        return_rois_num=return_rois_num,
        name=name,
    )


def box_clip(input, im_info, name=None):
    """
    Clip the box into the size given by im_info
    For each input box, The formula is given as follows:

    .. code-block:: text

        xmin = max(min(xmin, im_w - 1), 0)
        ymin = max(min(ymin, im_h - 1), 0)
        xmax = max(min(xmax, im_w - 1), 0)
        ymax = max(min(ymax, im_h - 1), 0)

    where im_w and im_h are computed from im_info:

    .. code-block:: text

        im_h = round(height / scale)
        im_w = round(weight / scale)

    Args:
        input(Variable): The input Tensor with shape :math:`[N_1, N_2, ..., N_k, 4]`,
            the last dimension is 4 and data type is float32 or float64.
        im_info(Variable): The 2-D Tensor with shape [N, 3] with layout
            (height, width, scale) representing the information of image.
            Height and width are the input sizes and scale is the ratio of
            network input size and original size. The data type is float32
            or float64.
        name(str, optional): For detailed information, please refer
            to :ref:`api_guide_Name`. Usually name is no need to set and
            None by default.

    Returns:
        Variable:

        output(Variable): The clipped tensor with data type float32 or
        float64. The shape is same as input.

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid
            import paddle
            paddle.enable_static()
            boxes = fluid.data(
                name='boxes', shape=[None, 8, 4], dtype='float32', lod_level=1)
            im_info = fluid.data(name='im_info', shape=[-1 ,3])
            out = fluid.layers.box_clip(
                input=boxes, im_info=im_info)
    """
    check_variable_and_dtype(input, 'input', ['float32', 'float64'], 'box_clip')
    check_variable_and_dtype(
        im_info, 'im_info', ['float32', 'float64'], 'box_clip'
    )

    helper = LayerHelper("box_clip", **locals())
    output = helper.create_variable_for_type_inference(dtype=input.dtype)
    inputs = {"Input": input, "ImInfo": im_info}
    helper.append_op(type="box_clip", inputs=inputs, outputs={"Output": output})

    return output


def retinanet_detection_output(
    bboxes,
    scores,
    anchors,
    im_info,
    score_threshold=0.05,
    nms_top_k=1000,
    keep_top_k=100,
    nms_threshold=0.3,
    nms_eta=1.0,
):
    """
    **Detection Output Layer for the detector RetinaNet.**

    In the detector `RetinaNet <https://arxiv.org/abs/1708.02002>`_ , many
    `FPN <https://arxiv.org/abs/1612.03144>`_ levels output the category
    and location predictions, this OP is to get the detection results by
    performing following steps:

    1.
For each FPN level, decode box predictions according to the anchor boxes from at most :attr:`nms_top_k` top-scoring predictions after thresholding detector confidence at :attr:`score_threshold`. 2. Merge top predictions from all levels and apply multi-class non maximum suppression (NMS) on them to get the final detections. Args: bboxes(List): A list of Tensors from multiple FPN levels represents the location prediction for all anchor boxes. Each element is a 3-D Tensor with shape :math:`[N, Mi, 4]`, :math:`N` is the batch size, :math:`Mi` is the number of bounding boxes from :math:`i`-th FPN level and each bounding box has four coordinate values and the layout is [xmin, ymin, xmax, ymax]. The data type of each element is float32 or float64. scores(List): A list of Tensors from multiple FPN levels represents the category prediction for all anchor boxes. Each element is a 3-D Tensor with shape :math:`[N, Mi, C]`, :math:`N` is the batch size, :math:`C` is the class number (**excluding background**), :math:`Mi` is the number of bounding boxes from :math:`i`-th FPN level. The data type of each element is float32 or float64. anchors(List): A list of Tensors from multiple FPN levels represents the locations of all anchor boxes. Each element is a 2-D Tensor with shape :math:`[Mi, 4]`, :math:`Mi` is the number of bounding boxes from :math:`i`-th FPN level, and each bounding box has four coordinate values and the layout is [xmin, ymin, xmax, ymax]. The data type of each element is float32 or float64. im_info(Variable): A 2-D Tensor with shape :math:`[N, 3]` represents the size information of input images. :math:`N` is the batch size, the size information of each image is a 3-vector which are the height and width of the network input along with the factor scaling the origin image to the network input. The data type of :attr:`im_info` is float32. score_threshold(float): Threshold to filter out bounding boxes with a confidence score before NMS, default value is set to 0.05. 
nms_top_k(int): Maximum number of detections per FPN layer to be kept according to the confidences before NMS, default value is set to 1000. keep_top_k(int): Number of total bounding boxes to be kept per image after NMS step. Default value is set to 100, -1 means keeping all bounding boxes after NMS step. nms_threshold(float): The Intersection-over-Union(IoU) threshold used to filter out boxes in NMS. nms_eta(float): The parameter for adjusting :attr:`nms_threshold` in NMS. Default value is set to 1., which represents the value of :attr:`nms_threshold` keep the same in NMS. If :attr:`nms_eta` is set to be lower than 1. and the value of :attr:`nms_threshold` is set to be higher than 0.5, everytime a bounding box is filtered out, the adjustment for :attr:`nms_threshold` like :attr:`nms_threshold` = :attr:`nms_threshold` * :attr:`nms_eta` will not be stopped until the actual value of :attr:`nms_threshold` is lower than or equal to 0.5. **Notice**: In some cases where the image sizes are very small, it's possible that there is no detection if :attr:`score_threshold` are used at all levels. Hence, this OP do not filter out anchors from the highest FPN level before NMS. And the last element in :attr:`bboxes`:, :attr:`scores` and :attr:`anchors` is required to be from the highest FPN level. Returns: Variable(The data type is float32 or float64): The detection output is a 1-level LoDTensor with shape :math:`[No, 6]`. Each row has six values: [label, confidence, xmin, ymin, xmax, ymax]. :math:`No` is the total number of detections in this mini-batch. The :math:`i`-th image has `LoD[i + 1] - LoD[i]` detected results, if `LoD[i + 1] - LoD[i]` is 0, the :math:`i`-th image has no detected results. If all images have no detected results, LoD will be set to 0, and the output tensor is empty (None). Examples: .. 
code-block:: python import paddle.fluid as fluid bboxes_low = fluid.data( name='bboxes_low', shape=[1, 44, 4], dtype='float32') bboxes_high = fluid.data( name='bboxes_high', shape=[1, 11, 4], dtype='float32') scores_low = fluid.data( name='scores_low', shape=[1, 44, 10], dtype='float32') scores_high = fluid.data( name='scores_high', shape=[1, 11, 10], dtype='float32') anchors_low = fluid.data( name='anchors_low', shape=[44, 4], dtype='float32') anchors_high = fluid.data( name='anchors_high', shape=[11, 4], dtype='float32') im_info = fluid.data( name="im_info", shape=[1, 3], dtype='float32') nmsed_outs = fluid.layers.retinanet_detection_output( bboxes=[bboxes_low, bboxes_high], scores=[scores_low, scores_high], anchors=[anchors_low, anchors_high], im_info=im_info, score_threshold=0.05, nms_top_k=1000, keep_top_k=100, nms_threshold=0.45, nms_eta=1.0) """ check_type(bboxes, 'bboxes', (list), 'retinanet_detection_output') for i, bbox in enumerate(bboxes): check_variable_and_dtype( bbox, 'bbox{}'.format(i), ['float32', 'float64'], 'retinanet_detection_output', ) check_type(scores, 'scores', (list), 'retinanet_detection_output') for i, score in enumerate(scores): check_variable_and_dtype( score, 'score{}'.format(i), ['float32', 'float64'], 'retinanet_detection_output', ) check_type(anchors, 'anchors', (list), 'retinanet_detection_output') for i, anchor in enumerate(anchors): check_variable_and_dtype( anchor, 'anchor{}'.format(i), ['float32', 'float64'], 'retinanet_detection_output', ) check_variable_and_dtype( im_info, 'im_info', ['float32', 'float64'], 'retinanet_detection_output' ) helper = LayerHelper('retinanet_detection_output', **locals()) output = helper.create_variable_for_type_inference( dtype=helper.input_dtype('scores') ) helper.append_op( type="retinanet_detection_output", inputs={ 'BBoxes': bboxes, 'Scores': scores, 'Anchors': anchors, 'ImInfo': im_info, }, attrs={ 'score_threshold': score_threshold, 'nms_top_k': nms_top_k, 'nms_threshold': nms_threshold, 
'keep_top_k': keep_top_k, 'nms_eta': 1.0, }, outputs={'Out': output}, ) output.stop_gradient = True return output def multiclass_nms( bboxes, scores, score_threshold, nms_top_k, keep_top_k, nms_threshold=0.3, normalized=True, nms_eta=1.0, background_label=0, name=None, ): """ **Multiclass NMS** This operator is to do multi-class non maximum suppression (NMS) on boxes and scores. In the NMS step, this operator greedily selects a subset of detection bounding boxes that have high scores larger than score_threshold, if providing this threshold, then selects the largest nms_top_k confidences scores if nms_top_k is larger than -1. Then this operator pruns away boxes that have high IOU (intersection over union) overlap with already selected boxes by adaptive threshold NMS based on parameters of nms_threshold and nms_eta. Aftern NMS step, at most keep_top_k number of total bboxes are to be kept per image if keep_top_k is larger than -1. See below for an example: .. code-block:: text if: box1.data = (2.0, 3.0, 7.0, 5.0) format is (xmin, ymin, xmax, ymax) box1.scores = (0.7, 0.2, 0.4) which is (label0.score=0.7, label1.score=0.2, label2.cores=0.4) box2.data = (3.0, 4.0, 8.0, 5.0) box2.score = (0.3, 0.3, 0.1) nms_threshold = 0.3 background_label = 0 score_threshold = 0 Then: iou = 4/11 > 0.3 out.data = [[1, 0.3, 3.0, 4.0, 8.0, 5.0], [2, 0.4, 2.0, 3.0, 7.0, 5.0]] Out format is (label, confidence, xmin, ymin, xmax, ymax) Args: bboxes (Variable): Two types of bboxes are supported: 1. (Tensor) A 3-D Tensor with shape [N, M, 4 or 8 16 24 32] represents the predicted locations of M bounding bboxes, N is the batch size. Each bounding box has four coordinate values and the layout is [xmin, ymin, xmax, ymax], when box size equals to 4. The data type is float32 or float64. 2. (LoDTensor) A 3-D Tensor with shape [M, C, 4] M is the number of bounding boxes, C is the class number. The data type is float32 or float64. scores (Variable): Two types of scores are supported: 1. 
(Tensor) A 3-D Tensor with shape [N, C, M] represents the predicted confidence predictions. N is the batch size, C is the class number, M is number of bounding boxes. For each category there are total M scores which corresponding M bounding boxes. Please note, M is equal to the 2nd dimension of BBoxes.The data type is float32 or float64. 2. (LoDTensor) A 2-D LoDTensor with shape [M, C]. M is the number of bbox, C is the class number. In this case, input BBoxes should be the second case with shape [M, C, 4].The data type is float32 or float64. background_label (int): The index of background label, the background label will be ignored. If set to -1, then all categories will be considered. Default: 0 score_threshold (float): Threshold to filter out bounding boxes with low confidence score. If not provided, consider all boxes. nms_top_k (int): Maximum number of detections to be kept according to the confidences after the filtering detections based on score_threshold. nms_threshold (float): The threshold to be used in NMS. Default: 0.3 nms_eta (float): The threshold to be used in NMS. Default: 1.0 keep_top_k (int): Number of total bboxes to be kept per image after NMS step. -1 means keeping all bboxes after NMS step. normalized (bool): Whether detections are normalized. Default: True name(str): Name of the multiclass nms op. Default: None. Returns: Variable: A 2-D LoDTensor with shape [No, 6] represents the detections. Each row has 6 values: [label, confidence, xmin, ymin, xmax, ymax] or A 2-D LoDTensor with shape [No, 10] represents the detections. Each row has 10 values: [label, confidence, x1, y1, x2, y2, x3, y3, x4, y4]. No is the total number of detections. If there is no detected boxes for all images, lod will be set to {1} and Out only contains one value which is -1. (After version 1.3, when no boxes detected, the lod is changed from {0} to {1}) Examples: .. 
code-block:: python import paddle.fluid as fluid import paddle paddle.enable_static() boxes = fluid.data(name='bboxes', shape=[None,81, 4], dtype='float32', lod_level=1) scores = fluid.data(name='scores', shape=[None,81], dtype='float32', lod_level=1) out = fluid.layers.multiclass_nms(bboxes=boxes, scores=scores, background_label=0, score_threshold=0.5, nms_top_k=400, nms_threshold=0.3, keep_top_k=200, normalized=False) """ check_variable_and_dtype( bboxes, 'BBoxes', ['float32', 'float64'], 'multiclass_nms' ) check_variable_and_dtype( scores, 'Scores', ['float32', 'float64'], 'multiclass_nms' ) check_type(score_threshold, 'score_threshold', float, 'multicalss_nms') check_type(nms_top_k, 'nums_top_k', int, 'multiclass_nms') check_type(keep_top_k, 'keep_top_k', int, 'mutliclass_nms') check_type(nms_threshold, 'nms_threshold', float, 'multiclass_nms') check_type(normalized, 'normalized', bool, 'multiclass_nms') check_type(nms_eta, 'nms_eta', float, 'multiclass_nms') check_type(background_label, 'background_label', int, 'multiclass_nms') helper = LayerHelper('multiclass_nms', **locals()) output = helper.create_variable_for_type_inference(dtype=bboxes.dtype) helper.append_op( type="multiclass_nms", inputs={'BBoxes': bboxes, 'Scores': scores}, attrs={ 'background_label': background_label, 'score_threshold': score_threshold, 'nms_top_k': nms_top_k, 'nms_threshold': nms_threshold, 'nms_eta': nms_eta, 'keep_top_k': keep_top_k, 'normalized': normalized, }, outputs={'Out': output}, ) output.stop_gradient = True return output def locality_aware_nms( bboxes, scores, score_threshold, nms_top_k, keep_top_k, nms_threshold=0.3, normalized=True, nms_eta=1.0, background_label=-1, name=None, ): """ **Local Aware NMS** `Local Aware NMS <https://arxiv.org/abs/1704.03155>`_ is to do locality-aware non maximum suppression (LANMS) on boxes and scores. Firstly, this operator merge box and score according their IOU (intersection over union). 
In the NMS step, this operator greedily selects a subset of detection bounding
    boxes that have high scores larger than score_threshold, if providing this
    threshold, then selects the largest nms_top_k confidences scores if nms_top_k
    is larger than -1. Then this operator prunes away boxes that have high IOU
    overlap with already selected boxes by adaptive threshold NMS based on
    parameters of nms_threshold and nms_eta.
    After the NMS step, at most keep_top_k number of total bboxes are to be kept
    per image if keep_top_k is larger than -1.

    Args:
        bboxes (Variable): A 3-D Tensor with shape [N, M, 4 or 8 16 24 32]
                           represents the predicted locations of M bounding
                           bboxes, N is the batch size. Each bounding box
                           has four coordinate values and the layout is
                           [xmin, ymin, xmax, ymax], when box size equals to 4.
                           The data type is float32 or float64.
        scores (Variable): A 3-D Tensor with shape [N, C, M] represents the
                           predicted confidence predictions. N is the batch
                           size, C is the class number, M is number of bounding
                           boxes. Now only support 1 class. For each category
                           there are total M scores which correspond to M
                           bounding boxes. Please note, M is equal to the 2nd
                           dimension of BBoxes. The data type is float32 or
                           float64.
        background_label (int): The index of background label, the background
                                label will be ignored. If set to -1, then all
                                categories will be considered. Default: -1
        score_threshold (float): Threshold to filter out bounding boxes with
                                 low confidence score. If not provided,
                                 consider all boxes.
        nms_top_k (int): Maximum number of detections to be kept according
                         to the confidences after the filtering detections
                         based on score_threshold.
        keep_top_k (int): Number of total bboxes to be kept per image after
                          NMS step. -1 means keeping all bboxes after NMS
                          step.
        nms_threshold (float): The threshold to be used in NMS. Default: 0.3
        nms_eta (float): The threshold to be used in NMS. Default: 1.0
        normalized (bool): Whether detections are normalized.
Default: True name(str): Name of the locality aware nms op, please refer to :ref:`api_guide_Name` . Default: None. Returns: Variable: A 2-D LoDTensor with shape [No, 6] represents the detections. Each row has 6 values: [label, confidence, xmin, ymin, xmax, ymax] or A 2-D LoDTensor with shape [No, 10] represents the detections. Each row has 10 values: [label, confidence, x1, y1, x2, y2, x3, y3, x4, y4]. No is the total number of detections. If there is no detected boxes for all images, lod will be set to {1} and Out only contains one value which is -1. (After version 1.3, when no boxes detected, the lod is changed from {0} to {1}). The data type is float32 or float64. Examples: .. code-block:: python import paddle.fluid as fluid boxes = fluid.data(name='bboxes', shape=[None, 81, 8], dtype='float32') scores = fluid.data(name='scores', shape=[None, 1, 81], dtype='float32') out = fluid.layers.locality_aware_nms(bboxes=boxes, scores=scores, score_threshold=0.5, nms_top_k=400, nms_threshold=0.3, keep_top_k=200, normalized=False) """ check_variable_and_dtype( bboxes, 'bboxes', ['float32', 'float64'], 'locality_aware_nms' ) check_variable_and_dtype( scores, 'scores', ['float32', 'float64'], 'locality_aware_nms' ) check_type(background_label, 'background_label', int, 'locality_aware_nms') check_type(score_threshold, 'score_threshold', float, 'locality_aware_nms') check_type(nms_top_k, 'nms_top_k', int, 'locality_aware_nms') check_type(nms_eta, 'nms_eta', float, 'locality_aware_nms') check_type(nms_threshold, 'nms_threshold', float, 'locality_aware_nms') check_type(keep_top_k, 'keep_top_k', int, 'locality_aware_nms') check_type(normalized, 'normalized', bool, 'locality_aware_nms') shape = scores.shape assert len(shape) == 3, "dim size of scores must be 3" assert ( shape[1] == 1 ), "locality_aware_nms only support one class, Tensor score shape must be [N, 1, M]" helper = LayerHelper('locality_aware_nms', **locals()) output = 
helper.create_variable_for_type_inference(dtype=bboxes.dtype) out = {'Out': output} helper.append_op( type="locality_aware_nms", inputs={'BBoxes': bboxes, 'Scores': scores}, attrs={ 'background_label': background_label, 'score_threshold': score_threshold, 'nms_top_k': nms_top_k, 'nms_threshold': nms_threshold, 'nms_eta': nms_eta, 'keep_top_k': keep_top_k, 'nms_eta': nms_eta, 'normalized': normalized, }, outputs={'Out': output}, ) output.stop_gradient = True return output def matrix_nms( bboxes, scores, score_threshold, post_threshold, nms_top_k, keep_top_k, use_gaussian=False, gaussian_sigma=2.0, background_label=0, normalized=True, return_index=False, name=None, ): """ **Matrix NMS** This operator does matrix non maximum suppression (NMS). First selects a subset of candidate bounding boxes that have higher scores than score_threshold (if provided), then the top k candidate is selected if nms_top_k is larger than -1. Score of the remaining candidate are then decayed according to the Matrix NMS scheme. Aftern NMS step, at most keep_top_k number of total bboxes are to be kept per image if keep_top_k is larger than -1. Args: bboxes (Variable): A 3-D Tensor with shape [N, M, 4] represents the predicted locations of M bounding bboxes, N is the batch size. Each bounding box has four coordinate values and the layout is [xmin, ymin, xmax, ymax], when box size equals to 4. The data type is float32 or float64. scores (Variable): A 3-D Tensor with shape [N, C, M] represents the predicted confidence predictions. N is the batch size, C is the class number, M is number of bounding boxes. For each category there are total M scores which corresponding M bounding boxes. Please note, M is equal to the 2nd dimension of BBoxes. The data type is float32 or float64. score_threshold (float): Threshold to filter out bounding boxes with low confidence score. post_threshold (float): Threshold to filter out bounding boxes with low confidence score AFTER decaying. 
nms_top_k (int): Maximum number of detections to be kept according to the confidences after the filtering detections based on score_threshold. keep_top_k (int): Number of total bboxes to be kept per image after NMS step. -1 means keeping all bboxes after NMS step. use_gaussian (bool): Use Gaussian as the decay function. Default: False gaussian_sigma (float): Sigma for Gaussian decay function. Default: 2.0 background_label (int): The index of background label, the background label will be ignored. If set to -1, then all categories will be considered. Default: 0 normalized (bool): Whether detections are normalized. Default: True return_index(bool): Whether return selected index. Default: False name(str): Name of the matrix nms op. Default: None. Returns: A tuple with two Variables: (Out, Index) if return_index is True, otherwise, one Variable(Out) is returned. Out (Variable): A 2-D LoDTensor with shape [No, 6] containing the detection results. Each row has 6 values: [label, confidence, xmin, ymin, xmax, ymax] (After version 1.3, when no boxes detected, the lod is changed from {0} to {1}) Index (Variable): A 2-D LoDTensor with shape [No, 1] containing the selected indices, which are absolute values cross batches. Examples: .. 
code-block:: python import paddle.fluid as fluid boxes = fluid.data(name='bboxes', shape=[None,81, 4], dtype='float32', lod_level=1) scores = fluid.data(name='scores', shape=[None,81], dtype='float32', lod_level=1) out = fluid.layers.matrix_nms(bboxes=boxes, scores=scores, background_label=0, score_threshold=0.5, post_threshold=0.1, nms_top_k=400, keep_top_k=200, normalized=False) """ if in_dygraph_mode(): attrs = ( score_threshold, nms_top_k, keep_top_k, post_threshold, use_gaussian, gaussian_sigma, background_label, normalized, ) out, index = _C_ops.matrix_nms(bboxes, scores, *attrs) if return_index: return out, index else: return out check_variable_and_dtype( bboxes, 'BBoxes', ['float32', 'float64'], 'matrix_nms' ) check_variable_and_dtype( scores, 'Scores', ['float32', 'float64'], 'matrix_nms' ) check_type(score_threshold, 'score_threshold', float, 'matrix_nms') check_type(post_threshold, 'post_threshold', float, 'matrix_nms') check_type(nms_top_k, 'nums_top_k', int, 'matrix_nms') check_type(keep_top_k, 'keep_top_k', int, 'matrix_nms') check_type(normalized, 'normalized', bool, 'matrix_nms') check_type(use_gaussian, 'use_gaussian', bool, 'matrix_nms') check_type(gaussian_sigma, 'gaussian_sigma', float, 'matrix_nms') check_type(background_label, 'background_label', int, 'matrix_nms') helper = LayerHelper('matrix_nms', **locals()) output = helper.create_variable_for_type_inference(dtype=bboxes.dtype) index = helper.create_variable_for_type_inference(dtype='int') helper.append_op( type="matrix_nms", inputs={'BBoxes': bboxes, 'Scores': scores}, attrs={ 'score_threshold': score_threshold, 'post_threshold': post_threshold, 'nms_top_k': nms_top_k, 'keep_top_k': keep_top_k, 'use_gaussian': use_gaussian, 'gaussian_sigma': gaussian_sigma, 'background_label': background_label, 'normalized': normalized, }, outputs={'Out': output, 'Index': index}, ) output.stop_gradient = True if return_index: return output, index else: return output def distribute_fpn_proposals( fpn_rois, 
min_level, max_level, refer_level, refer_scale, rois_num=None, name=None, ): r""" **This op only takes LoDTensor as input.** In Feature Pyramid Networks (FPN) models, it is needed to distribute all proposals into different FPN level, with respect to scale of the proposals, the referring scale and the referring level. Besides, to restore the order of proposals, we return an array which indicates the original index of rois in current proposals. To compute FPN level for each roi, the formula is given as follows: .. math:: roi\_scale &= \sqrt{BBoxArea(fpn\_roi)} level = floor(&\log(\\frac{roi\_scale}{refer\_scale}) + refer\_level) where BBoxArea is a function to compute the area of each roi. Args: fpn_rois(Variable): 2-D Tensor with shape [N, 4] and data type is float32 or float64. The input fpn_rois. min_level(int32): The lowest level of FPN layer where the proposals come from. max_level(int32): The highest level of FPN layer where the proposals come from. refer_level(int32): The referring level of FPN layer with specified scale. refer_scale(int32): The referring scale of FPN layer with specified level. rois_num(Tensor): 1-D Tensor contains the number of RoIs in each image. The shape is [B] and data type is int32. B is the number of images. If it is not None then return a list of 1-D Tensor. Each element is the output RoIs' number of each image on the corresponding level and the shape is [B]. None by default. name(str, optional): For detailed information, please refer to :ref:`api_guide_Name`. Usually name is no need to set and None by default. Returns: Tuple: multi_rois(List) : A list of 2-D LoDTensor with shape [M, 4] and data type of float32 and float64. The length is max_level-min_level+1. The proposals in each FPN level. restore_ind(Variable): A 2-D Tensor with shape [N, 1], N is the number of total rois. The data type is int32. It is used to restore the order of fpn_rois. 
rois_num_per_level(List): A list of 1-D Tensor and each Tensor is the RoIs' number in each image on the corresponding level. The shape is [B] and data type of int32. B is the number of images Examples: .. code-block:: python import paddle.fluid as fluid import paddle paddle.enable_static() fpn_rois = fluid.data( name='data', shape=[None, 4], dtype='float32', lod_level=1) multi_rois, restore_ind = fluid.layers.distribute_fpn_proposals( fpn_rois=fpn_rois, min_level=2, max_level=5, refer_level=4, refer_scale=224) """ return paddle.vision.ops.distribute_fpn_proposals( fpn_rois=fpn_rois, min_level=min_level, max_level=max_level, refer_level=refer_level, refer_scale=refer_scale, rois_num=rois_num, name=name, ) @templatedoc() def box_decoder_and_assign( prior_box, prior_box_var, target_box, box_score, box_clip, name=None ): """ ${comment} Args: prior_box(${prior_box_type}): ${prior_box_comment} prior_box_var(${prior_box_var_type}): ${prior_box_var_comment} target_box(${target_box_type}): ${target_box_comment} box_score(${box_score_type}): ${box_score_comment} box_clip(${box_clip_type}): ${box_clip_comment} name(str, optional): For detailed information, please refer to :ref:`api_guide_Name`. Usually name is no need to set and None by default. Returns: Tuple: decode_box(${decode_box_type}): ${decode_box_comment} output_assign_box(${output_assign_box_type}): ${output_assign_box_comment} Examples: .. 
code-block:: python import paddle.fluid as fluid import paddle paddle.enable_static() pb = fluid.data( name='prior_box', shape=[None, 4], dtype='float32') pbv = fluid.data( name='prior_box_var', shape=[4], dtype='float32') loc = fluid.data( name='target_box', shape=[None, 4*81], dtype='float32') scores = fluid.data( name='scores', shape=[None, 81], dtype='float32') decoded_box, output_assign_box = fluid.layers.box_decoder_and_assign( pb, pbv, loc, scores, 4.135) """ check_variable_and_dtype( prior_box, 'prior_box', ['float32', 'float64'], 'box_decoder_and_assign' ) check_variable_and_dtype( target_box, 'target_box', ['float32', 'float64'], 'box_decoder_and_assign', ) check_variable_and_dtype( box_score, 'box_score', ['float32', 'float64'], 'box_decoder_and_assign' ) helper = LayerHelper("box_decoder_and_assign", **locals()) decoded_box = helper.create_variable_for_type_inference( dtype=prior_box.dtype ) output_assign_box = helper.create_variable_for_type_inference( dtype=prior_box.dtype ) helper.append_op( type="box_decoder_and_assign", inputs={ "PriorBox": prior_box, "PriorBoxVar": prior_box_var, "TargetBox": target_box, "BoxScore": box_score, }, attrs={"box_clip": box_clip}, outputs={ "DecodeBox": decoded_box, "OutputAssignBox": output_assign_box, }, ) return decoded_box, output_assign_box def collect_fpn_proposals( multi_rois, multi_scores, min_level, max_level, post_nms_top_n, rois_num_per_level=None, name=None, ): """ **This OP only supports LoDTensor as input**. Concat multi-level RoIs (Region of Interest) and select N RoIs with respect to multi_scores. This operation performs the following steps: 1. Choose num_level RoIs and scores as input: num_level = max_level - min_level 2. Concat multi-level RoIs and scores 3. Sort scores and select post_nms_top_n scores 4. Gather RoIs by selected indices from scores 5. Re-sort RoIs by corresponding batch_id Args: multi_rois(list): List of RoIs to collect. 
Element in list is 2-D LoDTensor with shape [N, 4] and data type is float32 or float64, N is the number of RoIs. multi_scores(list): List of scores of RoIs to collect. Element in list is 2-D LoDTensor with shape [N, 1] and data type is float32 or float64, N is the number of RoIs. min_level(int): The lowest level of FPN layer to collect max_level(int): The highest level of FPN layer to collect post_nms_top_n(int): The number of selected RoIs rois_num_per_level(list, optional): The List of RoIs' numbers. Each element is 1-D Tensor which contains the RoIs' number of each image on each level and the shape is [B] and data type is int32, B is the number of images. If it is not None then return a 1-D Tensor contains the output RoIs' number of each image and the shape is [B]. Default: None name(str, optional): For detailed information, please refer to :ref:`api_guide_Name`. Usually name is no need to set and None by default. Returns: Variable: fpn_rois(Variable): 2-D LoDTensor with shape [N, 4] and data type is float32 or float64. Selected RoIs. rois_num(Tensor): 1-D Tensor contains the RoIs's number of each image. The shape is [B] and data type is int32. B is the number of images. Examples: .. code-block:: python import paddle.fluid as fluid import paddle paddle.enable_static() multi_rois = [] multi_scores = [] for i in range(4): multi_rois.append(fluid.data( name='roi_'+str(i), shape=[None, 4], dtype='float32', lod_level=1)) for i in range(4): multi_scores.append(fluid.data( name='score_'+str(i), shape=[None, 1], dtype='float32', lod_level=1)) fpn_rois = fluid.layers.collect_fpn_proposals( multi_rois=multi_rois, multi_scores=multi_scores, min_level=2, max_level=5, post_nms_top_n=2000) """ num_lvl = max_level - min_level + 1 input_rois = multi_rois[:num_lvl] input_scores = multi_scores[:num_lvl] if _non_static_mode(): assert ( rois_num_per_level is not None ), "rois_num_per_level should not be None in dygraph mode." 
attrs = ('post_nms_topN', post_nms_top_n) output_rois, rois_num = _legacy_C_ops.collect_fpn_proposals( input_rois, input_scores, rois_num_per_level, *attrs ) check_type(multi_rois, 'multi_rois', list, 'collect_fpn_proposals') check_type(multi_scores, 'multi_scores', list, 'collect_fpn_proposals') helper = LayerHelper('collect_fpn_proposals', **locals()) dtype = helper.input_dtype('multi_rois') check_dtype( dtype, 'multi_rois', ['float32', 'float64'], 'collect_fpn_proposals' ) output_rois = helper.create_variable_for_type_inference(dtype) output_rois.stop_gradient = True inputs = { 'MultiLevelRois': input_rois, 'MultiLevelScores': input_scores, } outputs = {'FpnRois': output_rois} if rois_num_per_level is not None: inputs['MultiLevelRoIsNum'] = rois_num_per_level rois_num = helper.create_variable_for_type_inference(dtype='int32') rois_num.stop_gradient = True outputs['RoisNum'] = rois_num helper.append_op( type='collect_fpn_proposals', inputs=inputs, outputs=outputs, attrs={'post_nms_topN': post_nms_top_n}, ) if rois_num_per_level is not None: return output_rois, rois_num return output_rois
{ "content_hash": "19f89758992dde3603ed903fd5071a6e", "timestamp": "", "source": "github", "line_count": 4087, "max_line_length": 224, "avg_line_length": 41.393931979447025, "alnum_prop": 0.5962394415316503, "repo_name": "luotao1/Paddle", "id": "b7a3b2aba9c882f915897a53f29ccceffe48b041", "size": "169786", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "python/paddle/fluid/layers/detection.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "58544" }, { "name": "C", "bytes": "210300" }, { "name": "C++", "bytes": "36771446" }, { "name": "CMake", "bytes": "903079" }, { "name": "Cuda", "bytes": "5200715" }, { "name": "Dockerfile", "bytes": "4361" }, { "name": "Go", "bytes": "49796" }, { "name": "Java", "bytes": "16630" }, { "name": "Jinja", "bytes": "23852" }, { "name": "MLIR", "bytes": "39982" }, { "name": "Python", "bytes": "36248258" }, { "name": "R", "bytes": "1332" }, { "name": "Shell", "bytes": "553175" } ], "symlink_target": "" }
/** * @file f_bent_cigar.c * @brief Implementation of the bent cigar function and problem. */ #include <stdio.h> #include <assert.h> #include "coco.h" #include "coco_problem.c" #include "suite_bbob_legacy_code.c" #include "transform_obj_shift.c" #include "transform_vars_affine.c" #include "transform_vars_asymmetric.c" #include "transform_vars_shift.c" /** * @brief Implements the bent cigar function without connections to any COCO structures. */ static double f_bent_cigar_raw(const double *x, const size_t number_of_variables) { static const double condition = 1.0e6; size_t i; double result; result = x[0] * x[0]; for (i = 1; i < number_of_variables; ++i) { result += condition * x[i] * x[i]; } return result; } /** * @brief Uses the raw function to evaluate the COCO problem. */ static void f_bent_cigar_evaluate(coco_problem_t *problem, const double *x, double *y) { assert(problem->number_of_objectives == 1); y[0] = f_bent_cigar_raw(x, problem->number_of_variables); assert(y[0] + 1e-13 >= problem->best_value[0]); } /** * @brief Allocates the basic bent cigar problem. */ static coco_problem_t *f_bent_cigar_allocate(const size_t number_of_variables) { coco_problem_t *problem = coco_problem_allocate_from_scalars("bent cigar function", f_bent_cigar_evaluate, NULL, number_of_variables, -5.0, 5.0, 0.0); coco_problem_set_id(problem, "%s_d%02lu", "bent_cigar", number_of_variables); /* Compute best solution */ f_bent_cigar_evaluate(problem, problem->best_parameter, problem->best_value); return problem; } /** * @brief Creates the BBOB bent cigar problem. 
*/ static coco_problem_t *f_bent_cigar_bbob_problem_allocate(const size_t function, const size_t dimension, const size_t instance, const long rseed, const char *problem_id_template, const char *problem_name_template) { double *xopt, fopt; coco_problem_t *problem = NULL; double *M = coco_allocate_vector(dimension * dimension); double *b = coco_allocate_vector(dimension); double **rot1; xopt = coco_allocate_vector(dimension); fopt = bbob2009_compute_fopt(function, instance); bbob2009_compute_xopt(xopt, rseed + 1000000, dimension); rot1 = bbob2009_allocate_matrix(dimension, dimension); bbob2009_compute_rotation(rot1, rseed + 1000000, dimension); bbob2009_copy_rotation_matrix(rot1, M, b, dimension); bbob2009_free_matrix(rot1, dimension); problem = f_bent_cigar_allocate(dimension); problem = transform_obj_shift(problem, fopt); problem = transform_vars_affine(problem, M, b, dimension); problem = transform_vars_asymmetric(problem, 0.5); problem = transform_vars_affine(problem, M, b, dimension); problem = transform_vars_shift(problem, xopt, 0); coco_problem_set_id(problem, problem_id_template, function, instance, dimension); coco_problem_set_name(problem, problem_name_template, function, instance, dimension); coco_problem_set_type(problem, "3-ill-conditioned"); coco_free_memory(M); coco_free_memory(b); coco_free_memory(xopt); return problem; }
{ "content_hash": "a4b9664e0865ae2d52085f000e4c53cd", "timestamp": "", "source": "github", "line_count": 97, "max_line_length": 94, "avg_line_length": 34.350515463917525, "alnum_prop": 0.6476590636254502, "repo_name": "oaelhara/numbbo", "id": "b1187e8d420853252e5cfcdd5d3b7ac72b317a2e", "size": "3332", "binary": false, "copies": "1", "ref": "refs/heads/development", "path": "code-experiments/src/f_bent_cigar.c", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Batchfile", "bytes": "738" }, { "name": "C", "bytes": "810419" }, { "name": "C++", "bytes": "51312" }, { "name": "Groff", "bytes": "14660" }, { "name": "HTML", "bytes": "190376" }, { "name": "Java", "bytes": "13954" }, { "name": "JavaScript", "bytes": "17374" }, { "name": "Makefile", "bytes": "457" }, { "name": "Matlab", "bytes": "63089" }, { "name": "Python", "bytes": "889491" }, { "name": "R", "bytes": "1465" }, { "name": "Shell", "bytes": "9502" }, { "name": "TeX", "bytes": "121264" } ], "symlink_target": "" }
#include "Client.h"
#include <iostream>
#include <string>
#include "FileCopier.h"
#include <chrono>
#include <iomanip>

/**
 * Receives exactly `length` bytes from `sock` into `buffer`, looping over
 * partial reads. Returns false on orderly shutdown (recv returns 0) or on
 * a socket error (recv returns SOCKET_ERROR).
 */
static bool recvAll(SOCKET sock, char* buffer, uint32_t length) {
	uint32_t received = 0;
	while (received < length) {
		int n = recv(sock, buffer + received, (int)(length - received), 0);
		if (n <= 0) {
			return false;
		}
		received += (uint32_t)n;
	}
	return true;
}

// Sets up Winsock, connects to the server and immediately receives the file.
// NOTE(review): failures of initWinsock/initSocket are ignored and start()
// runs regardless -- consider aborting on a non-zero return code.
Client::Client(){
	recSizeInBytes = 0;
	initWinsock();
	initSocket();
	start();
}

Client::~Client(){}

// Initializes Winsock 2.2. Returns 0 on success, 1 on failure.
int Client::initWinsock(){
	if (WSAStartup(MAKEWORD(2, 2), &WsaData) != 0){
		std::cout << "Winsock initialization failed!\n";
		WSACleanup();
		return 1;
	}
	return 0;
}

// Prompts the user for the output filename, server host and port, then
// creates a TCP socket and connects to the server.
// Returns 0 on success, 1 if the socket could not be created, 2 if the
// hostname could not be resolved, 3 if the connection failed.
int Client::initSocket(){
	Socket = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
	if (Socket == INVALID_SOCKET){
		std::cout << "Socket creation failed!\n";
		WSACleanup();
		return 1;
	}

	std::cout << "Please enter new filename: ";
	std::cin >> newFileName;

	std::cout << "Please enter server's hostname or IP address: ";
	std::string s;
	std::cin >> s;
	char const* hostName = s.c_str();
	// NOTE(review): gethostbyname is deprecated in favor of getaddrinfo --
	// kept here to preserve the existing behavior.
	if ((host = gethostbyname(hostName)) == NULL){
		std::cout << "Failed to resolve hostname!\n";
		WSACleanup();
		return 2;
	}

	std::cout << "Please enter server's port number: ";
	int portNum;
	std::cin >> portNum;

	SockAddress.sin_port = htons(portNum);
	SockAddress.sin_family = AF_INET;
	SockAddress.sin_addr.s_addr = *((unsigned long*)host->h_addr);

	if (connect(Socket, (SOCKADDR*)(&SockAddress), sizeof(SockAddress)) != 0){
		std::cout << "Failed to connect with server!\n";
		WSACleanup();
		return 3;
	}
	std::cout << "\n\nSuccessfuly connected to server at: " << hostName << " : " << portNum << "\n";
	return 0;
}

// Runs the file transfer, then shuts the connection down.
void Client::start(){
	receiveData();
	close();
}

// Gracefully shuts down and closes the socket, releasing Winsock.
void Client::close(){
	shutdown(Socket, SD_SEND);
	closesocket(Socket);
	WSACleanup();
}

// Returns the size in bytes announced by the server (0 before a transfer).
uint32_t Client::getSizeInBytes(){
	return recSizeInBytes;
}

std::vector<char> Client::getExtension(){
	return eVec;
}

void Client::receiveData(){
	/* Wire protocol (must match the server side):
	     1. uint32 file size in bytes
	     2. uint32 file extension length
	     3. extension characters (no null terminator on the wire)
	     4. raw file data, in chunks of FileCopier::getChunkSize() bytes;
	        the final chunk carries only the remainder                     */

	// 1. receive size of file in bytes
	uint32_t fileSize = 0;
	std::cout << "Receiving filesize... ";
	if (!recvAll(Socket, (char*)(&fileSize), sizeof(fileSize))){
		std::cout << "--->Failed to receive filesize!\n";
		return;
	}
	std::cout << "--->Filesize successfully received!\n";
	recSizeInBytes = fileSize;  // remember the announced size for getSizeInBytes()

	// 2. receive file extension length
	uint32_t extensionLength = 0;
	std::cout << "Receiving file extension length... ";
	if (!recvAll(Socket, (char*)(&extensionLength), sizeof(extensionLength))){
		std::cout << "--->Failed to receive file extension length!\n";
		return;
	}
	std::cout << "--->File extension length successfully received!\n";

	// 3. receive the extension; allocate one extra byte for the terminator,
	//    since only extensionLength raw characters arrive on the wire
	char* extension = new char[extensionLength + 1];
	std::cout << "Receiving file extension... ";
	if (!recvAll(Socket, extension, extensionLength)){
		std::cout << "--->Failed to receive file extension!\n";
		delete[] extension;
		return;
	}
	extension[extensionLength] = '\0';
	std::cout << "--->File extension successfully received!\n";

	// 4. receive actual data in chunks
	FileCopier f;
	f.setOFileName(extension, newFileName);
	// NOTE(review): assumes setOFileName copies the extension rather than
	// storing the pointer -- confirm in FileCopier before relying on this.
	delete[] extension;

	// setting up variables for a basic timer to keep track of megabytes per second
	auto start = std::chrono::high_resolution_clock::now();
	auto end = start;
	uint32_t currentTime = 0;
	uint32_t currentBytes = 0;
	uint32_t oldBytes = 0;
	double MBPS = 0.0;  // rough megabytes-per-second estimate

	const uint32_t chunkSize = f.getChunkSize();
	const uint32_t fullChunks = fileSize / chunkSize;
	const uint32_t lastChunkSize = fileSize - (chunkSize * fullChunks);

	char* currentChunk = new char[chunkSize];
	uint32_t allBytesRec = 0;

	// receive one chunk at a time, and write to file
	for (uint32_t chunkIndex = 0; chunkIndex <= fullChunks; ++chunkIndex){
		// the final (possibly partial) chunk only carries the remainder
		uint32_t expected = (chunkIndex == fullChunks) ? lastChunkSize : chunkSize;
		uint32_t numBytesReceived = 0;
		while (numBytesReceived < expected){
			// only request the bytes still missing so a partial read can
			// never make the next recv write past the end of currentChunk
			int n = recv(Socket, currentChunk + numBytesReceived, (int)(expected - numBytesReceived), 0);
			if (n <= 0){  // connection closed or socket error mid-transfer
				std::cout << "\n--->Connection lost while receiving file!\n";
				delete[] currentChunk;
				return;
			}
			numBytesReceived += (uint32_t)n;
			allBytesRec += (uint32_t)n;
			currentBytes += (uint32_t)n;
		}
		f.writeChunk(currentChunk, numBytesReceived);

		// update timer: once per elapsed second, snapshot the bytes received
		// during that second to produce a rough MB/s figure
		end = std::chrono::high_resolution_clock::now();
		auto timePassed = std::chrono::duration_cast<std::chrono::seconds>(end - start);
		if (currentTime != (uint32_t)timePassed.count()) {
			currentTime = (uint32_t)timePassed.count();
			oldBytes = currentBytes;
			currentBytes = 0;
		}
		MBPS = (double)oldBytes / (double)1000000;

		// output current status of file transfer
		std::cout << "\rReceiving file..." << allBytesRec / 1000000 << "/" << fileSize / 1000000 << "MB" << "(" << std::fixed << std::setprecision(2) << MBPS << " MB/s)";
	}
	delete[] currentChunk;

	std::cout << "--->File successfully received!\n";
}
{ "content_hash": "7649145634fb93f6d297e87b8afb5c11", "timestamp": "", "source": "github", "line_count": 186, "max_line_length": 165, "avg_line_length": 30.112903225806452, "alnum_prop": 0.6357793251205142, "repo_name": "FrankBotos/CPP-SocketsFileTransfer", "id": "4794409be2d3c8c7afc3b5031d6be3108bc2d316", "size": "5601", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Client.cpp", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "15689" } ], "symlink_target": "" }
package controllers;

import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

import org.apache.commons.collections.map.HashedMap;
import org.plannifico.PlannificoFactory;
import org.plannifico.PlannificoFactoryProvider;
import org.plannifico.data.PlanningSet;
import org.plannifico.data.UniverseNotExistException;
import org.plannifico.server.ActionNotPermittedException;
import org.plannifico.server.PlanningEngine;
import org.plannifico.server.ServerAlreadyRunningException;
import org.plannifico.server.configuration.XMLBasedConfigurationManager;

import org.plannifico.server.response.RecordsCollectionResponse;

import com.fasterxml.jackson.databind.node.ObjectNode;

import play.*;
import play.libs.Json;
import play.mvc.*;
import play.data.*;
import views.html.*;

/**
 * Play controller exposing the Plannifico planning engine:
 * engine lifecycle (start/stop), universe/measure-set browsing and
 * dataset queries.
 */
public class Application extends Controller {

	private static final PlannificoFactory factory = PlannificoFactoryProvider.getInstance();

	private static final PlanningEngine engine = factory.getPlanningEngine();

	private static final String configurationFile =
			XMLBasedConfigurationManager.DEFAULT_CONFIGURATION_FILE;

	/**
	 * Renders the home page listing every universe and, for each one,
	 * the names of its measure sets.
	 */
	public static Result index() {

		Collection<String> universes = engine.getUniverses();

		// universe name -> names of the measure sets it contains
		Map<String, Collection<String>> measuresets = new HashMap<>();

		for (String universe : universes) {

			// fetch once and reuse (previously queried the engine three times)
			Collection<String> measureSetNames = engine.getMeasureSetsNames(universe);

			Logger.debug("Display universe: " + universe);
			Logger.debug("Display measuresets: " + measureSetNames);

			measuresets.put(universe, measureSetNames);
		}

		return ok(index.render(engine.getStatus(), universes, measuresets));
	}

	/**
	 * Starts the planning engine with the default configuration file.
	 * Renders an error page when the engine fails to start or is already running.
	 */
	public static Result submitStart() {

		Logger.info("starting...");

		try {
			// engine.start returns 1 on a severe startup error
			if (engine.start(configurationFile) == 1)
				return ok(showresult.render("Severe Error starting the server: read log for more information"));

		} catch (ServerAlreadyRunningException e) {

			return ok(showresult.render("Failure: Server already running"));
		}

		Logger.info("starting [DONE]");

		return index();
	}

	/** Stops the planning engine and returns to the home page. */
	public static Result submitStop() {

		Logger.info("stopping...");

		engine.stop();

		return index();
	}

	/** Shows the number of measure sets of the universe given in the request form. */
	public static Result getMeasureSetCount() {

		Logger.info("get measureset count...");

		DynamicForm bindedForm = Form.form().bindFromRequest();

		if (engine.getStatus() != PlanningEngine.STARTED)
			return ok(showresult.render("Error: Server is not started"));

		int result = engine.getMeasureSetsNumber(bindedForm.get("universe"));

		return ok(showresult.render("Measure set count = " + result));
	}

	/**
	 * Shows the number of records of the measure set given in the request form
	 * ("universe" and "measureset" fields).
	 */
	public static Result getMeasureSetRecordCount() {

		Logger.info("get measureset count...");

		DynamicForm bindedForm = Form.form().bindFromRequest();

		Logger.info("universe: " + bindedForm.get("universe"));
		Logger.info("measureset: " + bindedForm.get("measureset"));

		if (engine.getStatus() != PlanningEngine.STARTED)
			return ok(showresult.render("Error: Server is not started"));

		try {
			long result = engine.getMasureSetRecordsNumber(
					bindedForm.get("universe"), bindedForm.get("measureset"));

			return ok(showresult.render("Measure set count = " + result));

		} catch (ActionNotPermittedException | UniverseNotExistException e) {

			return ok(showresult.render("Error = " + e.getMessage()));
		}
	}

	/** Placeholder endpoint: logs the requested universe and renders a stub page. */
	public static Result getAggregatedValue() {

		Logger.info("getAggregatedValue");

		DynamicForm bindedForm = Form.form().bindFromRequest();

		Logger.debug(bindedForm.get("universe"));

		return ok(showresult.render("Aggregated"));
	}

	/**
	 * Shows every relationship of the given dimension within the given universe.
	 * An unknown universe yields an empty relationship map.
	 */
	public static Result getDimRelationships(String universe, String dimension) {

		Logger.info("getDimRelationships (" + universe + ", " + dimension + ")");

		Map<String, Collection<String>> rels = new HashMap<>();

		try {
			rels = engine.getAllDimensionRelationships(universe, dimension);

			Logger.debug("getDimRelationships rels.size = " + rels.size());

		} catch (UniverseNotExistException e) {

			Logger.warn("Universe does not exist: " + universe);
		}

		return ok(showDimRelationships.render(universe, dimension, rels));
	}

	/**
	 * Renders the data navigator for the universe/measure set given in the
	 * request form, listing the planning dimensions and measure names.
	 */
	public static Result showDataNavigator(/* String universe, String measureset */) {

		DynamicForm bindedForm = Form.form().bindFromRequest();

		String universe = bindedForm.get("universe");
		String measureset = bindedForm.get("measureset");

		Logger.info("measureset: " + bindedForm.get("measureset"));

		Map<String, Collection<String>> dimensions = new HashMap<>();
		Map<String, Collection<String>> measures = new HashMap<>();

		Logger.info("showDataNavigator (" + universe + ", " + measureset + ")");

		try {
			dimensions.put(universe, engine.getPlanningDimensions(universe));

		} catch (UniverseNotExistException e1) {

			Logger.warn("Universe does not exist: " + universe);
		}

		try {
			// keyed by universe + measureset, matching the template's lookup
			measures.put(universe + measureset, engine.getMeasureSetsMeasureNames(universe, measureset));

		} catch (UniverseNotExistException e) {

			Logger.warn("Universe does not exist: " + universe);
		}

		return ok(datanavigator.render(universe, measureset, dimensions, measures));
	}

	/**
	 * Returns the queried dataset as JSON. The form fields "measures",
	 * "filters" and "groupby" arrive comma/colon separated and are rewritten
	 * to the ";"/"=" separators the engine expects.
	 */
	public static Result getDataset() {

		DynamicForm bindedForm = Form.form().bindFromRequest();

		String universe = bindedForm.get("universe");
		String measureset = bindedForm.get("measureset");

		String measures = bindedForm.get("measures").replace(",", ";");
		String filters = bindedForm.get("filters").replace(",", ";").replace(":", "=");
		String groupby = bindedForm.get("groupby").replace(",", ";");

		Logger.debug("getDataset (" + universe + ", " + measureset + ", "
				+ measures + ", " + filters + ", " + groupby + ")");

		try {
			PlanningSet dataset = engine.getDataSet(universe, measureset, measures, filters, groupby);

			Logger.debug("dataset records.size = " + dataset.getData().size());

			return ok(Json.toJson(new RecordsCollectionResponse(dataset.getData())));

		} catch (UniverseNotExistException e) {

			// previously fell through and threw a NullPointerException on
			// dataset.getData(); report the problem to the caller instead
			Logger.warn("Universe does not exist: " + universe);

			return ok(showresult.render("Error = Universe does not exist: " + universe));
		}
	}
}
{ "content_hash": "09eb52cb580d6551d6f04a9b30605767", "timestamp": "", "source": "github", "line_count": 243, "max_line_length": 104, "avg_line_length": 27.090534979423868, "alnum_prop": 0.6700592435060003, "repo_name": "plannifico/plannifico-app", "id": "1f14ed86fbc1ae146197692d1fbef3ab8cda6373", "size": "6583", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/controllers/Application.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "13622" }, { "name": "Java", "bytes": "9334" }, { "name": "JavaScript", "bytes": "3364" } ], "symlink_target": "" }
SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
{ "content_hash": "fabe65002c587da29074434e65b4be0b", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 39, "avg_line_length": 10.23076923076923, "alnum_prop": 0.6917293233082706, "repo_name": "mdoering/backbone", "id": "6cbfa0216ba835f44b009e486afb381aedbb3f79", "size": "181", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Liliopsida/Poales/Bromeliaceae/Guzmania/Guzmania kalbreyeri/ Syn. Sodiroa kalbreyeri/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
/* * The IEEE hereby grants a general, royalty-free license to copy, distribute, * display and make derivative works from this material, for all purposes, * provided that any use of the material contains the following * attribution: "Reprinted with permission from IEEE 1516.1(TM)-2010". * Should you require additional information, contact the Manager, Standards * Intellectual Property, IEEE Standards Association ([email protected]). */ //File: AttributeRegionAssociation.java /** * Record stored in AttributeSetRegionSetPairList */ package hla.rti1516e; import java.io.Serializable; public final class AttributeRegionAssociation implements Serializable { public AttributeRegionAssociation(AttributeHandleSet ahs, RegionHandleSet rhs) { ahset = ahs; rhset = rhs; } public final AttributeHandleSet ahset; public final RegionHandleSet rhset; } //end AttributeRegionAssociation
{ "content_hash": "e88cd9b533680d3953a1adf2f9f95a49", "timestamp": "", "source": "github", "line_count": 32, "max_line_length": 81, "avg_line_length": 28.875, "alnum_prop": 0.7673160173160173, "repo_name": "MSG134/IVCT", "id": "0e6cd39d14626591afe1adee4b8351e87cfea8dd", "size": "924", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "IEEE1516e/src/main/java/hla/rti1516e/AttributeRegionAssociation.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "551726" } ], "symlink_target": "" }
require "spec_helper" describe Mongoid::Contextual::Mongo do [ :blank?, :empty? ].each do |method| describe "##{method}" do before do Band.create(name: "Depeche Mode") end context "when the count is zero" do let(:criteria) do Band.where(name: "New Order") end let(:context) do described_class.new(criteria) end it "returns true" do context.send(method).should be_true end end context "when the count is greater than zero" do let(:criteria) do Band.where(name: "Depeche Mode") end let(:context) do described_class.new(criteria) end it "returns false" do context.send(method).should be_false end end end end describe "#cached?" do context "when the criteria is cached" do let(:criteria) do Band.all.cache end let(:context) do described_class.new(criteria) end it "returns true" do context.should be_cached end end context "when the criteria is not cached" do let(:criteria) do Band.all end let(:context) do described_class.new(criteria) end it "returns false" do context.should_not be_cached end end end describe "#count" do let!(:depeche) do Band.create(name: "Depeche Mode") end let!(:new_order) do Band.create(name: "New Order") end let(:criteria) do Band.where(name: "Depeche Mode") end context "when no arguments are provided" do let(:context) do described_class.new(criteria) end it "returns the number of documents that match" do context.count.should eq(1) end end context "when provided a document" do let(:context) do described_class.new(criteria) end let(:count) do context.count(depeche) end it "returns the number of documents that match" do count.should eq(1) end end context "when provided a block" do let(:context) do described_class.new(criteria) end let(:count) do context.count do |doc| doc.likes.nil? 
end end it "returns the number of documents that match" do count.should eq(1) end end end [ :delete, :delete_all ].each do |method| describe "##{method}" do let!(:depeche_mode) do Band.create(name: "Depeche Mode") end let!(:new_order) do Band.create(name: "New Order") end context "when the selector is contraining" do let(:criteria) do Band.where(name: "Depeche Mode") end let(:context) do described_class.new(criteria) end let!(:deleted) do context.send(method) end it "deletes the matching documents" do Band.find(new_order.id).should eq(new_order) end it "deletes the correct number of documents" do Band.count.should eq(1) end it "returns the number of documents deleted" do deleted.should eq(1) end end context "when the selector is not contraining" do let(:criteria) do Band.all end let(:context) do described_class.new(criteria) end before do context.send(method) end it "deletes all the documents" do Band.count.should eq(0) end end end end [ :destroy, :destroy_all ].each do |method| describe "##{method}" do let!(:depeche_mode) do Band.create(name: "Depeche Mode") end let!(:new_order) do Band.create(name: "New Order") end context "when the selector is contraining" do let(:criteria) do Band.where(name: "Depeche Mode") end let(:context) do described_class.new(criteria) end let!(:destroyed) do context.send(method) end it "destroys the matching documents" do Band.find(new_order.id).should eq(new_order) end it "destroys the correct number of documents" do Band.count.should eq(1) end it "returns the number of documents destroyed" do destroyed.should eq(1) end end context "when the selector is not contraining" do let(:criteria) do Band.all end let(:context) do described_class.new(criteria) end before do context.send(method) end it "destroys all the documents" do Band.count.should eq(0) end end end end describe "#distinct" do before do Band.create(name: "Depeche Mode") Band.create(name: "New Order") end context "when limiting the result set" do let(:criteria) do 
Band.where(name: "Depeche Mode") end let(:context) do described_class.new(criteria) end it "returns the distinct matching fields" do context.distinct(:name).should eq([ "Depeche Mode" ]) end end context "when not limiting the result set" do let(:criteria) do Band.criteria end let(:context) do described_class.new(criteria) end it "returns the distinct field values" do context.distinct(:name).should eq([ "Depeche Mode", "New Order" ]) end end end describe "#each" do before do Band.create(name: "Depeche Mode") end let(:criteria) do Band.where(name: "Depeche Mode") end let(:context) do described_class.new(criteria) end context "when providing a block" do it "yields mongoid documents to the block" do context.each do |doc| doc.should be_a(Mongoid::Document) end end it "iterates over the matching documents" do context.each do |doc| doc.name.should eq("Depeche Mode") end end it "returns self" do context.each{}.should be(context) end end context "when no block is provided" do let(:enum) do context.each end it "returns an enumerator" do enum.should be_a(Enumerator) end context "when iterating over the enumerator" do context "when iterating with each" do it "yields mongoid documents to the block" do enum.each do |doc| doc.should be_a(Mongoid::Document) end end end context "when iterating with next" do it "yields mongoid documents" do enum.next.should be_a(Mongoid::Document) end end end end end describe "#eager_load" do let(:criteria) do Person.includes(:game) end let(:context) do described_class.new(criteria) end context "when no documents are returned" do let(:game_metadata) do Person.reflect_on_association(:game) end it "does not make any additional database queries" do game_metadata.should_receive(:eager_load).never context.send(:eager_load, []) end end end describe "#exists?" 
do before do Band.create(name: "Depeche Mode") end context "when the count is zero" do let(:criteria) do Band.where(name: "New Order") end let(:context) do described_class.new(criteria) end it "returns false" do context.should_not be_exists end end context "when the count is greater than zero" do let(:criteria) do Band.where(name: "Depeche Mode") end let(:context) do described_class.new(criteria) end it "returns true" do context.should be_exists end end end describe "#explain" do let(:criteria) do Band.where(name: "Depeche Mode") end let(:context) do described_class.new(criteria) end it "returns the criteria explain path" do context.explain["cursor"].should eq("BasicCursor") end end describe "#find_and_modify" do let!(:depeche) do Band.create(name: "Depeche Mode") end let!(:tool) do Band.create(name: "Tool") end context "when the selector matches" do context "when not providing options" do let(:criteria) do Band.where(name: "Depeche Mode") end let(:context) do described_class.new(criteria) end let!(:result) do context.find_and_modify("$inc" => { likes: 1 }) end it "returns the first matching document" do result.should eq(depeche) end it "updates the document in the database" do depeche.reload.likes.should eq(1) end end context "when sorting" do let(:criteria) do Band.desc(:name) end let(:context) do described_class.new(criteria) end let!(:result) do context.find_and_modify("$inc" => { likes: 1 }) end it "returns the first matching document" do result.should eq(tool) end it "updates the document in the database" do tool.reload.likes.should eq(1) end end context "when limiting fields" do let(:criteria) do Band.only(:_id) end let(:context) do described_class.new(criteria) end let!(:result) do context.find_and_modify("$inc" => { likes: 1 }) end it "returns the first matching document" do result.should eq(depeche) end it "limits the returned fields" do result.name.should be_nil end it "updates the document in the database" do depeche.reload.likes.should eq(1) end end 
context "when returning new" do let(:criteria) do Band.where(name: "Depeche Mode") end let(:context) do described_class.new(criteria) end let!(:result) do context.find_and_modify({ "$inc" => { likes: 1 }}, new: true) end it "returns the first matching document" do result.should eq(depeche) end it "returns the updated document" do result.likes.should eq(1) end end context "when removing" do let(:criteria) do Band.where(name: "Depeche Mode") end let(:context) do described_class.new(criteria) end let!(:result) do context.find_and_modify({}, remove: true) end it "returns the first matching document" do result.should eq(depeche) end it "deletes the document from the database" do expect { depeche.reload }.to raise_error(Mongoid::Errors::DocumentNotFound) end end end context "when the selector does not match" do let(:criteria) do Band.where(name: "Placebo") end let(:context) do described_class.new(criteria) end let(:result) do context.find_and_modify("$inc" => { likes: 1 }) end it "returns nil" do result.should be_nil end end end [ :first, :one ].each do |method| describe "##{method}" do let!(:depeche_mode) do Band.create(name: "Depeche Mode") end let!(:new_order) do Band.create(name: "New Order") end let(:criteria) do Band.where(name: "Depeche Mode") end let(:context) do described_class.new(criteria) end it "returns the first matching document" do context.send(method).should eq(depeche_mode) end end end describe "#initialize" do let(:criteria) do Band.where(name: "Depeche Mode") end let(:context) do described_class.new(criteria) end it "sets the criteria" do context.criteria.should eq(criteria) end it "sets the klass" do context.klass.should eq(Band) end it "sets the query" do context.query.should be_a(Moped::Query) end it "sets the query selector" do context.query.selector.should eq({ "name" => "Depeche Mode" }) end end describe "#last" do context "when no default scope" do let!(:depeche_mode) do Band.create(name: "Depeche Mode") end let!(:new_order) do 
Band.create(name: "New Order") end let(:criteria) do Band.all end let(:context) do described_class.new(criteria) end it "returns the last matching document" do context.last.should eq(new_order) end end context "when default scope" do let!(:palm) do Tree.create(name: "Palm") end let!(:maple) do Tree.create(name: "Maple") end let(:criteria) do Tree.all end let(:context) do described_class.new(criteria) end it "respects default scope" do context.last.should eq(palm) end end end [ :length, :size ].each do |method| describe "##{method}" do before do Band.create(name: "Depeche Mode") Band.create(name: "New Order") end context "when the criteria has a limit" do let(:criteria) do Band.limit(1) end let(:context) do described_class.new(criteria) end it "returns the number of documents that match" do context.send(method).should eq(2) end context "when calling more than once" do before do context.query.should_receive(:count).once.and_return(2) end it "returns the cached value for subsequent calls" do 2.times { context.send(method).should eq(2) } end end context "when the results have been iterated over" do before do context.entries context.query.should_receive(:count).once.and_return(2) end it "returns the cached value for all calls" do context.send(method).should eq(2) end context "when the results have been iterated over multiple times" do before do context.entries end it "resets the length on each full iteration" do context.should have(2).items end end end end context "when the criteria has no limit" do let(:criteria) do Band.where(name: "Depeche Mode") end let(:context) do described_class.new(criteria) end it "returns the number of documents that match" do context.send(method).should eq(1) end context "when calling more than once" do before do context.query.should_receive(:count).once.and_return(1) end it "returns the cached value for subsequent calls" do 2.times { context.send(method).should eq(1) } end end context "when the results have been iterated over" do before do 
context.entries context.query.should_receive(:count).once.and_return(1) end it "returns the cached value for all calls" do context.send(method).should eq(1) end context "when the results have been iterated over multiple times" do before do context.entries end it "resets the length on each full iteration" do context.should have(1).item end end end end end end describe "#limit" do let!(:depeche_mode) do Band.create(name: "Depeche Mode") end let!(:new_order) do Band.create(name: "New Order") end let(:criteria) do Band.all end let(:context) do described_class.new(criteria) end it "limits the results" do context.limit(1).entries.should eq([ depeche_mode ]) end end describe "#map_reduce" do let!(:depeche_mode) do Band.create(name: "Depeche Mode", likes: 200) end let!(:tool) do Band.create(name: "Tool", likes: 100) end let(:map) do %Q{ function() { emit(this.name, { likes: this.likes }); }} end let(:reduce) do %Q{ function(key, values) { var result = { likes: 0 }; values.forEach(function(value) { result.likes += value.likes; }); return result; }} end context "when no selection is provided" do let(:criteria) do Band.all end let(:context) do described_class.new(criteria) end let(:results) do context.map_reduce(map, reduce).out(inline: 1) end it "returns the first aggregate result" do results.should include( { "_id" => "Depeche Mode", "value" => { "likes" => 200 }} ) end it "returns the second aggregate result" do results.should include( { "_id" => "Tool", "value" => { "likes" => 100 }} ) end it "returns the correct number of documents" do results.count.should eq(2) end it "contains the entire raw results" do results["results"].should eq([ { "_id" => "Depeche Mode", "value" => { "likes" => 200 }}, { "_id" => "Tool", "value" => { "likes" => 100 }} ]) end it "contains the execution time" do results.time.should_not be_nil end it "contains the count statistics" do results["counts"].should eq({ "input" => 2, "emit" => 2, "reduce" => 0, "output" => 2 }) end it "contains the input 
count" do results.input.should eq(2) end it "contains the emitted count" do results.emitted.should eq(2) end it "contains the reduced count" do results.reduced.should eq(0) end it "contains the output count" do results.output.should eq(2) end end context "when selection is provided" do let(:criteria) do Band.where(name: "Depeche Mode") end let(:context) do described_class.new(criteria) end let(:results) do context.map_reduce(map, reduce).out(inline: 1) end it "includes the aggregate result" do results.should include( { "_id" => "Depeche Mode", "value" => { "likes" => 200 }} ) end it "returns the correct number of documents" do results.count.should eq(1) end it "contains the entire raw results" do results["results"].should eq([ { "_id" => "Depeche Mode", "value" => { "likes" => 200 }} ]) end it "contains the execution time" do results.time.should_not be_nil end it "contains the count statistics" do results["counts"].should eq({ "input" => 1, "emit" => 1, "reduce" => 0, "output" => 1 }) end it "contains the input count" do results.input.should eq(1) end it "contains the emitted count" do results.emitted.should eq(1) end it "contains the reduced count" do results.reduced.should eq(0) end it "contains the output count" do results.output.should eq(1) end end context "when sorting is provided" do before do Band.index(name: -1) Band.create_indexes end let(:criteria) do Band.desc(:name) end let(:context) do described_class.new(criteria) end let(:results) do context.map_reduce(map, reduce).out(inline: 1) end it "returns the first aggregate result" do results.should include( { "_id" => "Tool", "value" => { "likes" => 100 }} ) end it "returns the second aggregate result" do results.should include( { "_id" => "Depeche Mode", "value" => { "likes" => 200 }} ) end it "returns the correct number of documents" do results.count.should eq(2) end it "contains the entire raw results" do results["results"].should eq([ { "_id" => "Depeche Mode", "value" => { "likes" => 200 }}, { "_id" => 
"Tool", "value" => { "likes" => 100 }} ]) end end context "when limiting is provided" do let(:criteria) do Band.limit(1) end let(:context) do described_class.new(criteria) end let(:results) do context.map_reduce(map, reduce).out(inline: 1) end it "returns the first aggregate result" do results.should include( { "_id" => "Depeche Mode", "value" => { "likes" => 200 }} ) end it "returns the correct number of documents" do results.count.should eq(1) end it "contains the entire raw results" do results["results"].should eq([ { "_id" => "Depeche Mode", "value" => { "likes" => 200 }} ]) end end context "when the output is replace" do let(:criteria) do Band.limit(1) end let(:context) do described_class.new(criteria) end let(:results) do context.map_reduce(map, reduce).out(replace: "mr-output") end it "returns the correct number of documents" do results.count.should eq(1) end it "contains the entire results" do results.should eq([ { "_id" => "Depeche Mode", "value" => { "likes" => 200 }} ]) end end context "when the output is reduce" do let(:criteria) do Band.limit(1) end let(:context) do described_class.new(criteria) end let(:results) do context.map_reduce(map, reduce).out(reduce: :mr_output) end it "returns the correct number of documents" do results.count.should eq(1) end it "contains the entire results" do results.should eq([ { "_id" => "Depeche Mode", "value" => { "likes" => 200 }} ]) end end context "when the output is merge" do let(:criteria) do Band.limit(1) end let(:context) do described_class.new(criteria) end let(:results) do context.map_reduce(map, reduce).out(merge: :mr_output) end it "returns the correct number of documents" do results.count.should eq(1) end it "contains the entire results" do results.should eq([ { "_id" => "Depeche Mode", "value" => { "likes" => 200 }} ]) end end context "when providing no output" do let(:criteria) do Band.limit(1) end let(:context) do described_class.new(criteria) end let(:results) do context.map_reduce(map, reduce) end it 
"raises an error" do expect { results.entries }.to raise_error(Mongoid::Errors::NoMapReduceOutput) end end context "when providing a finalize" do let(:criteria) do Band.limit(1) end let(:context) do described_class.new(criteria) end let(:finalize) do %Q{ function(key, value) { value.extra = true; return value; }} end let(:results) do context.map_reduce(map, reduce).out(inline: 1).finalize(finalize) end it "returns the correct number of documents" do results.count.should eq(1) end it "contains the entire results" do results.should eq([ { "_id" => "Depeche Mode", "value" => { "likes" => 200, "extra" => true }} ]) end end end describe "#skip" do let!(:depeche_mode) do Band.create(name: "Depeche Mode") end let!(:new_order) do Band.create(name: "New Order") end let(:criteria) do Band.all end let(:context) do described_class.new(criteria) end it "limits the results" do context.skip(1).entries.should eq([ new_order ]) end end describe "#sort" do let!(:depeche_mode) do Band.create(name: "Depeche Mode") end let!(:new_order) do Band.create(name: "New Order") end let(:criteria) do Band.all end let(:context) do described_class.new(criteria) end context "when providing a spec" do it "sorts the results" do context.sort(name: -1).entries.should eq([ new_order, depeche_mode ]) end it "returns the context" do context.sort(name: 1).should eq(context) end end context "when providing a block" do let(:sorted) do context.sort do |a, b| b.name <=> a.name end end it "sorts the results in memory" do sorted.should eq([ new_order, depeche_mode ]) end end end describe "#update" do let!(:depeche_mode) do Band.create(name: "Depeche Mode") end let!(:new_order) do Band.create(name: "New Order") end let(:criteria) do Band.all end let(:context) do described_class.new(criteria) end context "when providing attributes" do context "when the attributes are of the correct type" do before do context.update(name: "Smiths") end it "updates only the first matching document" do depeche_mode.reload.name.should 
eq("Smiths") end it "does not update the last matching document" do new_order.reload.name.should eq("New Order") end end context "when the attributes must be mongoized" do before do context.update(member_count: "1") end it "updates the first matching document" do depeche_mode.reload.member_count.should eq(1) end it "does not update the last matching document" do new_order.reload.member_count.should be_nil end end end context "when providing atomic operations" do context "when only atomic operations are provided" do context "when the attributes are in the correct type" do before do context.update("$set" => { name: "Smiths" }) end it "updates the first matching document" do depeche_mode.reload.name.should eq("Smiths") end it "does not update the last matching document" do new_order.reload.name.should eq("New Order") end end context "when the attributes must be mongoized" do before do context.update("$set" => { member_count: "1" }) end it "updates the first matching document" do depeche_mode.reload.member_count.should eq(1) end it "does not update the last matching document" do new_order.reload.member_count.should be_nil end end end context "when a mix are provided" do before do context.update("$set" => { name: "Smiths" }, likes: 100) end it "updates the first matching document's set" do depeche_mode.reload.name.should eq("Smiths") end it "updates the first matching document's updates" do depeche_mode.reload.likes.should eq(100) end it "does not update the last matching document's set" do new_order.reload.name.should eq("New Order") end it "does not update the last matching document's updates" do new_order.reload.likes.should be_nil end end end context "when providing no attributes" do it "returns false" do context.update.should be_false end end end describe "#update_all" do let!(:depeche_mode) do Band.create(name: "Depeche Mode") end let!(:new_order) do Band.create(name: "New Order") end let(:criteria) do Band.all end let(:context) do described_class.new(criteria) 
end context "when providing attributes" do context "when the attributes are of the correct type" do before do context.update_all(name: "Smiths") end it "updates the first matching document" do depeche_mode.reload.name.should eq("Smiths") end it "updates the last matching document" do new_order.reload.name.should eq("Smiths") end end context "when the attributes must be mongoized" do before do context.update_all(member_count: "1") end it "updates the first matching document" do depeche_mode.reload.member_count.should eq(1) end it "updates the last matching document" do new_order.reload.member_count.should eq(1) end end end context "when providing atomic operations" do context "when only atomic operations are provided" do context "when the attributes are in the correct type" do before do context.update_all("$set" => { name: "Smiths" }) end it "updates the first matching document" do depeche_mode.reload.name.should eq("Smiths") end it "updates the last matching document" do new_order.reload.name.should eq("Smiths") end end context "when the attributes must be mongoized" do before do context.update_all("$set" => { member_count: "1" }) end it "updates the first matching document" do depeche_mode.reload.member_count.should eq(1) end it "updates the last matching document" do new_order.reload.member_count.should eq(1) end end end context "when a mix are provided" do before do context.update_all("$set" => { name: "Smiths" }, likes: 100) end it "updates the first matching document's set" do depeche_mode.reload.name.should eq("Smiths") end it "updates the first matching document's updates" do depeche_mode.reload.likes.should eq(100) end it "updates the last matching document's set" do new_order.reload.name.should eq("Smiths") end it "updates the last matching document's updates" do new_order.reload.likes.should eq(100) end end end context "when providing no attributes" do it "returns false" do context.update_all.should be_false end end end end
{ "content_hash": "547852ca802a81fa55de2125fbdda46f", "timestamp": "", "source": "github", "line_count": 1475, "max_line_length": 84, "avg_line_length": 21.309830508474576, "alnum_prop": 0.5533532705523034, "repo_name": "jamesjn/mongoid", "id": "d485881495e68b558a09dfe7e8ada9b2f15f9ed4", "size": "31432", "binary": false, "copies": "7", "ref": "refs/heads/master", "path": "spec/mongoid/contextual/mongo_spec.rb", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
package com.nokia.springboot.training.d01.s03.repository;

import org.springframework.stereotype.Repository;

/**
 * A simple product repository.
 *
 * @author bogdan.solga
 */
@Repository
public class ProductRepository {

    /**
     * Simulates listing the products by writing a fixed notice to standard output.
     */
    public void displayProducts() {
        final String notice = "Displaying all the products";
        System.out.println(notice);
    }
}
{ "content_hash": "dcad4657b272cfd7bc8791cc6c69a15e", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 58, "avg_line_length": 20.3125, "alnum_prop": 0.7292307692307692, "repo_name": "bogdansolga/nokia-spring-boot-training", "id": "12728e27ee971738189fc95ac994ff28a065385d", "size": "325", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "d01/d01s03/d01s03e04-bean-aliasing/src/main/java/com/nokia/springboot/training/d01/s03/repository/ProductRepository.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "1192" }, { "name": "Java", "bytes": "273049" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<!--
    MyBatis mapper for FPaymentDao: CRUD and list queries over the f_payment
    table, joined against suppliers/stores (c_supplier/c_store), accounts
    (f_account), subjects (c_km), users (sys_user) and order info (c_rkckddinfo).
-->
<mapper namespace="com.tlkzzz.jeesite.modules.cw.dao.FPaymentDao">

    <!-- Shared SELECT column list; aliases map onto FPayment bean properties.
         NOTE(review): fao.bank_code is exposed as "faName" and fat.bank_code
         as "csName" while sfFindList maps fa.name to "faName" - the alias
         naming looks inconsistent; confirm against the FPayment bean. -->
    <sql id="fPaymentColumns">
        a.id AS "id",
        a.payment_date AS "paymentDate",
        a.payment_code AS "paymentCode",
        a.payment_account AS "paymentAccount",
        fao.bank_code AS "faName",
        a.travel_unit AS "travelUnit.id",
        csu.name AS "travelUnit.name",
        cst.name AS "travelUnit.code",
        a.travel_account AS "travelAccount",
        fat.bank_code AS "csName",
        a.payment_type AS "paymentType",
        a.payment_mode AS "paymentMode",
        a.je AS "je",
        a.jsr AS "jsr.id",
        cb.name AS "jsr.name",
        a.subject_code AS "subjectCode.id",
        ck.kmname AS "subjectCode.name",
        a.approval_status AS "approvalStatus",
        a.auditor AS "auditor.id",
        su.name AS "auditor.name",
        a.remarks AS "remarks",
        a.create_by AS "createBy.id",
        a.create_date AS "createDate",
        a.update_by AS "updateBy.id",
        a.update_date AS "updateDate",
        a.htje as "htje",
        rck.ddbh as "ddbh",
        a.thstatus as "thstatus"
    </sql>

    <!-- Shared joins backing the column list above. -->
    <sql id="fPaymentJoins">
        LEFT JOIN c_rkckddinfo rck ON rck.id = a.payment_code
        LEFT JOIN c_supplier csu ON csu.id=a.travel_unit
        LEFT JOIN c_store cst ON cst.id=a.travel_unit
        LEFT JOIN c_km ck ON ck.id=a.subject_code
        LEFT JOIN sys_user su ON su.id=a.auditor
        LEFT JOIN sys_user cb ON cb.id=a.jsr
        LEFT JOIN f_account fao ON fao.id=a.payment_account
        LEFT JOIN f_account fat ON fat.id=a.travel_account
    </sql>

    <!-- Load a single payment by primary key. -->
    <select id="get" resultType="FPayment">
        SELECT
        <include refid="fPaymentColumns"/>
        FROM f_payment a
        <include refid="fPaymentJoins"/>
        WHERE a.id = #{id}
    </select>

    <!-- Combined listing of payments (fybs=1) and receipts (fybs=2) via
         UNION ALL, optionally filtered to a single calendar day; both
         branches compare on the date part only (STR_TO_DATE %Y-%m-%d). -->
    <select id="sfFindList" resultType="FPayment">
        SELECT a.* FROM (
        SELECT fp.*,cs.name as "csName",fa.name as "faName",cr.ddbh as "ddbh",1 as "fybs"
        FROM f_payment fp
        LEFT JOIN c_sclass cs ON cs.id=fp.travel_unit
        LEFT JOIN f_account fa ON fa.bank_code=fp.travel_account
        LEFT JOIN c_rkckddinfo cr ON cr.id=fp.payment_code
        <if test="paymentDate != null and paymentDate != ''">
            WHERE STR_TO_DATE(fp.payment_date,'%Y-%m-%d')=STR_TO_DATE(#{paymentDate},'%Y-%m-%d')
        </if>
        UNION ALL
        SELECT fr.*,cs.name as "csName",fa.name as "faName",cr.ddbh as "ddbh",2 as "fybs"
        FROM f_receipt fr
        LEFT JOIN c_sclass cs ON cs.id=fr.travel_unit
        LEFT JOIN f_account fa ON fa.bank_code=fr.travel_account
        LEFT JOIN c_rkckddinfo cr ON cr.id=fr.receipt_code
        <if test="paymentDate != null and paymentDate != ''">
            WHERE STR_TO_DATE(fr.receipt_date,'%Y-%m-%d') = STR_TO_DATE(#{paymentDate},'%Y-%m-%d')
        </if>
        ) a ORDER BY a.payment_date DESC
    </select>

    <!-- Load a single payment by its document number (payment_code). -->
    <select id="getByPaymentCode" resultType="FPayment">
        SELECT
        <include refid="fPaymentColumns"/>
        FROM f_payment a
        <include refid="fPaymentJoins"/>
        WHERE a.payment_code = #{paymentCode}
    </select>

    <!-- Overwrite the htje amount of one payment. -->
    <update id="paymentAddHtje">
        UPDATE f_payment SET
        htje=#{htje}
        WHERE id = #{id}
    </update>

    <!-- Page of 10 rows starting at offset ${fybs}.
         NOTE(review): ${fybs} is spliced into the SQL verbatim (string
         substitution, not a bound parameter), so it must never be fed from
         untrusted input - confirm callers only pass computed offsets. -->
    <select id="fyfindList" resultType="FPayment">
        SELECT o.* FROM (
        SELECT
        <include refid="fPaymentColumns"/>
        FROM f_payment a
        <include refid="fPaymentJoins"/>
        ) o
        LIMIT ${fybs},10
    </select>

    <!-- Filtered listing. paymentType matches via FIND_IN_SET (comma list);
         paymentDate and thstatus filter with >= while the remaining filters
         are equality. NOTE(review): the >= on thstatus looks inconsistent
         with thstatusUpdate's exact-value semantics - confirm it is intended.
         ${page.orderBy} is string-substituted; it should only ever contain
         trusted, whitelisted column names. -->
    <select id="findList" resultType="FPayment">
        SELECT
        <include refid="fPaymentColumns"/>
        FROM f_payment a
        <include refid="fPaymentJoins"/>
        <where>
            <if test="paymentType != null and paymentType != ''">
                AND FIND_IN_SET(a.payment_type,#{paymentType})
            </if>
            <if test="paymentDate != null and paymentDate != ''">
                AND a.payment_date &gt;= #{paymentDate}
            </if>
            <if test="paymentCode != null and paymentCode != ''">
                AND a.payment_code = #{paymentCode}
            </if>
            <if test="thstatus != null and thstatus != ''">
                AND a.thstatus &gt;= #{thstatus}
            </if>
            <if test="paymentAccount != null and paymentAccount != ''">
                AND a.payment_account = #{paymentAccount}
            </if>
            <if test="travelAccount != null and travelAccount != ''">
                AND a.travel_account = #{travelAccount}
            </if>
        </where>
        <choose>
            <when test="page !=null and page.orderBy != null and page.orderBy != ''">
                ORDER BY ${page.orderBy}
            </when>
            <otherwise>
                ORDER BY a.update_date DESC
            </otherwise>
        </choose>
    </select>

    <!-- Unfiltered listing with the same ordering rules as findList. -->
    <select id="findAllList" resultType="FPayment">
        SELECT
        <include refid="fPaymentColumns"/>
        FROM f_payment a
        <include refid="fPaymentJoins"/>
        <where>
        </where>
        <choose>
            <when test="page !=null and page.orderBy != null and page.orderBy != ''">
                ORDER BY ${page.orderBy}
            </when>
            <otherwise>
                ORDER BY a.update_date DESC
            </otherwise>
        </choose>
    </select>

    <!-- Insert a full payment row; the id is supplied by the caller. -->
    <insert id="insert">
        INSERT INTO f_payment(
        id,
        payment_date,
        payment_code,
        payment_account,
        travel_unit,
        travel_account,
        payment_type,
        payment_mode,
        je,
        jsr,
        subject_code,
        approval_status,
        auditor,
        remarks,
        create_by,
        create_date,
        update_by,
        update_date,
        htje,
        thstatus
        ) VALUES (
        #{id},
        #{paymentDate},
        #{paymentCode},
        #{paymentAccount},
        #{travelUnit.id},
        #{travelAccount},
        #{paymentType},
        #{paymentMode},
        #{je},
        #{jsr.id},
        #{subjectCode.id},
        #{approvalStatus},
        #{auditor.id},
        #{remarks},
        #{createBy.id},
        #{createDate},
        #{updateBy.id},
        #{updateDate},
        #{htje},
        #{thstatus}
        )
    </insert>

    <!-- Full update of a payment row; create_by/create_date are untouched. -->
    <update id="update">
        UPDATE f_payment SET
        payment_date = #{paymentDate},
        payment_code = #{paymentCode},
        payment_account = #{paymentAccount},
        travel_unit = #{travelUnit.id},
        travel_account = #{travelAccount},
        payment_type = #{paymentType},
        payment_mode = #{paymentMode},
        je = #{je},
        jsr = #{jsr.id},
        subject_code = #{subjectCode.id},
        approval_status = #{approvalStatus},
        auditor = #{auditor.id},
        remarks = #{remarks},
        update_by = #{updateBy.id},
        update_date = #{updateDate},
        htje=#{htje},
        thstatus=#{thstatus}
        WHERE id = #{id}
    </update>

    <!-- Set approval status and thstatus together. -->
    <update id="thstatusUpdate">
        UPDATE f_payment SET
        approval_status = #{approvalStatus},
        thstatus = #{thstatus}
        WHERE id = #{id}
    </update>

    <!-- Set approval status and record the auditing user. -->
    <update id="updateApprovalStatus">
        UPDATE f_payment SET
        approval_status = #{approvalStatus},
        auditor = #{auditor.id}
        WHERE id = #{id}
    </update>

    <!-- Increment htje by the given amount. -->
    <update id="addHTJE">
        UPDATE f_payment a SET a.htje = a.htje+#{htje}
        WHERE id = #{id}
    </update>

    <!-- Decrement htje by the given amount. -->
    <update id="minHTJE">
        UPDATE f_payment a SET a.htje = a.htje-#{htje}
        WHERE id = #{id}
    </update>

    <!-- Hard delete of one payment row (declared with <update>, which
         MyBatis executes the same way for DELETE statements). -->
    <update id="delete">
        DELETE FROM f_payment
        WHERE id = #{id}
    </update>

</mapper>
{ "content_hash": "6638ddece5ad1e0b21aa80d6c67eaa56", "timestamp": "", "source": "github", "line_count": 251, "max_line_length": 107, "avg_line_length": 25.788844621513945, "alnum_prop": 0.6493125289664762, "repo_name": "tlkzzz/xpjfx", "id": "e4d96b07bcb1116f9eaba72be24bf0728f96447b", "size": "6473", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/resources/mappings/modules/cw/FPaymentDao.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ASP", "bytes": "3753" }, { "name": "ApacheConf", "bytes": "768" }, { "name": "Batchfile", "bytes": "6350" }, { "name": "CSS", "bytes": "991728" }, { "name": "FreeMarker", "bytes": "766" }, { "name": "HTML", "bytes": "2598796" }, { "name": "Java", "bytes": "3228932" }, { "name": "JavaScript", "bytes": "11271992" }, { "name": "PHP", "bytes": "8060" } ], "symlink_target": "" }
#include "mod_proxy.h"
#include "scoreboard.h"
#include "ap_mpm.h"
#include "apr_version.h"
#include "ap_hooks.h"

module AP_MODULE_DECLARE_DATA lbmethod_bytraffic_module;

/* Optional function exported by mod_proxy; resolved lazily on first use so
 * this module can be loaded even though mod_proxy initializes separately. */
static int (*ap_proxy_retry_worker_fn)(const char *proxy_function,
        proxy_worker *worker, server_rec *s) = NULL;

/*
 * The idea behind the find_best_bytraffic scheduler is the following:
 *
 * We know the amount of traffic (bytes in and out) handled by each
 * worker. We normalize that traffic by each workers' weight. So assuming
 * a setup as below:
 *
 * worker     a    b    c
 * lbfactor   1    1    3
 *
 * the scheduler will allow worker c to handle 3 times the
 * traffic of a and b. If each request/response results in the
 * same amount of traffic, then c would be accessed 3 times as
 * often as a or b. If, for example, a handled a request that
 * resulted in a large i/o bytecount, then b and c would be
 * chosen more often, to even things out.
 */
static proxy_worker *find_best_bytraffic(proxy_balancer *balancer,
                                         request_rec *r)
{
    int i;
    apr_off_t mytraffic = 0;
    apr_off_t curmin = 0;
    proxy_worker **worker;
    proxy_worker *mycandidate = NULL;
    int cur_lbset = 0;
    int max_lbset = 0;
    int checking_standby;
    int checked_standby;

    if (!ap_proxy_retry_worker_fn) {
        ap_proxy_retry_worker_fn =
                APR_RETRIEVE_OPTIONAL_FN(ap_proxy_retry_worker);
        if (!ap_proxy_retry_worker_fn) {
            /* can only happen if mod_proxy isn't loaded */
            return NULL;
        }
    }

    ap_log_error(APLOG_MARK, APLOG_DEBUG, 0, r->server, APLOGNO(01209)
                 "proxy: Entering bytraffic for BALANCER (%s)",
                 balancer->s->name);

    /* First try to see if we have available candidate.
     * Outer loop walks the lbsets in ascending order; for each lbset the
     * inner while makes up to two passes over the workers: first over
     * non-standby workers (checking_standby == 0), then - only if no
     * candidate was found - over hot-standby workers (checking_standby == 1).
     */
    do {
        checking_standby = checked_standby = 0;
        while (!mycandidate && !checked_standby) {
            worker = (proxy_worker **)balancer->workers->elts;
            for (i = 0; i < balancer->workers->nelts; i++, worker++) {
                if (!checking_standby) {    /* first time through */
                    /* Track the highest lbset seen so the outer loop knows
                     * when to stop escalating. */
                    if ((*worker)->s->lbset > max_lbset)
                        max_lbset = (*worker)->s->lbset;
                }
                /* Skip workers outside the current lbset, workers of the
                 * wrong standby class for this pass, and draining workers. */
                if ( ((*worker)->s->lbset != cur_lbset) ||
                     (checking_standby ? !PROXY_WORKER_IS_STANDBY(*worker) : PROXY_WORKER_IS_STANDBY(*worker)) ||
                     (PROXY_WORKER_IS_DRAINING(*worker)) ) {
                    continue;
                }

                /* If the worker is in error state run
                 * retry on that worker. It will be marked as
                 * operational if the retry timeout is elapsed.
                 * The worker might still be unusable, but we try
                 * anyway.
                 */
                if (!PROXY_WORKER_IS_USABLE(*worker))
                    ap_proxy_retry_worker_fn("BALANCER", *worker, r->server);

                /* Take into calculation only the workers that are
                 * not in error state or not disabled.
                 * Normalized traffic = (bytes sent + bytes read) / lbfactor;
                 * the worker with the lowest value wins.
                 */
                if (PROXY_WORKER_IS_USABLE(*worker)) {
                    mytraffic = ((*worker)->s->transferred/(*worker)->s->lbfactor) +
                                ((*worker)->s->read/(*worker)->s->lbfactor);
                    if (!mycandidate || mytraffic < curmin) {
                        mycandidate = *worker;
                        curmin = mytraffic;
                    }
                }
            }
            /* After pass 0 this sets checked_standby = 0 and
             * checking_standby = 1 (second pass over standbys); after
             * pass 1 both are 1 and the while loop exits. */
            checked_standby = checking_standby++;
        }
        cur_lbset++;
    } while (cur_lbset <= max_lbset && !mycandidate);

    if (mycandidate) {
        ap_log_error(APLOG_MARK, APLOG_DEBUG, 0, r->server, APLOGNO(01210)
                     "proxy: bytraffic selected worker \"%s\" : busy %" APR_SIZE_T_FMT,
                     mycandidate->s->name, mycandidate->s->busy);
    }

    return mycandidate;
}

/* Zero the load-balancing counters of every worker in the balancer.
 * assumed to be mutex protected by caller */
static apr_status_t reset(proxy_balancer *balancer, server_rec *s)
{
    int i;
    proxy_worker **worker;
    worker = (proxy_worker **)balancer->workers->elts;
    for (i = 0; i < balancer->workers->nelts; i++, worker++) {
        (*worker)->s->lbstatus = 0;
        (*worker)->s->busy = 0;
        (*worker)->s->transferred = 0;
        (*worker)->s->read = 0;
    }
    return APR_SUCCESS;
}

/* No-op: this scheduler keeps no state that needs periodic aging. */
static apr_status_t age(proxy_balancer *balancer, server_rec *s)
{
    return APR_SUCCESS;
}

/* Provider vtable: name, finder, updater (unused), reset, age. */
static const proxy_balancer_method bytraffic =
{
    "bytraffic",
    &find_best_bytraffic,
    NULL,
    &reset,
    &age
};

static void register_hook(apr_pool_t *p)
{
    /* Only the mpm_winnt has child init hook handler.
     * make sure that we are called after the mpm
     * initializes and after the mod_proxy
     */
    ap_register_provider(p, PROXY_LBMETHOD, "bytraffic", "0", &bytraffic);
}

AP_DECLARE_MODULE(lbmethod_bytraffic) = {
    STANDARD20_MODULE_STUFF,
    NULL,           /* create per-directory config structure */
    NULL,           /* merge per-directory config structures */
    NULL,           /* create per-server config structure */
    NULL,           /* merge per-server config structures */
    NULL,           /* command apr_table_t */
    register_hook   /* register hooks */
};
{ "content_hash": "43b535a5e6208adc5a886474340008fb", "timestamp": "", "source": "github", "line_count": 155, "max_line_length": 112, "avg_line_length": 34.329032258064515, "alnum_prop": 0.5540311971433941, "repo_name": "ZHYfeng/malicious-code-conceal", "id": "b3fe556a55050dcf58c65887ca91ebc0793eaaab", "size": "6119", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "3-2-multi-programmes-big/httpd-8dffc15e54a846e692e17e8761b3350df2d7f254/modules/proxy/balancers/mod_lbmethod_bytraffic.c", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ASP", "bytes": "579" }, { "name": "Assembly", "bytes": "5001" }, { "name": "Awk", "bytes": "36184" }, { "name": "Batchfile", "bytes": "60629" }, { "name": "C", "bytes": "114808650" }, { "name": "C++", "bytes": "10029499" }, { "name": "CMake", "bytes": "57202" }, { "name": "CSS", "bytes": "41400" }, { "name": "DTrace", "bytes": "12419" }, { "name": "Dockerfile", "bytes": "4182" }, { "name": "Forth", "bytes": "199744" }, { "name": "Frege", "bytes": "3519320" }, { "name": "GDB", "bytes": "11041" }, { "name": "HTML", "bytes": "208126" }, { "name": "JavaScript", "bytes": "1822843" }, { "name": "Lex", "bytes": "9956" }, { "name": "Lua", "bytes": "109203" }, { "name": "M4", "bytes": "141034" }, { "name": "Makefile", "bytes": "565761" }, { "name": "PHP", "bytes": "2728" }, { "name": "Perl", "bytes": "22259" }, { "name": "Perl 6", "bytes": "11115" }, { "name": "PowerShell", "bytes": "2037" }, { "name": "Python", "bytes": "114162" }, { "name": "Roff", "bytes": "153364" }, { "name": "Ruby", "bytes": "11216" }, { "name": "Shell", "bytes": "118175" }, { "name": "SourcePawn", "bytes": "10547" }, { "name": "TeX", "bytes": "2582" }, { "name": "Visual Basic", "bytes": "1074" }, { "name": "XSLT", "bytes": "331268" }, { "name": "Yacc", "bytes": "9085" } ], "symlink_target": "" }
<?php
// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle.  If not, see <http://www.gnu.org/licenses/>.

/**
 * Class profile_field_checkbox
 *
 * Custom user profile field of type "checkbox".
 *
 * @copyright 2008 onwards Shane Elliot {@link http://pukunui.com}
 * @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
 */
class profile_field_checkbox extends profile_field_base {

    /**
     * Constructor method.
     * Pulls out the options for the checkbox from the database and sets the
     * the corresponding key for the data if it exists
     *
     * @param int $fieldid id of the profile field definition
     * @param int $userid id of the user whose value is loaded
     */
    public function profile_field_checkbox($fieldid=0, $userid=0) {
        global $DB;
        // First call parent constructor.
        $this->profile_field_base($fieldid, $userid);

        if (!empty($this->field)) {
            $datafield = $DB->get_field('user_info_data', 'data',
                array('userid' => $this->userid, 'fieldid' => $this->fieldid));
            // A stored '0' is valid data, so compare strictly against false
            // (get_field's "no row" result) instead of using empty().
            if ($datafield !== false) {
                $this->data = $datafield;
            } else {
                $this->data = $this->field->defaultdata;
            }
        }
    }

    /**
     * Add elements for editing the profile field value.
     *
     * @param moodleform $mform form the checkbox element is appended to
     */
    public function edit_field_add($mform) {
        // Create the form field.
        $checkbox = $mform->addElement('advcheckbox', $this->inputname, format_string($this->field->name));
        if ($this->data == '1') {
            $checkbox->setChecked(true);
        }
        $mform->setType($this->inputname, PARAM_BOOL);
        // Only enforce "required" for users without the user:update capability,
        // so admins can still save the form with the box unticked.
        if ($this->is_required() and !has_capability('moodle/user:update', context_system::instance())) {
            $mform->addRule($this->inputname, get_string('required'), 'nonzero', null, 'client');
        }
    }

    /**
     * Display the data for this field as a read-only (disabled) checkbox.
     *
     * @return string HTML for the disabled checkbox input.
     */
    public function display_data() {
        // Fixed: removed an unused $options stdClass (para flag) that was
        // created but never read.
        $checked = intval($this->data) === 1 ? 'checked="checked"' : '';
        return '<input disabled="disabled" type="checkbox" name="'.$this->inputname.'" '.$checked.' />';
    }
}
{ "content_hash": "ec7f9d5877d226095ac4881294ceb038", "timestamp": "", "source": "github", "line_count": 80, "max_line_length": 129, "avg_line_length": 34.3375, "alnum_prop": 0.6082999635966508, "repo_name": "miguelhidrogo/moodle_auth_cors", "id": "68b75a94a58d9475ba56dabbc8a5c765571b2b0a", "size": "3024", "binary": false, "copies": "155", "ref": "refs/heads/master", "path": "user/profile/field/checkbox/field.class.php", "mode": "33188", "license": "mit", "language": [ { "name": "ActionScript", "bytes": "1205" }, { "name": "CSS", "bytes": "1250739" }, { "name": "Cucumber", "bytes": "1573954" }, { "name": "HTML", "bytes": "418957" }, { "name": "Java", "bytes": "14870" }, { "name": "JavaScript", "bytes": "12201919" }, { "name": "PHP", "bytes": "66948375" }, { "name": "PLSQL", "bytes": "4867" }, { "name": "Perl", "bytes": "20769" }, { "name": "XSLT", "bytes": "33489" } ], "symlink_target": "" }
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.flowable.camel.cdi.std;

import static org.assertj.core.api.Assertions.assertThat;

import java.util.List;

import javax.inject.Inject;

import org.apache.camel.CamelContext;
import org.apache.camel.Route;
import org.apache.camel.builder.RouteBuilder;
import org.flowable.engine.runtime.Execution;
import org.flowable.engine.runtime.ProcessInstance;
import org.flowable.engine.test.Deployment;
import org.junit.After;
import org.junit.Test;

/**
 * Adapted from {@link AsyncPingTest}.
 *
 * Runs the asyncPingProcess definition against a CDI-managed CamelContext:
 * the process's service endpoint hands off to a SEDA queue, which then
 * feeds the message back into the process's receive endpoint.
 *
 * @author Zach Visagie
 */
public class CdiAsyncPingTest extends StdCamelCdiFlowableTestCase {

    @Inject
    protected CamelContext camelContext;

    @Override
    public void setUp() throws Exception {
        super.setUp();
        // Register the two routes the test depends on: process -> SEDA queue,
        // then SEDA queue -> back into the process (asynchronous hand-off).
        camelContext.addRoutes(new RouteBuilder() {

            @Override
            public void configure() throws Exception {
                from("flowable:asyncPingProcess:serviceAsyncPing").to("seda:continueAsync");
                from("seda:continueAsync").to("flowable:asyncPingProcess:receiveAsyncPing");
            }
        });
    }

    // Stop and remove every route registered on the shared CamelContext so
    // the routes added in setUp do not leak into other tests.
    @After
    public void tearDown() throws Exception {
        List<Route> routes = camelContext.getRoutes();
        for (Route r : routes) {
            camelContext.stopRoute(r.getId());
            camelContext.removeRoute(r.getId());
        }
    }

    @Test
    @Deployment(resources = { "process/asyncPing.bpmn20.xml" })
    public void testRunProcess() throws Exception {
        // Starting the process leaves two executions behind (asserted below)
        // and a single async job waiting to be executed.
        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("asyncPingProcess");
        List<Execution> executionList = runtimeService.createExecutionQuery().list();
        assertThat(executionList).hasSize(2);

        // Execute the pending job, which pushes the message through the
        // Camel/SEDA routes configured in setUp.
        managementService.executeJob(managementService.createJobQuery().processInstanceId(processInstance.getId()).singleResult().getId());

        // NOTE(review): fixed sleep gives the asynchronous SEDA consumer time
        // to deliver; this can be flaky on slow machines - consider a polling
        // wait if the base class offers one.
        Thread.sleep(1500);

        // All executions are gone and the process instance has completed.
        executionList = runtimeService.createExecutionQuery().list();
        assertThat(executionList).isEmpty();
        assertThat(runtimeService.createProcessInstanceQuery().processInstanceId(processInstance.getId()).count()).isZero();
    }
}
{ "content_hash": "ba88a15b9c55d494df43a390f5163797", "timestamp": "", "source": "github", "line_count": 79, "max_line_length": 139, "avg_line_length": 34.949367088607595, "alnum_prop": 0.6896052155016299, "repo_name": "paulstapleton/flowable-engine", "id": "a87ff315a1db289293a7daaf6eb3508b6ca2f541", "size": "2761", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "modules/flowable-camel-cdi/src/test/java/org/flowable/camel/cdi/std/CdiAsyncPingTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "166" }, { "name": "CSS", "bytes": "688913" }, { "name": "Dockerfile", "bytes": "6367" }, { "name": "Groovy", "bytes": "482" }, { "name": "HTML", "bytes": "1100650" }, { "name": "Java", "bytes": "33678803" }, { "name": "JavaScript", "bytes": "12395741" }, { "name": "PLSQL", "bytes": "109354" }, { "name": "PLpgSQL", "bytes": "11691" }, { "name": "SQLPL", "bytes": "1265" }, { "name": "Shell", "bytes": "19145" } ], "symlink_target": "" }
---
layout: post
title: "An Illustrative Guide to Coffee"
excerpt: "One of the things I love about being in the 9rules Network is the community of bloggers it encompasses. While reading the Bright Meadow blog today I stumbled across a flickr picture she ran across."
modified:
categories: opinions
tags: [Opinion]
image:
  feature: opinions-default.jpg
  credit: WeGraphics
  creditlink: http://wegraphics.net/downloads/free-ultimate-blurred-background-pack/
comments: true
share: true
author:
---

Lokesh Dhakar’s guide to coffee is easy to understand and something I think most people would welcome seeing in their favorite coffee house. It answers common questions in a no-nonsense way.

![Guide to Coffee](/images/coffee_drinks.png){: .pull-center}

Check out the full-sized copy on Lokesh's website [here](http://lokeshdhakar.com/media/posts/coffee-drinks-illustrated/coffee%20drinks.png).
{ "content_hash": "e3475f0913c7e35dbaa00680bd2d3978", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 208, "avg_line_length": 40.72727272727273, "alnum_prop": 0.7834821428571429, "repo_name": "coffeedrinkers/coffeedrinkers.github.com", "id": "ea9df33a41f46f7d6bbcd58dd275477be569c861", "size": "902", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "_posts/opinions/2007-10-15-An-Illustrative-Guide-to-Coffee.md", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "615" }, { "name": "CSS", "bytes": "50154" }, { "name": "HTML", "bytes": "18734" }, { "name": "JavaScript", "bytes": "78237" }, { "name": "Ruby", "bytes": "2196" } ], "symlink_target": "" }
// Fold states reported by ChannelSectionHeaderView.
// NOTE(review): the values are declared flag-style (1 << 0, 1 << 1) but the
// type is NS_ENUM rather than NS_OPTIONS; confirm the values are never combined.
typedef NS_ENUM(NSInteger, ChannelSectionHeaderViewType) {
    ChannelSectionHeaderViewTypeClose = 1 << 0,  // section folded/closed
    ChannelSectionHeaderViewTypeOpen = 1 << 1    // section unfolded/open
};

// Receives fold/unfold notifications from a section header.
@protocol ChannelSectionHeaderViewDelegate <NSObject>

// Reports a fold-state change together with a tag (presumably the section
// index assigned by the owner - verify against the implementation).
- (void)foldChannel:(ChannelSectionHeaderViewType)type tag:(NSInteger)tag;

@end

// Header view for a channel section, with a title and a fold toggle.
@interface ChannelSectionHeaderView : UIView

// weak to avoid a retain cycle with the owning controller.
@property (nonatomic, weak) id<ChannelSectionHeaderViewDelegate>delegate;

// Sets the title text shown in the header.
- (void)updateContent:(NSString *)title;

// Sets the header's fold indicator (YES/NO meaning defined by the implementation).
- (void)setFoldState:(BOOL)state;

@end
{ "content_hash": "67c82686797fa58ffd40f56816ff317b", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 74, "avg_line_length": 26.105263157894736, "alnum_prop": 0.7903225806451613, "repo_name": "fmok/FMChannelManageDemo", "id": "52e1f1e4b46e278b89a1e430def66edd2eaaf2ca", "size": "667", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "DemoHaHaHa/View/ChannelSectionHeaderView.h", "mode": "33188", "license": "mit", "language": [ { "name": "Objective-C", "bytes": "119589" } ], "symlink_target": "" }
from dragonflow.db.db_common import DbUpdate from dragonflow.db import pub_sub_api from dragonflow.db.pubsub_drivers.redis_db_pubsub_driver \ import RedisPublisherAgent from dragonflow.db.pubsub_drivers.redis_db_pubsub_driver \ import RedisSubscriberAgent from dragonflow.tests import base as tests_base import mock from oslo_serialization import jsonutils class TestRedisPubSub(tests_base.BaseTestCase): def setUp(self): super(TestRedisPubSub, self).setUp() self.RedisPublisherAgent = RedisPublisherAgent() self.RedisSubscriberAgent = RedisSubscriberAgent() def test_publish_success(self): client = mock.Mock() self.RedisPublisherAgent.client = client client.publish.return_value = 1 update = DbUpdate("router", "key", "action", "value", topic='teststring') result = self.RedisPublisherAgent.send_event(update, 'teststring') self.assertIsNone(result) def test_subscribe_success(self): pubsub = mock.Mock() self.RedisSubscriberAgent.pub_sub = pubsub update = DbUpdate("router", "key", "action", "value", topic='teststring') event_json = jsonutils.dumps(update.to_dict()) data = pub_sub_api.pack_message(event_json) self.RedisSubscriberAgent.pub_sub.listen.return_value = \ [{'type': 'message', 'data': data}] self.RedisSubscriberAgent.pub_sub.subscribe.return_value = 1 self.RedisSubscriberAgent.pub_sub.unsubscribe.return_value = 1 result = self.RedisSubscriberAgent.register_topic('subscribe') self.assertIsNone(result) result = self.RedisSubscriberAgent.unregister_topic('subscribe') self.RedisSubscriberAgent.db_changes_callback = mock.Mock() self.RedisSubscriberAgent.db_changes_callback.return_value = 1 self.assertIsNone(result)
{ "content_hash": "25be7efc456afe14b8c8959c90ea4535", "timestamp": "", "source": "github", "line_count": 50, "max_line_length": 74, "avg_line_length": 41.3, "alnum_prop": 0.6343825665859564, "repo_name": "FrankDuan/df_code", "id": "922303cf125dc9e9b60508009379c84517274ec6", "size": "2663", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "dragonflow/tests/unit/test_redis_pubsub.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Mako", "bytes": "1053" }, { "name": "Python", "bytes": "588918" }, { "name": "Ruby", "bytes": "2626" }, { "name": "Shell", "bytes": "39720" } ], "symlink_target": "" }
import sys from setuptools import setup, find_packages from fabric_encrypt.version import get_version with open("README.rst") as f: readme = f.read() long_description = readme install_requires = ["fab-classic==1.18.0"] setup( name="Fabric-encrypt", version=get_version("short"), description="Fabric-encrypt - securely store your production secrets.", long_description=long_description, author="Jervis Whitley", author_email="[email protected]", packages=find_packages(), install_requires=install_requires, classifiers=[ "Development Status :: 3 - Alpha", "Environment :: Console", "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Operating System :: MacOS :: MacOS X", "Operating System :: Unix", "Operating System :: POSIX", "Programming Language :: Python", "Programming Language :: Python :: 3.8", "Topic :: Software Development", "Topic :: Software Development :: Build Tools", "Topic :: Software Development :: Libraries", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: System :: Clustering", "Topic :: System :: Software Distribution", "Topic :: System :: Systems Administration", ], )
{ "content_hash": "9528ff6d3f650ad25b7bb01b31916afe", "timestamp": "", "source": "github", "line_count": 44, "max_line_length": 75, "avg_line_length": 31.545454545454547, "alnum_prop": 0.6340057636887608, "repo_name": "jtrain/fabric-encrypt", "id": "cbbed7965cbf6dd71d4113bce57665e1f83ee9ba", "size": "1412", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "setup.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Python", "bytes": "7108" } ], "symlink_target": "" }
[![Build Status](https://travis-ci.org/iisador/jBTCE.svg?branch=master)](https://travis-ci.org/iisador/jBTCE) [![Coverage Status](https://coveralls.io/repos/github/iisador/jBTCE/badge.svg?branch=master)](https://coveralls.io/github/iisador/jBTCE?branch=master) jBTCE ===== --- NOTE: Uses java version >= 1.8 --- Brief description -------------------- Trade api wrapper to www.btc-e.com. Provides full access to public and private API. Features ======== * Mirrors support (https://btc-e.com, https://btc-e.nz) * Public v2 API: [ticker](https://btc-e.com/api/2/btc_usd/ticker), [trades](https://btc-e.com/api/2/btc_usd/trades), [depth](https://btc-e.com/api/2/btc_usd/depth), [fee](https://btc-e.com/api/2/btc_usd/fee) * Public v3 API: [ticker](https://btc-e.com/api/3/ticker/btc_usd-btc-rur), [trades](https://btc-e.com/api/3/trades/btc_usd-btc-rur), [depth](https://btc-e.com/api/3/depth/btc_usd-btc-rur), [fee](https://btc-e.com/api/3/fee/btc_usd-btc-rur), [info](https://btc-e.com/api/3/info) * Private API - getInfo - OrderList - TransHistory - TradeHistory - CancelOrder - Trade How-To ====== use Public v2 api: ```java // Create public api using default connector PublicApiV2 api = new PublicApiV2(); Tick tick = api.getTick(BTC_USD); System.out.println(tick); ``` use Public v3 api: ```java // Create public api using default connector PublicApiV3 api = new PublicApiV3(); Map<Pair, Tick> ticks = api.getTicks(BTC_USD, BTC_RUR); ticks.entrySet().stream() .map(e -> String.format("%s: %s", e.getKey(), e.getValue())) .forEach(System.out::println); ``` or Private api: ```java String key = "..."; String secret = "..."; // Create private api using default connector PrivateApi api = new PrivateApi(key, secret); UserInfo info = api.getUserInfo(); System.out.println(info); ``` Next release goals: * Nonce fix
{ "content_hash": "682c194ae8d067bda709634ac27134d5", "timestamp": "", "source": "github", "line_count": 61, "max_line_length": 277, "avg_line_length": 30.852459016393443, "alnum_prop": 0.6657810839532412, "repo_name": "iisador/jBTCE", "id": "87a2fa875261a778c0567905915f754514103bb0", "size": "1882", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "200558" } ], "symlink_target": "" }
using System; namespace Storm.Mvvm.Navigation { [AttributeUsage(AttributeTargets.Property, Inherited = true, AllowMultiple = false)] public sealed class NavigationParameterAttribute : Attribute { public NavigationParameterAttribute() { Name = null; Mode = NavigationParameterMode.Required; } public NavigationParameterAttribute(string name) { Name = name; Mode = NavigationParameterMode.Required; } public string Name { get; set; } public NavigationParameterMode Mode { get; set; } } }
{ "content_hash": "38c100aa5aedbb85c50034c92b6decac", "timestamp": "", "source": "github", "line_count": 24, "max_line_length": 85, "avg_line_length": 21.75, "alnum_prop": 0.7413793103448276, "repo_name": "Julien-Mialon/StormXamarin", "id": "26878fdc506cce224972179f0ace3c40882da038", "size": "524", "binary": false, "copies": "2", "ref": "refs/heads/develop", "path": "StormXamarin/Storm.Mvvm/Navigation/NavigationParameterAttribute.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "621713" }, { "name": "Lex", "bytes": "1136" }, { "name": "PHP", "bytes": "4310" }, { "name": "Shell", "bytes": "1988" }, { "name": "Yacc", "bytes": "1526" } ], "symlink_target": "" }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1"> <title>graph-basics: Not compatible 👼</title> <link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" /> <link href="../../../../../bootstrap.min.css" rel="stylesheet"> <link href="../../../../../bootstrap-custom.css" rel="stylesheet"> <link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet"> <script src="../../../../../moment.min.js"></script> <!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries --> <!-- WARNING: Respond.js doesn't work if you view the page via file:// --> <!--[if lt IE 9]> <script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script> <script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script> <![endif]--> </head> <body> <div class="container"> <div class="navbar navbar-default" role="navigation"> <div class="container-fluid"> <div class="navbar-header"> <a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a> </div> <div id="navbar" class="collapse navbar-collapse"> <ul class="nav navbar-nav"> <li><a href="../..">clean / released</a></li> <li class="active"><a href="">8.14.1 / graph-basics - 8.8.0</a></li> </ul> </div> </div> </div> <div class="article"> <div class="row"> <div class="col-md-12"> <a href="../..">« Up</a> <h1> graph-basics <small> 8.8.0 <span class="label label-info">Not compatible 👼</span> </small> </h1> <p>📅 <em><script>document.write(moment("2022-05-17 07:36:45 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2022-05-17 07:36:45 UTC)</em><p> <h2>Context</h2> <pre># Packages matching: installed # Name # Installed # Synopsis base-bigarray base base-threads base base-unix base conf-findutils 1 Virtual package relying on findutils conf-gmp 4 Virtual package relying on a GMP lib system installation coq 8.14.1 Formal proof 
management system dune 3.1.1 Fast, portable, and opinionated build system ocaml 4.14.0 The OCaml compiler (virtual package) ocaml-base-compiler 4.14.0 Official release 4.14.0 ocaml-config 2 OCaml Switch Configuration ocaml-options-vanilla 1 Ensure that OCaml is compiled with no special options enabled ocamlfind 1.9.3 A library manager for OCaml zarith 1.12 Implements arithmetic and logical operations over arbitrary-precision integers # opam file: opam-version: &quot;2.0&quot; maintainer: &quot;[email protected]&quot; homepage: &quot;https://github.com/coq-contribs/graph-basics&quot; license: &quot;LGPL 2.1&quot; build: [make &quot;-j%{jobs}%&quot;] install: [make &quot;install&quot;] remove: [&quot;rm&quot; &quot;-R&quot; &quot;%{lib}%/coq/user-contrib/GraphBasics&quot;] depends: [ &quot;ocaml&quot; &quot;coq&quot; {&gt;= &quot;8.8&quot; &amp; &lt; &quot;8.9~&quot;} ] tags: [ &quot;keyword: graph theory&quot; &quot;keyword: Curry-Howard&#39;s isomorphism&quot; &quot;keyword: inductive definitions&quot; &quot;category: Mathematics/Combinatorics and Graph Theory&quot; &quot;date: April 2001&quot; ] authors: [ &quot;Jean Duprat&quot; ] bug-reports: &quot;https://github.com/coq-contribs/graph-basics/issues&quot; dev-repo: &quot;git+https://github.com/coq-contribs/graph-basics.git&quot; synopsis: &quot;A Coq toolkit for graph theory&quot; description: &quot;&quot;&quot; This library offers inductive definitions of basics in graph theory. The goal is to offer the possibility to write proofs and programs on graphs in the same formalism : the Coq language. 
It now contains : vertices, arcs, edges, degrees, graphs, directed graphs, paths, acyclic graphs, connected graphs and tree.&quot;&quot;&quot; flags: light-uninstall url { src: &quot;https://github.com/coq-contribs/graph-basics/archive/v8.8.0.tar.gz&quot; checksum: &quot;md5=b5d085350550d24babbaa1cac024df60&quot; } </pre> <h2>Lint</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Dry install 🏜️</h2> <p>Dry install with the current Coq version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam install -y --show-action coq-graph-basics.8.8.0 coq.8.14.1</code></dd> <dt>Return code</dt> <dd>5120</dd> <dt>Output</dt> <dd><pre>[NOTE] Package coq is already installed (current version is 8.14.1). The following dependencies couldn&#39;t be met: - coq-graph-basics -&gt; coq &lt; 8.9~ -&gt; ocaml &lt; 4.10 base of this switch (use `--unlock-base&#39; to force) No solution found, exiting </pre></dd> </dl> <p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-graph-basics.8.8.0</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Install dependencies</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Install 🚀</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Installation size</h2> <p>No files were installed.</p> <h2>Uninstall 🧹</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Missing removes</dt> <dd> none </dd> <dt>Wrong removes</dt> <dd> none </dd> </dl> </div> </div> </div> <hr/> <div class="footer"> <p class="text-center"> Sources are on <a 
href="https://github.com/coq-bench">GitHub</a> © Guillaume Claret 🐣 </p> </div> </div> <script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script> <script src="../../../../../bootstrap.min.js"></script> </body> </html>
{ "content_hash": "33636aee03d166d2cc99ba384b18d316", "timestamp": "", "source": "github", "line_count": 169, "max_line_length": 232, "avg_line_length": 43.15976331360947, "alnum_prop": 0.5512750205648478, "repo_name": "coq-bench/coq-bench.github.io", "id": "97448b49ed9cb30fdc9b1ccbabcfb3dbe0bcfa35", "size": "7319", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clean/Linux-x86_64-4.14.0-2.0.10/released/8.14.1/graph-basics/8.8.0.html", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
<?php namespace PlaygroundGame\Service\Factory; use PlaygroundGame\Controller\Frontend\TradingCardController; use Laminas\ServiceManager\Factory\FactoryInterface; use Interop\Container\ContainerInterface; class FrontendTradingCardControllerFactory implements FactoryInterface { public function __invoke(ContainerInterface $container, $requestedName, $options = null) { $controller = new TradingCardController($container); return $controller; } }
{ "content_hash": "7eb767a3b3ed96d627ec312552ff237e", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 92, "avg_line_length": 29.8125, "alnum_prop": 0.790356394129979, "repo_name": "gregorybesson/PlaygroundGame", "id": "2cf623baedfc7e6ca8a931c4215c3a88b409a4cb", "size": "477", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "src/Service/Factory/FrontendTradingCardControllerFactory.php", "mode": "33188", "license": "mit", "language": [ { "name": "ASP.NET", "bytes": "1305" }, { "name": "CSS", "bytes": "137" }, { "name": "HTML", "bytes": "415768" }, { "name": "JavaScript", "bytes": "1189938" }, { "name": "PHP", "bytes": "1940617" } ], "symlink_target": "" }
import { Component, OnInit, Input, Output, EventEmitter } from '@angular/core'; import { ItemState } from '../shared/item-state'; import { Item } from '../shared/item'; @Component({ selector: 'app-item', templateUrl: './item.component.html', styleUrls: ['./item.component.css'] }) export class ItemComponent implements OnInit { @Input() item: Item; @Input() index: number; @Output() onDone = new EventEmitter<ItemState>(); constructor() { } ngOnInit() { } toggleDone() { this.item.done = !this.item.done; const itemState: ItemState = { done: this.item.done, index: this.index } this.onDone.emit(itemState); } }
{ "content_hash": "4a6cd69c95bff3dda70e340ffb236227", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 79, "avg_line_length": 21.225806451612904, "alnum_prop": 0.6504559270516718, "repo_name": "railsstudent/angular-30", "id": "7d4a3bcd2cf2db1ac0bac6ae388051116f7816a6", "size": "658", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Day15-Localstorage/src/app/item/item.component.ts", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "29021" }, { "name": "HTML", "bytes": "55162" }, { "name": "JavaScript", "bytes": "44196" }, { "name": "TypeScript", "bytes": "217357" } ], "symlink_target": "" }
PROJECT_NAME=$1 GIT_TARGET_URL=$2 GIT_TARGET_BRANCH=$3 GIT_TARGET_SSH_KEY=$4 BASE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" WORKING_PATH="$BASE_DIR/repos/$PROJECT_NAME/${GIT_TARGET_BRANCH}" export GIT_SSH_KEY="$GIT_TARGET_SSH_KEY" export GIT_SSH="$BASE_DIR/git-ssh-helper.sh" if [ -d "$WORKING_PATH" ]; then echo "Pushing Git data to $GIT_TARGET_URL..." pushd "$WORKING_PATH" git remote remove bridge git remote add bridge $GIT_TARGET_URL git push bridge $GIT_TARGET_BRANCH:$GIT_TARGET_BRANCH -f popd fi
{ "content_hash": "502ff81c7f1fe4135a96b13e9f15296e", "timestamp": "", "source": "github", "line_count": 20, "max_line_length": 65, "avg_line_length": 27.9, "alnum_prop": 0.6559139784946236, "repo_name": "larsxschneider/git-p4-bridge", "id": "8aed9a2b07f5fae35b9d18ad1dde59aa7d3081ba", "size": "728", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "push-git-via-ssh.sh", "mode": "33261", "license": "bsd-2-clause", "language": [ { "name": "Shell", "bytes": "5578" } ], "symlink_target": "" }
define(function () { 'use strict'; return { local: { path: 'text!tests/assets/text/local.html', result: '<!--\n/**\n * Copyright © 2016 Magento. All rights reserved.\n * See COPYING.txt for license details.\n */\n-->\n<span>Local Template</span>' }, external: { path: 'text!tests/assets/text/external.html', result: '<!--\n/**\n * Copyright © 2016 Magento. All rights reserved.\n * See COPYING.txt for license details.\n */\n-->\n<span>External Template</span>' } }; });
{ "content_hash": "90e50a5f92a5366776a3f39832d458b3", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 165, "avg_line_length": 37.666666666666664, "alnum_prop": 0.552212389380531, "repo_name": "enettolima/magento-training", "id": "d20ecb0c87f40962b8a03b9dfd036663baf5b0dc", "size": "665", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "magento2ce/dev/tests/js/jasmine/assets/text/config.js", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "22648" }, { "name": "CSS", "bytes": "3382928" }, { "name": "HTML", "bytes": "8749335" }, { "name": "JavaScript", "bytes": "7355635" }, { "name": "PHP", "bytes": "58607662" }, { "name": "Perl", "bytes": "10258" }, { "name": "Shell", "bytes": "41887" }, { "name": "XSLT", "bytes": "19889" } ], "symlink_target": "" }
"""misc functions that have no real home.""" import logging import gzip from typing import Tuple, Iterator, Any from collections import defaultdict def init_logging(log): log.setLevel(logging.INFO) ch = logging.StreamHandler() ch.setLevel(logging.INFO) log.addHandler(ch) def try_len(o: Any) -> int: """ Return len of `o` or None if `o` doesn't support len >>> try_len([1, 2]) 2 >>> try_len(print) >>> try_len(None) """ if not o: return None try: return len(o) except TypeError: return None def parse_version(version: str) -> tuple: """Parse a string formatted X.Y.Z version number into a tuple >>> parse_version('10.2.3') (10, 2, 3) """ if not version: return None major, minor, patch = version.split('.', maxsplit=3) return (int(major), int(minor), int(patch)) def parse_table(fq_table: str) -> Tuple[str, str]: """Parse a tablename into tuple(<schema>, <table>). Schema defaults to doc if the table name doesn't contain a schema. >>> parse_table('x.users') ('x', 'users') >>> parse_table('users') ('doc', 'users') """ parts = fq_table.split('.') if len(parts) == 1: return 'doc', parts[0] elif len(parts) == 2: return parts[0], parts[1] else: raise ValueError def as_bulk_queries(queries, bulk_size): """Group a iterable of (stmt, args) by stmt into (stmt, bulk_args). bulk_args will be a list of the args grouped by stmt. 
len(bulk_args) will be <= bulk_size """ stmt_dict = defaultdict(list) for stmt, args in queries: bulk_args = stmt_dict[stmt] bulk_args.append(args) if len(bulk_args) == bulk_size: yield stmt, bulk_args del stmt_dict[stmt] for stmt, bulk_args in stmt_dict.items(): yield stmt, bulk_args def get_lines(filename: str) -> Iterator[str]: """Create an iterator that returns the lines of a utf-8 encoded file.""" if filename.endswith('.gz'): with gzip.open(filename, 'r') as f: for line in f: yield line.decode('utf-8') else: with open(filename, 'r', encoding='utf-8') as f: for line in f: yield line def as_statements(lines: Iterator[str]) -> Iterator[str]: """Create an iterator that transforms lines into sql statements. Statements within the lines must end with ";" The last statement will be included even if it does not end in ';' >>> list(as_statements(['select * from', '-- comments are filtered', 't;'])) ['select * from t'] >>> list(as_statements(['a;', 'b', 'c;', 'd', ' '])) ['a', 'b c', 'd'] """ lines = (l.strip() for l in lines if l) lines = (l for l in lines if l and not l.startswith('--')) parts = [] for line in lines: parts.append(line.rstrip(';')) if line.endswith(';'): yield ' '.join(parts) parts.clear() if parts: yield ' '.join(parts) def break_iterable(iterable, pred): """Break a iterable on the item that matches the predicate into lists. The item that matched the predicate is not included in the result. >>> list(break_iterable([1, 2, 3, 4], lambda x: x == 3)) [[1, 2], [4]] """ sublist = [] for i in iterable: if pred(i): yield sublist sublist = [] else: sublist.append(i) yield sublist
{ "content_hash": "dc0ea3f63d3e055014d8c7b99c0701f8", "timestamp": "", "source": "github", "line_count": 134, "max_line_length": 80, "avg_line_length": 26.111940298507463, "alnum_prop": 0.5724492712203487, "repo_name": "mikethebeer/cr8", "id": "898661323de74a4142134e799c2728a3318618ed", "size": "3499", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "cr8/misc.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "107689" } ], "symlink_target": "" }
module Jasonette class Base include Properties attr_reader :context attr_reader :attributes def initialize context @context = context @attributes = {} encode(&::Proc.new) if ::Kernel.block_given? end # Fixed for below error : # IOError - not opened for reading: # activesupport (5.0.1) lib/active_support/core_ext/object/json.rb:130:in `as_json' # Eventually called by multi_json/adapter.rb:25:in `dump' def as_json(options = nil) attributes! end def encode binding = eval "self", ::Proc.new.binding if (binding.method(:encode).parameters.first.include?(:req) rescue false) binding.encode(self, &::Proc.new) else instance_eval(&::Proc.new) end self end def empty? properties_empty? && @attributes.empty? end def klass name set! "class", name end alias :css_class :klass def _method name = nil if block_given? set! 'method', _scope { yield } else set! 'method', name end end alias :action_method :_method def with_attributes @attributes.merge! _scope { yield self } self end def inline json @attributes.merge! JSON.parse(json) self end def attributes! merge_properties @attributes end def set! key, value=nil, *args result = if ::Kernel.block_given? if !_blank?(value) # comments @post.comments { |comment| ... } # { "comments": [ { ... }, { ... } ] } _scope{ array! value, &::Proc.new } else # comments { ... } # { "comments": ... } _merge_block(key){ yield self } end elsif args.empty? if _is_collection?(value) || Jasonette::Base === value # person another_jasonette # { "person": { ... } } # comments [ { content: "...", created_at: "..." } ] # { "comments": [ { "content": "hello", "created_at": "..." } ] } # comments { content: "...", created_at: "..." } # { "comments": [ { "content": "hello", "created_at": "..." } ] } _scope{ merge! value } else _key(value) end elsif _is_collection?(value) # comments @post.comments, :content, :created_at # { "comments": [ { "content": "hello", "created_at": "..." }, { "content": "world", "created_at": "..." } ] } _scope{ array! 
value, *args } else # author @post.creator, :name, :email_address # { "author": { "name": "David", "email_address": "[email protected]" } } _merge_block(key){ extract! value, *args } end _set_key_value key, result self end def array! collection = [], *args array = if collection.nil? [] elsif ::Kernel.block_given? _map_collection(collection, &::Proc.new) else _map_collection(collection) { |element| extract! element, *args } end merge! array end def extract! object, *attributes if ::Hash === object _extract_hash_values(object, attributes) elsif Jasonette::Base === object _extract_hash_values(object.attributes!, attributes) else _extract_method_values(object, attributes) end end def merge! key case key when Jasonette::Base merge! key.attributes! when Hash key.each{ |key, value| set! _key(key), value } when Array _set_value key end @attributes end private def implicit_set! name, *args, &block if property_names.include? name with_attributes { property_set! name, *args, &block } else set!(name) { encode(&block) } end end def attr_value name if property_names.include? name instance_variable_get :"@#{name}" else @attributes[name.to_s] end end def method_missing name, *args, &block if ::Kernel.block_given? implicit_set! name, *args, &block else if property_names.include? name return property_get! name else if args.present? set! name, *args else raise NoMethodError, "undefined method `#{name}`" end end end end def _extract_hash_values(object, attributes) if attributes.blank? object.each{ |key, value| _set_key_value key, value } else attributes.each{ |key| _set_key_value key, object.fetch(key) } end end def _extract_method_values(object, attributes) if attributes.blank? 
_set_value object else attributes.each{ |key| _set_key_value key, object.public_send(key) } end end def _merge_block(key) current_value = @attributes.fetch(_key(key), {}) new_value = _scope{ yield self } _merge_values(current_value, new_value) end def _merge_values(current_value, updates) if _blank?(updates) current_value elsif _blank?(current_value) || updates.nil? || current_value.empty? && ::Array === updates updates elsif ::Array === current_value && ::Array === updates current_value + updates elsif ::Hash === current_value && ::Hash === updates current_value.merge(updates) else raise "MergeError" end end def _key(key) key.to_s end def _set_key_value(key, value) raise "ArrayError" if ::Array === @attributes @attributes[_key(key)] = value unless _blank?(value) end def _set_value(value) raise "HashError" if ::Hash === @attributes && !_blank? @attributes = value unless _blank?(value) end def _map_collection(collection) collection.map do |element| _scope{ yield element } end # - [BLANK] end def _scope parent_attributes = @attributes @attributes = {} yield @attributes ensure @attributes = parent_attributes end def _is_collection?(object) _object_respond_to?(object, :map, :count) # && NON_ENUMERABLES.none?{ |klass| klass === object } end def _blank?(value=@attributes) value.nil? ? true : value.blank? end def _object_respond_to?(object, *methods) methods.all?{ |m| object.respond_to?(m) } end end end
{ "content_hash": "4d33e873a2d738f131fc63d830e5ea57", "timestamp": "", "source": "github", "line_count": 249, "max_line_length": 118, "avg_line_length": 25.823293172690764, "alnum_prop": 0.5528771384136858, "repo_name": "mwlang/jasonette-rails", "id": "0fa288ea94802f52c32f2f322629c380af22749b", "size": "6430", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/jasonette/core/base.rb", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "1941" }, { "name": "HTML", "bytes": "6760" }, { "name": "JavaScript", "bytes": "1136" }, { "name": "Ruby", "bytes": "137915" } ], "symlink_target": "" }
/* -*- mode: C -*- */ /* vim:set ts=2 sw=2 sts=2 et: */ /* IGraph library. Copyright (C) 2007-2012 Gabor Csardi <[email protected]> 334 Harvard street, Cambridge, MA 02139 USA This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ #ifndef LAPACK_INTERNAL_H #define LAPACK_INTERNAL_H /* Note: only files calling the LAPACK routines directly need to include this header. */ #include "igraph_types.h" #include "config.h" #ifndef INTERNAL_LAPACK #define igraphdgeevx_ dgeevx_ #define igraphdgeev_ dgeev_ #define igraphdgebak_ dgebak_ #define igraphxerbla_ xerbla_ #define igraphdgebal_ dgebal_ #define igraphdisnan_ disnan_ #define igraphdlaisnan_ dlaisnan_ #define igraphdgehrd_ dgehrd_ #define igraphdgehd2_ dgehd2_ #define igraphdlarf_ dlarf_ #define igraphiladlc_ iladlc_ #define igraphiladlr_ iladlr_ #define igraphdlarfg_ dlarfg_ #define igraphdlapy2_ dlapy2_ #define igraphdlahr2_ dlahr2_ #define igraphdlacpy_ dlacpy_ #define igraphdlarfb_ dlarfb_ #define igraphilaenv_ ilaenv_ #define igraphieeeck_ ieeeck_ #define igraphiparmq_ iparmq_ #define igraphdhseqr_ dhseqr_ #define igraphdlahqr_ dlahqr_ #define igraphdlabad_ dlabad_ #define igraphdlanv2_ dlanv2_ #define igraphdlaqr0_ dlaqr0_ #define igraphdlaqr3_ dlaqr3_ #define igraphdlaqr4_ dlaqr4_ #define igraphdlaqr2_ dlaqr2_ #define igraphdlaset_ dlaset_ #define igraphdormhr_ dormhr_ #define igraphdormqr_ dormqr_ 
#define igraphdlarft_ dlarft_ #define igraphdorm2r_ dorm2r_ #define igraphdtrexc_ dtrexc_ #define igraphdlaexc_ dlaexc_ #define igraphdlange_ dlange_ #define igraphdlassq_ dlassq_ #define igraphdlarfx_ dlarfx_ #define igraphdlartg_ dlartg_ #define igraphdlasy2_ dlasy2_ #define igraphdlaqr5_ dlaqr5_ #define igraphdlaqr1_ dlaqr1_ #define igraphdlascl_ dlascl_ #define igraphdorghr_ dorghr_ #define igraphdorgqr_ dorgqr_ #define igraphdorg2r_ dorg2r_ #define igraphdtrevc_ dtrevc_ #define igraphdlaln2_ dlaln2_ #define igraphdladiv_ dladiv_ #define igraphdsyevr_ dsyevr_ #define igraphdlansy_ dlansy_ #define igraphdormtr_ dormtr_ #define igraphdormql_ dormql_ #define igraphdorm2l_ dorm2l_ #define igraphdstebz_ dstebz_ #define igraphdlaebz_ dlaebz_ #define igraphdstein_ dstein_ #define igraphdlagtf_ dlagtf_ #define igraphdlagts_ dlagts_ #define igraphdlarnv_ dlarnv_ #define igraphdlaruv_ dlaruv_ #define igraphdstemr_ dstemr_ #define igraphdlae2_ dlae2_ #define igraphdlaev2_ dlaev2_ #define igraphdlanst_ dlanst_ #define igraphdlarrc_ dlarrc_ #define igraphdlarre_ dlarre_ #define igraphdlarra_ dlarra_ #define igraphdlarrb_ dlarrb_ #define igraphdlaneg_ dlaneg_ #define igraphdlarrd_ dlarrd_ #define igraphdlarrk_ dlarrk_ #define igraphdlasq2_ dlasq2_ #define igraphdlasq3_ dlasq3_ #define igraphdlasq4_ dlasq4_ #define igraphdlasq5_ dlasq5_ #define igraphdlasq6_ dlasq6_ #define igraphdlasrt_ dlasrt_ #define igraphdlarrj_ dlarrj_ #define igraphdlarrr_ dlarrr_ #define igraphdlarrv_ dlarrv_ #define igraphdlar1v_ dlar1v_ #define igraphdlarrf_ dlarrf_ #define igraphdsterf_ dsterf_ #define igraphdsytrd_ dsytrd_ #define igraphdlatrd_ dlatrd_ #define igraphdsytd2_ dsytd2_ #define igraphdlanhs_ dlanhs_ #define igraphdgeqr2_ dgeqr2_ #define igraphdtrsen_ dtrsen_ #define igraphdlacn2_ dlacn2_ #define igraphdtrsyl_ dtrsyl_ #define igraphdlasr_ dlasr_ #define igraphdsteqr_ dsteqr_ #define igraphdgesv_ dgesv_ #define igraphdgetrf_ dgetrf_ #define igraphdgetf2_ dgetf2_ #define igraphdlaswp_ 
dlaswp_ #define igraphdgetrs_ dgetrs_ #define igraphlen_trim_ len_trim_ #define igraph_dlamc1_ dlamc1_ #define igraph_dlamc2_ dlamc2_ #define igraph_dlamc3_ dlamc3_ #define igraph_dlamc4_ dlamc4_ #define igraph_dlamc5_ dlamc5_ #endif int igraphdgetrf_(int *m, int *n, igraph_real_t *a, int *lda, int *ipiv, int *info); int igraphdgetrs_(char *trans, int *n, int *nrhs, igraph_real_t *a, int *lda, int *ipiv, igraph_real_t *b, int *ldb, int *info); int igraphdgesv_(int *n, int *nrhs, igraph_real_t *a, int *lda, int *ipiv, igraph_real_t *b, int *ldb, int *info); igraph_real_t igraphdlapy2_(igraph_real_t *x, igraph_real_t *y); int igraphdsyevr_(char *jobz, char *range, char *uplo, int *n, igraph_real_t *a, int *lda, igraph_real_t *vl, igraph_real_t *vu, int * il, int *iu, igraph_real_t *abstol, int *m, igraph_real_t *w, igraph_real_t *z, int *ldz, int *isuppz, igraph_real_t *work, int *lwork, int *iwork, int *liwork, int *info); int igraphdgeev_(char *jobvl, char *jobvr, int *n, igraph_real_t *a, int *lda, igraph_real_t *wr, igraph_real_t *wi, igraph_real_t *vl, int *ldvl, igraph_real_t *vr, int *ldvr, igraph_real_t *work, int *lwork, int *info); int igraphdgeevx_(char *balanc, char *jobvl, char *jobvr, char *sense, int *n, igraph_real_t *a, int *lda, igraph_real_t *wr, igraph_real_t *wi, igraph_real_t *vl, int *ldvl, igraph_real_t *vr, int *ldvr, int *ilo, int *ihi, igraph_real_t *scale, igraph_real_t *abnrm, igraph_real_t *rconde, igraph_real_t *rcondv, igraph_real_t *work, int *lwork, int *iwork, int *info); int igraphdgehrd_(int *n, int *ilo, int *ihi, igraph_real_t *A, int *lda, igraph_real_t *tau, igraph_real_t *work, int *lwork, int *info); #endif
{ "content_hash": "ba664f4e2b69d9835ffabfc336bd37ee", "timestamp": "", "source": "github", "line_count": 178, "max_line_length": 73, "avg_line_length": 32.87078651685393, "alnum_prop": 0.7378225944283029, "repo_name": "hlzz/dotfiles", "id": "1466b9a2fa8c4d12c99e4108662446c64b9a47f8", "size": "5851", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "science/igraph-0.7.1/src/igraph_lapack_internal.h", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "AppleScript", "bytes": "1240" }, { "name": "Arc", "bytes": "38" }, { "name": "Assembly", "bytes": "449468" }, { "name": "Batchfile", "bytes": "16152" }, { "name": "C", "bytes": "102303195" }, { "name": "C++", "bytes": "155056606" }, { "name": "CMake", "bytes": "7200627" }, { "name": "CSS", "bytes": "179330" }, { "name": "Cuda", "bytes": "30026" }, { "name": "D", "bytes": "2152" }, { "name": "Emacs Lisp", "bytes": "14892" }, { "name": "FORTRAN", "bytes": "5276" }, { "name": "Forth", "bytes": "3637" }, { "name": "GAP", "bytes": "14495" }, { "name": "GLSL", "bytes": "438205" }, { "name": "Gnuplot", "bytes": "327" }, { "name": "Groff", "bytes": "518260" }, { "name": "HLSL", "bytes": "965" }, { "name": "HTML", "bytes": "2003175" }, { "name": "Haskell", "bytes": "10370" }, { "name": "IDL", "bytes": "2466" }, { "name": "Java", "bytes": "219109" }, { "name": "JavaScript", "bytes": "1618007" }, { "name": "Lex", "bytes": "119058" }, { "name": "Lua", "bytes": "23167" }, { "name": "M", "bytes": "1080" }, { "name": "M4", "bytes": "292475" }, { "name": "Makefile", "bytes": "7112810" }, { "name": "Matlab", "bytes": "1582" }, { "name": "NSIS", "bytes": "34176" }, { "name": "Objective-C", "bytes": "65312" }, { "name": "Objective-C++", "bytes": "269995" }, { "name": "PAWN", "bytes": "4107117" }, { "name": "PHP", "bytes": "2690" }, { "name": "Pascal", "bytes": "5054" }, { "name": "Perl", "bytes": "485508" }, { "name": "Pike", "bytes": "1338" }, { "name": "Prolog", "bytes": "5284" }, { "name": "Python", 
"bytes": "16799659" }, { "name": "QMake", "bytes": "89858" }, { "name": "Rebol", "bytes": "291" }, { "name": "Ruby", "bytes": "21590" }, { "name": "Scilab", "bytes": "120244" }, { "name": "Shell", "bytes": "2266191" }, { "name": "Slash", "bytes": "1536" }, { "name": "Smarty", "bytes": "1368" }, { "name": "Swift", "bytes": "331" }, { "name": "Tcl", "bytes": "1911873" }, { "name": "TeX", "bytes": "11981" }, { "name": "Verilog", "bytes": "3893" }, { "name": "VimL", "bytes": "595114" }, { "name": "XSLT", "bytes": "62675" }, { "name": "Yacc", "bytes": "307000" }, { "name": "eC", "bytes": "366863" } ], "symlink_target": "" }
'use strict'; exports.__generic = function (t, a) { var iterator = t.call(this); a.deep(iterator.next(), { value: [0, '1'], done: false }); a.deep(iterator.next(), { value: [1, '2'], done: false }); a.deep(iterator.next(), { value: [2, '3'], done: false }); a.deep(iterator.next(), { value: undefined, done: true }); }; //# sourceMappingURL=shim-compiled.js.map
{ "content_hash": "ecb4b6cda0d8c4763885d3c64c7f6020", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 59, "avg_line_length": 33.45454545454545, "alnum_prop": 0.6086956521739131, "repo_name": "patelsan/fetchpipe", "id": "353f2c312f746d6c5f316e8e01c519ed9f3a163b", "size": "368", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "node_modules/es5-ext/test/array/#/entries/shim-compiled.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "15821" } ], "symlink_target": "" }
<bill session="110" type="h" number="4239" updated="2009-01-09T01:41:16-05:00"> <status><introduced date="1195102800" datetime="2007-11-15"/></status> <introduced date="1195102800" datetime="2007-11-15"/> <titles> <title type="short" as="introduced">Establishing the House Independent Commission on Standards Act</title> <title type="official" as="introduced">To establish a House ethics commission, and for other purposes.</title> </titles> <sponsor id="412194"/> <cosponsors> <cosponsor id="412229" joined="2007-11-15"/> </cosponsors> <actions> <action date="1195102800" datetime="2007-11-15"><text>Referred to the Committee on House Administration, and in addition to the Committee on Rules, for a period to be subsequently determined by the Speaker, in each case for consideration of such provisions as fall within the jurisdiction of the committee concerned.</text></action> <action date="1195102800" datetime="2007-11-15"><text>Referred to House House Administration</text></action> <action date="1195102800" datetime="2007-11-15"><text>Referred to House Rules</text></action> </actions> <committees> </committees> <relatedbills> </relatedbills> <subjects> </subjects> <amendments> </amendments> <summary> 11/15/2007--Introduced.<br/>Establishing The House Independent Commission on Standards Act - Establishes within the legislative branch The House Independent Commission on Standards. Amends Rule XI (Procedures of Committees and Unfinished Business) of the Rules of the House of Representatives to transfer to the Commission certain recommendations for administrative action and investigative duties of the Committee on Standards of Official Conduct regarding Members, Delegates, the Resident Commissioner, and House officers and employees.<br/> </summary> </bill>
{ "content_hash": "6b74a9bac92d3d25b50e8923d8302408", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 544, "avg_line_length": 54.90909090909091, "alnum_prop": 0.7649006622516556, "repo_name": "hashrocket/localpolitics.in", "id": "47c23496c9e6ecd56b1475d531263d516bfd1966", "size": "1812", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "public/govtrack/110_bills/h4239.xml", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "155887" }, { "name": "Ruby", "bytes": "147059" } ], "symlink_target": "" }
module Azure::ARM::Logic module Models # # Model object. # # class RecurrenceSchedule include MsRestAzure # @return [Array<Integer>] The minutes. attr_accessor :minutes # @return [Array<Integer>] The hours. attr_accessor :hours # @return [Array<DaysOfWeek>] The days of the week. attr_accessor :week_days # @return [Array<Integer>] The month days. attr_accessor :month_days # @return [Array<RecurrenceScheduleOccurrence>] The monthly occurrences. attr_accessor :monthly_occurrences # # Mapper for RecurrenceSchedule class as Ruby Hash. # This will be used for serialization/deserialization. # def self.mapper() { required: false, serialized_name: 'RecurrenceSchedule', type: { name: 'Composite', class_name: 'RecurrenceSchedule', model_properties: { minutes: { required: false, serialized_name: 'minutes', type: { name: 'Sequence', element: { required: false, serialized_name: 'NumberElementType', type: { name: 'Number' } } } }, hours: { required: false, serialized_name: 'hours', type: { name: 'Sequence', element: { required: false, serialized_name: 'NumberElementType', type: { name: 'Number' } } } }, week_days: { required: false, serialized_name: 'weekDays', type: { name: 'Sequence', element: { required: false, serialized_name: 'DaysOfWeekElementType', type: { name: 'Enum', module: 'DaysOfWeek' } } } }, month_days: { required: false, serialized_name: 'monthDays', type: { name: 'Sequence', element: { required: false, serialized_name: 'NumberElementType', type: { name: 'Number' } } } }, monthly_occurrences: { required: false, serialized_name: 'monthlyOccurrences', type: { name: 'Sequence', element: { required: false, serialized_name: 'RecurrenceScheduleOccurrenceElementType', type: { name: 'Composite', class_name: 'RecurrenceScheduleOccurrence' } } } } } } } end end end end
{ "content_hash": "5527a82b422afff07266d84a1894ec75", "timestamp": "", "source": "github", "line_count": 117, "max_line_length": 81, "avg_line_length": 28.82051282051282, "alnum_prop": 0.3858244365361803, "repo_name": "devigned/azure-sdk-for-ruby", "id": "8f27876bf5af06d9513a92de782b5556b258b913", "size": "3543", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "management/azure_mgmt_logic/lib/generated/azure_mgmt_logic/models/recurrence_schedule.rb", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Ruby", "bytes": "11168894" }, { "name": "Shell", "bytes": "157" } ], "symlink_target": "" }
/** * Module dependencies */ var Lifecycle = require('./helpers/lifecycle'), Uploader = require('./helpers/uploader'), _ = require('lodash'), util = require('util'), path = require('path'), assert = require('assert'), toValidateTheHTTPResponse = require('./helpers/toValidateTheHTTPResponse'), fsx = require('fs-extra'); // Fixtures var actionFixtures = { uploadAvatar: require('./fixtures/uploadAvatar.usingUploadMethod.action') }; describe('req.file(...).upload(...) ::', function() { var suite = Lifecycle(); before(suite.setup); after(suite.teardown); it('bind a file uploader action', function() { suite.app.post('/upload', actionFixtures.uploadAvatar); }); it('sends a multi-part file upload request', function(done) { // Builds an HTTP request var httpRequest = Uploader({ baseurl: 'http://localhost:3000' }, toValidateTheHTTPResponse(done)); // Attaches a multi-part form upload to the HTTP request., var form = httpRequest.form(); var pathToSmallFile = suite.srcFiles[0].path; form.append('avatar', fsx.createReadStream(pathToSmallFile)); }); it('should have uploaded a file to `suite.outputDir`', function(done) { // Check that a file landed adapter.ls(suite.outputDir.path, function (err, filesUploaded) { if (err) return done(err); assert(filesUploaded.length === 1); // Check that its contents are correct var srcFileContents = fsx.readFileSync(suite.srcFiles[0].path); adapter.read(filesUploaded[0], function (err, uploadedFileContents) { if (err) return done(err); assert(uploadedFileContents.toString() === srcFileContents.toString()); done(); }); }); }); });
{ "content_hash": "8f8b3f17bea76c1ec65f5ec1f0398083", "timestamp": "", "source": "github", "line_count": 66, "max_line_length": 79, "avg_line_length": 26.37878787878788, "alnum_prop": 0.6582423894313613, "repo_name": "listepo/skipper-adapter-tests", "id": "6c788aeac85c0fdef27d3f8aaebd632e4bb4e2c9", "size": "1741", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "tests/base/req.file.upload.test.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "35446" } ], "symlink_target": "" }
class Org::Settings::Crm::ImportStubsController < Org::OrgController before_action :load_crm def new @stub = @crm.import_stubs.build respond_to do |format| format.js { render 'new'} end end def create @stub = @crm.import_stubs.build(import_stub_params) respond_to do |format| format.js do if @stub.save render 'create_success' else render 'create_error' end end end end def destroy @stub = @crm.import_stubs.find(params[:id]) @stub.destroy respond_to do |format| format.js do render 'create_success' end end end private def import_stub_params params.require(:import_stub).permit(:donation_currency, :payment_account) end def load_crm @crm = current_organization.crm end end
{ "content_hash": "cec84652651cb9b9baa0f050b9352889", "timestamp": "", "source": "github", "line_count": 43, "max_line_length": 77, "avg_line_length": 19.3953488372093, "alnum_prop": 0.6187050359712231, "repo_name": "controlshift/prague-server", "id": "d899682ff2bf530820ab12a2b25150268a88bc9c", "size": "834", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/controllers/org/settings/crm/import_stubs_controller.rb", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "32002" }, { "name": "CoffeeScript", "bytes": "2601" }, { "name": "HTML", "bytes": "44025" }, { "name": "JavaScript", "bytes": "800" }, { "name": "Ruby", "bytes": "311383" }, { "name": "Shell", "bytes": "3768" } ], "symlink_target": "" }
using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Runtime.InteropServices.WindowsRuntime; using Windows.ApplicationModel; using Windows.ApplicationModel.Activation; using Windows.Foundation; using Windows.Foundation.Collections; using Windows.UI.Xaml; using Windows.UI.Xaml.Controls; using Windows.UI.Xaml.Controls.Primitives; using Windows.UI.Xaml.Data; using Windows.UI.Xaml.Input; using Windows.UI.Xaml.Media; using Windows.UI.Xaml.Navigation; namespace SketchClassifyDebugger { /// <summary> /// Provides application-specific behavior to supplement the default Application class. /// </summary> sealed partial class App : Application { /// <summary> /// Initializes the singleton application object. This is the first line of authored code /// executed, and as such is the logical equivalent of main() or WinMain(). /// </summary> public App() { this.InitializeComponent(); this.Suspending += OnSuspending; } /// <summary> /// Invoked when the application is launched normally by the end user. Other entry points /// will be used such as when the application is launched to open a specific file. 
/// </summary> /// <param name="e">Details about the launch request and process.</param> protected override void OnLaunched(LaunchActivatedEventArgs e) { #if DEBUG if (System.Diagnostics.Debugger.IsAttached) { this.DebugSettings.EnableFrameRateCounter = false; } #endif Frame rootFrame = Window.Current.Content as Frame; // Do not repeat app initialization when the Window already has content, // just ensure that the window is active if (rootFrame == null) { // Create a Frame to act as the navigation context and navigate to the first page rootFrame = new Frame(); rootFrame.NavigationFailed += OnNavigationFailed; if (e.PreviousExecutionState == ApplicationExecutionState.Terminated) { //TODO: Load state from previously suspended application } // Place the frame in the current Window Window.Current.Content = rootFrame; } if (e.PrelaunchActivated == false) { if (rootFrame.Content == null) { // When the navigation stack isn't restored navigate to the first page, // configuring the new page by passing required information as a navigation // parameter rootFrame.Navigate(typeof(MainPage), e.Arguments); } // Ensure the current window is active Window.Current.Activate(); } } /// <summary> /// Invoked when Navigation to a certain page fails /// </summary> /// <param name="sender">The Frame which failed navigation</param> /// <param name="e">Details about the navigation failure</param> void OnNavigationFailed(object sender, NavigationFailedEventArgs e) { throw new Exception("Failed to load Page " + e.SourcePageType.FullName); } /// <summary> /// Invoked when application execution is being suspended. Application state is saved /// without knowing whether the application will be terminated or resumed with the contents /// of memory still intact. 
/// </summary> /// <param name="sender">The source of the suspend request.</param> /// <param name="e">Details about the suspend request.</param> private void OnSuspending(object sender, SuspendingEventArgs e) { var deferral = e.SuspendingOperation.GetDeferral(); //TODO: Save application state and stop any background activity deferral.Complete(); } } }
{ "content_hash": "a8eab3a94740a3f6a1ace5ab4652282b", "timestamp": "", "source": "github", "line_count": 106, "max_line_length": 99, "avg_line_length": 38.698113207547166, "alnum_prop": 0.6128717698683569, "repo_name": "paultaele/srl-research-hub", "id": "7c41b37bd073976ff954cf7c13af0bc420c3dde3", "size": "4104", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "_old/SketchClassifyDebugger/SketchClassifyDebugger/App.xaml.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "578008" }, { "name": "PowerShell", "bytes": "197736" } ], "symlink_target": "" }
<div class="trainers-default-index"> <h1><?= $this->context->action->uniqueId ?></h1> <p> This is the view content for action "<?= $this->context->action->id ?>". The action belongs to the controller "<?= get_class($this->context) ?>" in the "<?= $this->context->module->id ?>" module. </p> <p> You may customize this page by editing the following file:<br> <code><?= __FILE__ ?></code> </p> </div>
{ "content_hash": "c5aa2cf0ccc08f78806715673ce12af8", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 80, "avg_line_length": 38.25, "alnum_prop": 0.5533769063180828, "repo_name": "wanyos2005/smartLearn", "id": "5b3990afb3dd6ac235afab274e0edb9d308a7a14", "size": "459", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "frontend/modules/trainers/views/default/index.php", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "ApacheConf", "bytes": "416" }, { "name": "Batchfile", "bytes": "1541" }, { "name": "CSS", "bytes": "819331" }, { "name": "HTML", "bytes": "3173935" }, { "name": "JavaScript", "bytes": "3657972" }, { "name": "PHP", "bytes": "377014" }, { "name": "Shell", "bytes": "423" } ], "symlink_target": "" }
```{include} _templates/nav.html ``` # censusbatchgeocoder A simple Python wrapper for the [U.S. Census Geocoding Services API batch service](https://www.documentcloud.org/documents/3894452-Census-Geocoding-Services-API.html) ```{contents} Table of contents :local: :depth: 2 ``` ## Installation ```bash pipenv install censusbatchgeocoder ``` ## Basic usage Importing the library ```python import censusbatchgeocoder ``` According to the [official Census documentation](https://www.documentcloud.org/documents/3894452-Census-Geocoding-Services-API.html), the input is expected to contain the following fields: * ``id``: Your unique identifier for the record * ``address``: Structure number and street name (required) * ``city``: City name (required) * ``state``: State (optional) * ``zipcode``: ZIP Code (optional) You can geocode a comma-delimited file from the filesystem. Results are returned as a list of dictionaries. An example could look like this: ```text id,address,city,state,zipcode 1,1600 Pennsylvania Ave NW,Washington,DC,20006 2,202 W. 1st Street,Los Angeles,CA,90012 ``` Which is then passed in like this: ```python results = censusbatchgeocoder.geocode("./my_file.csv") ``` The results are returned with the following columns from the Census * ``id``: The unique id provided with the record. * ``returned_address``: The address that was submitted to the geocoder. * ``geocoded_address``: The address of the match returned by the geocoder. * ``is_match``: Whether or not the geocoder found a match. * ``is_exact``: The precision of the match. * ``coordinates``: The longitude and latitude of the match together in a string. * ``longitude``: The longitude of the match as a float. * ``latitude``: The latitude of the match as a float. * ``tiger_line``: The Census TIGER line of the match. * ``side``: The side of the Census TIGER line of the match. * ``state_fips``: The FIPS state code identifying the state of the match. 
* ``county_fips``: The FIPS county code identifying the county of the match. * ``tract``: The Census tract of the match. * ``block``: The Census block of the match. ```python print(results) [ { "address": "1600 Pennsylvania Ave NW", "block": "1031", "city": "Washington", "coordinates": "-77.03535,38.898754", "county_fips": "001", "geocoded_address": "1600 Pennsylvania Ave NW, Washington, DC, 20006", "id": "1", "is_exact": "Non_Exact", "is_match": "Match", "latitude": 38.898754, "longitude": -77.03535, "returned_address": "1600 PENNSYLVANIA AVE NW, WASHINGTON, DC, 20502", "side": "L", "state": "DC", "state_fips": "11", "tiger_line": "76225813", "tract": "006202", "zipcode": "20006", }, { "address": "202 W. 1st Street", "block": "1034", "city": "Los Angeles", "coordinates": "-118.24456,34.053005", "county_fips": "037", "geocoded_address": "202 W. 1st Street, Los Angeles, CA, 90012", "id": "2", "is_exact": "Exact", "is_match": "Match", "latitude": 34.053005, "longitude": -118.24456, "returned_address": "202 W 1ST ST, LOS ANGELES, CA, 90012", "side": "L", "state": "CA", "state_fips": "06", "tiger_line": "141618115", "tract": "207400", "zipcode": "90012", }, ] ``` Any extra metadata fields included in the file are still present in the returned data. So the ``my_metadata`` column here... ```text id,address,city,state,zipcode,my_metadata 1,1600 Pennsylvania Ave NW,Washington,DC,20006,foo 2,202 W. 1st Street,Los Angeles,CA,90012,bar ``` .. is still there after you geocode. 
```python censusbatchgeocoder.geocode("./my_file.csv") [ { "address": "1600 Pennsylvania Ave NW", "block": "1031", "city": "Washington", "coordinates": "-77.03535,38.898754", "county_fips": "001", "geocoded_address": "1600 Pennsylvania Ave NW, Washington, DC, 20006", "id": "1", "is_exact": "Non_Exact", "is_match": "Match", "latitude": 38.898754, "longitude": -77.03535, "returned_address": "1600 PENNSYLVANIA AVE NW, WASHINGTON, DC, 20502", "my_metadata": "foo", "side": "L", "state": "DC", "state_fips": "11", "tiger_line": "76225813", "tract": "006202", "zipcode": "20006", }, { "address": "202 W. 1st Street", "block": "1034", "city": "Los Angeles", "coordinates": "-118.24456,34.053005", "county_fips": "037", "geocoded_address": "202 W. 1st Street, Los Angeles, CA, 90012", "id": "2", "is_exact": "Exact", "is_match": "Match", "latitude": 34.053005, "longitude": -118.24456, "returned_address": "202 W 1ST ST, LOS ANGELES, CA, 90012", "my_metadata": "foo", "side": "L", "state": "CA", "state_fips": "06", "tiger_line": "141618115", "tract": "207400", "zipcode": "90012", }, ] ``` ### Custom column names If you have column headers that do not exactly match those expected by the geocoder you should override them. So a file like this: ```text foo,bar,baz,bada,boom 1,521 SWARTHMORE AVENUE,PACIFIC PALISADES,CA,90272-4350 2,2015 W TEMPLE STREET,LOS ANGELES,CA,90026-4913 ``` Can be mapped like this: ```python censusbatchgeocoder.geocode( self.weird_path, id="foo", address="bar", city="baz", state="bada", zipcode="boom" ) ``` ### Optional columns The state and ZIP Code columns are optional. If your data doesn't have them, pass ``None`` as keyword arguments. ```python censusbatchgeocoder.geocode("./my_file.csv", state=None, zipcode=None) ``` ### Lists of dictionaries A list of dictionaries, like those created by the csv module's ``DictReader`` can also be mapped. 
```python my_list = [ { "address": "521 SWARTHMORE AVENUE", "city": "PACIFIC PALISADES", "id": "1", "state": "CA", "zipcode": "90272-4350", }, { "address": "2015 W TEMPLE STREET", "city": "LOS ANGELES", "id": "2", "state": "CA", "zipcode": "90026-4913", }, ] censusbatchgeocoder.geocode(my_list) ``` ### pandas DataFrames You can geocode a pandas DataFrame by converting it into a list of dictionaries. ```python result = censusbatchgeocoder.geocode(df.to_dict("records")) ``` Then convert it back into a DataFrame. ```python result_df = pd.DataFrame(result) ``` That's it. ### File objects You can also geocode an in-memory file object of data in CSV format. ```python my_data = """id,address,city,state,zipcode 1,1600 Pennsylvania Ave NW,Washington,DC,20006 2,202 W. 1st Street,Los Angeles,CA,90012""" censusbatchgeocoder.geocode(io.StringIO(my_data)) ``` ### Different encodings If you are using Python 2 and your CSV file has an unusual encoding that's causing problems, try explicitly passing in the encoding name. ```python censusbatchgeocoder.geocode("./my_file.csv", encoding="utf-8-sig") ``` ## Links * Issues: [github.com/datadesk/python-censusbatchgeocoder/issues](https://github.com/datadesk/censusbatchgeocoder/issues) * Packaging: [pypi.python.org/pypi/censusbatchgeocoder](https://pypi.python.org/pypi/censusbatchgeocoder)
{ "content_hash": "096d484438746611f05d1b6188a8dfe5", "timestamp": "", "source": "github", "line_count": 264, "max_line_length": 188, "avg_line_length": 28.08712121212121, "alnum_prop": 0.6302090357383682, "repo_name": "datadesk/python-censusbatchgeocoder", "id": "ae9db9562c16b5fb75d49fd188434d2f06500a06", "size": "7415", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "docs/index.md", "mode": "33188", "license": "mit", "language": [ { "name": "Makefile", "bytes": "175" }, { "name": "Python", "bytes": "13123" } ], "symlink_target": "" }
<!doctype html> <html class="no-js" lang=""> <head> <meta charset="utf-8"> <meta http-equiv="x-ua-compatible" content="ie=edge"> <title></title> <meta name="description" content=""> <meta name="viewport" content="width=device-width, initial-scale=1"> <link rel="apple-touch-icon" href="apple-touch-icon.png"> <!-- Place favicon.ico in the root directory --> <link rel="stylesheet" href="css/normalize.css"> <link rel="stylesheet" href="css/main.css"> <!--<script src="js/vendor/modernizr-2.8.3.min.js"></script>--> <!--<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.2/jquery.min.js"></script>--> <!--<script>window.jQuery || document.write('<script src="js/vendor/jquery-1.11.3.min.js"><\/script>')</script>--> <script src="js/vendor/jquery-1.11.3.min.js"></script> <script src="js/plugins.js"></script> <script src="js/main.js"></script> <script src="js/oscilloscope.js"></script> <!--<script src="js/fractal-machine.js"></script>--> </head> <body> <!--[if lt IE 8]> <p class="browserupgrade">You are using an <strong>outdated</strong> browser. Please <a href="http://browsehappy.com/">upgrade your browser</a> to improve your experience.</p> <![endif]--> <canvas id="demo-2d" width="450" height="300"></canvas> <button id="stop_anim">Pause</button> <script>oscilloscope.init();</script> <!-- Add your site or application content here --> <!--<canvas id="main" width="2048" height="2048" style="border:0px solid #000000;" onmousedown="fract.drag.down(event)"></canvas>--> <script> //fract.init(); </script> <!-- Google Analytics: change UA-XXXXX-X to be your site's ID. 
--> <!--<script>--> <!--(function(b,o,i,l,e,r){b.GoogleAnalyticsObject=l;b[l]||(b[l]=--> <!--function(){(b[l].q=b[l].q||[]).push(arguments)});b[l].l=+new Date;--> <!--e=o.createElement(i);r=o.getElementsByTagName(i)[0];--> <!--e.src='https://www.google-analytics.com/analytics.js';--> <!--r.parentNode.insertBefore(e,r)}(window,document,'script','ga'));--> <!--ga('create','UA-XXXXX-X','auto');ga('send','pageview');--> <!--</script>--> </body> </html>
{ "content_hash": "4ab179f3d126ce6cb5abfa07745a47a7", "timestamp": "", "source": "github", "line_count": 61, "max_line_length": 187, "avg_line_length": 39.36065573770492, "alnum_prop": 0.5593502707205331, "repo_name": "diafour/canvas-oscilloscope", "id": "6a2fa3ba90b188d298d954e0b069e0ba7020247a", "size": "2401", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "index.html", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "39693" }, { "name": "CSS", "bytes": "5760" }, { "name": "HTML", "bytes": "3673" }, { "name": "JavaScript", "bytes": "81234" } ], "symlink_target": "" }
package org.jboss.resteasy.test.asynch; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.jboss.arquillian.container.test.api.Deployment; import org.jboss.arquillian.container.test.api.RunAsClient; import org.jboss.arquillian.junit.Arquillian; import org.jboss.resteasy.category.ExpectedFailing; import org.jboss.resteasy.client.jaxrs.ResteasyClientBuilder; import org.jboss.resteasy.test.asynch.resource.JaxrsAsyncServletApp; import org.jboss.resteasy.test.asynch.resource.JaxrsAsyncServletAsyncResponseBlockingQueue; import org.jboss.resteasy.test.asynch.resource.JaxrsAsyncServletJaxrsResource; import org.jboss.resteasy.test.asynch.resource.JaxrsAsyncServletResource; import org.jboss.resteasy.test.asynch.resource.JaxrsAsyncServletServiceUnavailableExceptionMapper; import org.jboss.resteasy.test.asynch.resource.JaxrsAsyncServletTimeoutHandler; import org.jboss.resteasy.test.asynch.resource.JaxrsAsyncServletXmlData; import org.jboss.resteasy.test.asynch.resource.JaxrsAsyncServletPrintingErrorHandler; import org.jboss.resteasy.util.HttpResponseCodes; import org.jboss.resteasy.utils.PortProviderUtil; import org.jboss.shrinkwrap.api.Archive; import org.jboss.shrinkwrap.api.ShrinkWrap; import org.jboss.shrinkwrap.api.spec.WebArchive; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import javax.ws.rs.client.AsyncInvoker; import javax.ws.rs.client.Client; import javax.ws.rs.client.ClientBuilder; import javax.ws.rs.client.Entity; import javax.ws.rs.client.WebTarget; import javax.ws.rs.core.HttpHeaders; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import java.io.ByteArrayInputStream; import java.io.IOException; import java.text.DateFormat; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Date; import 
java.util.Locale; import java.util.TimeZone; import java.util.concurrent.Future; /** * @tpSubChapter Asynchronous RESTEasy * @tpChapter Integration tests * @tpTestCaseDetails Test for asyncHttpServlet module. Check stage URL property. * @tpSince RESTEasy 3.0.16 */ @RunWith(Arquillian.class) @RunAsClient public class ComprehensiveJaxrsTest { protected static final Logger logger = LogManager.getLogger(ComprehensiveJaxrsTest.class.getName()); @Deployment public static Archive<?> createTestArchive() { WebArchive war = ShrinkWrap.create(WebArchive.class, AsyncServletTest.class.getSimpleName() + ".war"); war.addClasses(JaxrsAsyncServletXmlData.class, JaxrsAsyncServletAsyncResponseBlockingQueue.class, JaxrsAsyncServletJaxrsResource.class, JaxrsAsyncServletApp.class, JaxrsAsyncServletPrintingErrorHandler.class, JaxrsAsyncServletTimeoutHandler.class, JaxrsAsyncServletResource.class, JaxrsAsyncServletServiceUnavailableExceptionMapper.class, JaxrsAsyncServletXmlData.class); war.addAsWebInfResource(AsyncPostProcessingTest.class.getPackage(), "JaxrsAsyncServletWeb.xml", "web.xml"); return war; } private String generateURL(String path) { return PortProviderUtil.generateURL(path, AsyncServletTest.class.getSimpleName()); } protected Client client; @Before public void beforeTest() { client = new ResteasyClientBuilder().connectionPoolSize(10).build(); } @After public void afterTest() { client.close(); } protected static String objectsToString(Object... objects) { StringBuilder sb = new StringBuilder(); for (Object o : objects) { sb.append(o).append(" "); } return sb.toString().trim(); } public static void logMsg(Object... msg) { logger.info(objectsToString(msg)); } protected static void checkEquals(Object expected, Object actual, Object... 
msg) { Assert.assertEquals(objectsToString(msg), expected, actual); } public static final TimeZone findTimeZoneInDate(String date) { StringBuilder sb = new StringBuilder(); StringBuilder dateBuilder = new StringBuilder(date.trim()).reverse(); int index = 0; char c; while ((c = dateBuilder.charAt(index++)) != ' ') { sb.append(c); } TimeZone timezone = TimeZone.getTimeZone(sb.reverse().toString()); return timezone; } public static final DateFormat createDateFormat(TimeZone timezone) { SimpleDateFormat sdf = new SimpleDateFormat( "EEE, dd MMM yyyy HH:mm:ss z", Locale.US); sdf.setTimeZone(timezone); return sdf; } private void suspendResumeTest() throws Exception { invokeClear(); String expectedResponse = "Expected response"; Future<Response> suspend = invokeRequest("suspend"); Future<Response> resume = invokeRequest("resume?stage=0", expectedResponse); checkString(resume, JaxrsAsyncServletResource.TRUE); checkString(suspend, expectedResponse); } private void cancelVoidTest() throws Exception { invokeClear(); Future<Response> suspend = invokeRequest("suspend"); Future<Response> cancel = invokeRequest("cancelvoid?stage=0"); checktStatus(getResponse(suspend), Status.SERVICE_UNAVAILABLE); checkString(cancel, JaxrsAsyncServletResource.TRUE); } private void setTimeoutTest() throws Exception { invokeClear(); logMsg("here 1"); Future<Response> suspend = invokeRequest("suspend"); logMsg("here 2"); Future<Response> setTimeout = invokeRequest("settimeout?stage=0", 200); logMsg("here 3"); checktStatus(getResponse(setTimeout), Status.NO_CONTENT); logMsg("here 4"); // WebApplication exception with 503 is caught by ServiceUnavailableExceptionMapper Response fromMapper = getResponse(suspend); logMsg("here 5"); checktStatus(fromMapper, Status.REQUEST_TIMEOUT); String entity = fromMapper.readEntity(String.class); checkContains(entity, 503); logMsg("Found expected status 503"); } private void cancelDateTest() throws Exception { long milis = (System.currentTimeMillis() / 1000) * 1000 + 
20000; invokeClear(); Future<Response> suspend = invokeRequest("suspend"); Future<Response> cancel = invokeRequest("canceldate?stage=0", milis); Response response = getResponse(suspend); checktStatus(response, Status.SERVICE_UNAVAILABLE); checkString(cancel, JaxrsAsyncServletResource.TRUE); String header = response.getHeaderString(HttpHeaders.RETRY_AFTER); TimeZone timezone = findTimeZoneInDate(header); Date retry = null; try { retry = createDateFormat(timezone).parse(header); } catch (ParseException e) { throw new Exception(e); } checkEquals(new Date(milis), retry, "Unexpected", HttpHeaders.RETRY_AFTER, "header value received", retry.getTime(), "expected", milis); logMsg("Found expected", HttpHeaders.RETRY_AFTER, "=", header); } private void cancelIntTest() throws Exception { String seconds = "20"; invokeClear(); Future<Response> suspend = invokeRequest("suspend"); Future<Response> cancel = invokeRequest("cancelretry?stage=0", seconds); Response response = getResponse(suspend); checktStatus(response, Status.SERVICE_UNAVAILABLE); checkString(cancel, JaxrsAsyncServletResource.TRUE); String retry = response.getHeaderString(HttpHeaders.RETRY_AFTER); checkEquals(seconds, retry, "Unexpected", HttpHeaders.RETRY_AFTER, "header value received", retry, "expected", seconds); logMsg("Found expected", HttpHeaders.RETRY_AFTER, "=", retry); } /** * @tpTestDetails Complex test. Check stage=0 and stage=1 values. 
* @tpSince RESTEasy 3.0.16 */ @Test @Category({ExpectedFailing.class}) // [RESTEASY-1446] FIXME public void complexTest() throws Exception { // cancelVoidTest { cancelVoidTest(); } // cancelVoidOnResumedTest { suspendResumeTest(); Future<Response> cancel = invokeRequest("cancelvoid?stage=1"); checkString(cancel, JaxrsAsyncServletResource.FALSE); } // cancelVoidOnCanceledTest { cancelVoidTest(); Future<Response> cancel = invokeRequest("cancelvoid?stage=1"); checkString(cancel, JaxrsAsyncServletResource.TRUE); } // resumeCanceledTest { cancelVoidTest(); Future<Response> resumeCanceled = invokeRequest("resume?stage=1", ""); checkString(resumeCanceled, JaxrsAsyncServletResource.FALSE); } // cancelIntTest { cancelIntTest(); } // cancelIntOnResumedTest { suspendResumeTest(); Future<Response> cancel = invokeRequest("cancelretry?stage=1", "20"); checkString(cancel, JaxrsAsyncServletResource.FALSE); } // cancelIntOnCanceledTest { cancelVoidTest(); Future<Response> cancel = invokeRequest("cancelretry?stage=1", "20"); checkString(cancel, JaxrsAsyncServletResource.TRUE); } // resumeCanceledIntTest { cancelIntTest(); Future<Response> resume = invokeRequest("resume?stage=1", ""); checkString(resume, JaxrsAsyncServletResource.FALSE); } // cancelDateTest { cancelDateTest(); } // cancelDateOnResumedTest { suspendResumeTest(); Future<Response> cancel = invokeRequest("canceldate?stage=1", System.currentTimeMillis()); checkString(cancel, JaxrsAsyncServletResource.FALSE); } // cancelDateOnCanceledTest { cancelVoidTest(); Future<Response> cancel = invokeRequest("canceldate?stage=1", System.currentTimeMillis()); checkString(cancel, JaxrsAsyncServletResource.TRUE); } // resumeCanceledDateTest { cancelDateTest(); Future<Response> resumeResumed = invokeRequest("resume?stage=1", ""); checkString(resumeResumed, JaxrsAsyncServletResource.FALSE); } // isCanceledWhenCanceledTest { cancelVoidTest(); Future<Response> is = invokeRequest("iscanceled?stage=1"); checkString(is, 
JaxrsAsyncServletResource.TRUE); } // isCanceledWhenSuspendedTest { invokeClear(); invokeRequest("suspend"); Future<Response> is = invokeRequest("iscanceled?stage=0"); checkString(is, JaxrsAsyncServletResource.FALSE); } // isCanceledWhenResumedTest { suspendResumeTest(); Future<Response> is = invokeRequest("iscanceled?stage=1"); checkString(is, JaxrsAsyncServletResource.FALSE); } // isDoneWhenResumedTest { suspendResumeTest(); Future<Response> is = invokeRequest("isdone?stage=1"); checkString(is, JaxrsAsyncServletResource.TRUE); } // isDoneWhenSuspendedTest { invokeClear(); invokeRequest("suspend"); Future<Response> is = invokeRequest("isdone?stage=0"); checkString(is, JaxrsAsyncServletResource.FALSE); } // isDoneWhenCanceledTest { cancelVoidTest(); Future<Response> is = invokeRequest("isdone?stage=1"); checkString(is, JaxrsAsyncServletResource.TRUE); } // isDoneWhenTimedOutTest { setTimeoutTest(); Future<Response> is = invokeRequest("isdone?stage=1"); checkString(is, JaxrsAsyncServletResource.TRUE); } // isSuspendedWhenSuspendedTest { invokeClear(); invokeRequest("suspend"); Future<Response> is = invokeRequest("issuspended?stage=0"); checkString(is, JaxrsAsyncServletResource.TRUE); } // isSuspendedWhenCanceledTest { cancelVoidTest(); Future<Response> is = invokeRequest("issuspended?stage=1"); checkString(is, JaxrsAsyncServletResource.FALSE); } // isSuspendedWhenResumedTest { suspendResumeTest(); Future<Response> is = invokeRequest("issuspended?stage=1"); checkString(is, JaxrsAsyncServletResource.FALSE); } // suspendResumeTest { suspendResumeTest(); } // resumeAnyJavaObjectInputStreamTest { invokeClear(); String expectedResponse = "Expected response"; Future<Response> suspend = invokeRequest("suspend"); Future<Response> resume = invokeRequest("resume?stage=0", new ByteArrayInputStream(expectedResponse.getBytes())); checkString(resume, JaxrsAsyncServletResource.TRUE); checkString(suspend, expectedResponse); } // resumeResumedTest { suspendResumeTest(); // resume & 
store Future<Response> resumeResumed = invokeRequest("resume?stage=1", ""); checkString(resumeResumed, JaxrsAsyncServletResource.FALSE); } // resumeWithCheckedExceptionTest { invokeClear(); Future<Response> suspend = invokeRequest("suspend"); Future<Response> resume = invokeRequest("resumechecked?stage=0"); checkString(resume, JaxrsAsyncServletResource.TRUE); checkException(suspend, IOException.class); } // resumeWithRuntimeExceptionTest { invokeClear(); Future<Response> suspend = invokeRequest("suspend"); Future<Response> resume = invokeRequest("resumeruntime?stage=0"); checkString(resume, JaxrsAsyncServletResource.TRUE); checkException(suspend, RuntimeException.class); } // resumeWithExceptionReturnsFalseWhenResumedTest { suspendResumeTest(); Future<Response> resume = invokeRequest("resumechecked?stage=1"); checkString(resume, JaxrsAsyncServletResource.FALSE); } // setTimeoutTest { setTimeoutTest(); } // updateTimeoutTest { invokeClear(); Future<Response> suspend = invokeRequest("suspend"); Future<Response> setTimeout = invokeRequest("settimeout?stage=0", 600000); checktStatus(getResponse(setTimeout), Status.NO_CONTENT); checkFalse(suspend.isDone(), "Suspended AsyncResponse already received"); setTimeout = invokeRequest("settimeout?stage=1", 200); checktStatus(getResponse(setTimeout), Status.NO_CONTENT); // WebApplication exception with 503 is caught by ServiceUnavailableExceptionMapper Response fromMapper = getResponse(suspend); checktStatus(fromMapper, Status.REQUEST_TIMEOUT); String entity = fromMapper.readEntity(String.class); checkContains(entity, HttpResponseCodes.SC_SERVICE_UNAVAILABLE); logMsg("Found expected status 503"); } // handleTimeOutWaitsForeverTest { String responseMsg = "handleTimeOutWaitsForeverTest"; invokeClear(); Future<Response> suspend = invokeRequest("suspend"); Future<Response> setTimeout = invokeRequest("timeouthandler?stage=0", 1); Future<Response> resume = invokeRequest("resume?stage=1", responseMsg); 
checktStatus(getResponse(setTimeout), Status.NO_CONTENT); checkString(resume, JaxrsAsyncServletResource.TRUE); checkString(suspend, responseMsg); } // handleTimeoutCancelsTest { invokeClear(); Future<Response> suspend = invokeRequest("suspend"); Future<Response> setTimeout = invokeRequest("timeouthandler?stage=0", 2); checktStatus(getResponse(setTimeout), Status.NO_CONTENT); checktStatus(getResponse(suspend), Status.SERVICE_UNAVAILABLE); Future<Response> resume = invokeRequest("issuspended?stage=1"); checkString(resume, JaxrsAsyncServletResource.FALSE); } // handleTimeoutResumesTest { invokeClear(); Future<Response> suspend = invokeRequest("suspend"); Future<Response> setTimeout = invokeRequest("timeouthandler?stage=0", 3); checktStatus(getResponse(setTimeout), Status.NO_CONTENT); checkString(suspend, JaxrsAsyncServletResource.RESUMED); Future<Response> resume = invokeRequest("issuspended?stage=1"); checkString(resume, JaxrsAsyncServletResource.FALSE); } } protected String getAbsoluteUrl() { return generateURL("/resource"); } private void invokeClear() throws Exception { Response response = client.target(getAbsoluteUrl()).path("clear").request().get(); Assert.assertEquals(HttpResponseCodes.SC_NO_CONTENT, response.getStatus()); } private Future<Response> invokeRequest(String resource) { AsyncInvoker async = createAsyncInvoker(resource); Future<Response> future = async.get(); return future; } private <T> Future<Response> invokeRequest(String resource, T entity) { AsyncInvoker async = createAsyncInvoker(resource); Future<Response> future = async.post(Entity.entity(entity, MediaType.TEXT_PLAIN_TYPE)); return future; } private WebTarget createWebTarget(String resource) { Client client = ClientBuilder.newClient(); WebTarget target = client.target(generateURL("/resource/" + resource)); return target; } private AsyncInvoker createAsyncInvoker(String resource) { WebTarget target = createWebTarget(resource); AsyncInvoker async = target.request().async(); return async; } 
private static Response getResponse(Future<Response> future) throws Exception { Response response = future.get(); return response; } private static void checktStatus(Response response, Response.Status status) throws Exception { checkEquals(response.getStatus(), status.getStatusCode(), "Unexpected status code received", response.getStatus(), "expected was", status); logMsg("Found expected status", status); } private static void checkString(Future<Response> future, String check) throws Exception { Response response = getResponse(future); checktStatus(response, Status.OK); String content = response.readEntity(String.class); checkEquals(check, content, "Unexpected response content", content); logMsg("Found expected string", check); } private static void checkException(Future<Response> future, Class<? extends Throwable> e) throws Exception { String clazz = e.getName(); Response response = getResponse(future); checktStatus(response, Response.Status.NOT_ACCEPTABLE); checkContainsString(response.readEntity(String.class), clazz, clazz, "not thrown"); logMsg(clazz, "has been thrown as expected"); } public static void checkContainsString(String string, String substring, Object... message) throws Exception { checkTrue(string.contains(substring), message); } public static <T> void checkContains(T text, T subtext, Object... message) throws Exception { checkContainsString(text.toString(), subtext.toString(), message); } public static void checkTrue(boolean condition, Object... message) { if (!condition) { Assert.fail(objectsToString(message)); } } public static void checkFalse(boolean condition, Object... message) throws Exception { checkTrue(!condition, message); } }
{ "content_hash": "7ada3e847a8cad0fc0aba870e0e3c5d6", "timestamp": "", "source": "github", "line_count": 553, "max_line_length": 143, "avg_line_length": 38.495479204339965, "alnum_prop": 0.6296974821495678, "repo_name": "awhitford/Resteasy", "id": "aab43626690c2845079ed72e9881d11babcb46b2", "size": "21288", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "testsuite/integration-tests/src/test/java/org/jboss/resteasy/test/asynch/ComprehensiveJaxrsTest.java", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "226" }, { "name": "Java", "bytes": "8760338" }, { "name": "JavaScript", "bytes": "17786" }, { "name": "Python", "bytes": "4868" }, { "name": "Shell", "bytes": "1606" } ], "symlink_target": "" }
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package pl.altkom.spring.spring.capgemini; import java.util.List; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.TestExecutionListeners; import org.springframework.test.context.junit4.AbstractTransactionalJUnit4SpringContextTests; import org.springframework.test.context.transaction.TransactionConfiguration; import pl.altkom.spring.spring.capgemini.core.dao.springdata.UserDAO; import pl.altkom.spring.spring.capgemini.core.model.User; /** * * @author instruktor */ @ContextConfiguration("/core-test-context.xml") @TransactionConfiguration(defaultRollback = false) public class TestContext extends AbstractTransactionalJUnit4SpringContextTests{ @Autowired private UserDAO userDAO; @Test public void test() { System.out.println("OK"); //User res = userDAO.findOne(5L); //System.out.println(res); } @Test public void saveUserTest() { User u = new User(); u.setLogin("ala"); u.setPassword("alamakota"); userDAO.save(u); System.out.println(u.getId()); } @Test public void testFind() { List<User> users = userDAO.findByLoginLikeIgnoreCase("ala"); System.out.println(users); } @Test public void testFindByActiveTrueOrderById() { System.out.println(userDAO.findByActiveTrue()); } }
{ "content_hash": "129241d5a8faf1e41cb1d971d5608045", "timestamp": "", "source": "github", "line_count": 68, "max_line_length": 93, "avg_line_length": 26.13235294117647, "alnum_prop": 0.6736072031513787, "repo_name": "cocoJamboo/spring-tutorials", "id": "d59a0a8ab714bb57c0d06a4c1ffdd3364a852354", "size": "1777", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "spring-capgemini-core/src/test/java/pl/altkom/spring/spring/capgemini/TestContext.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "986" }, { "name": "Java", "bytes": "91663" } ], "symlink_target": "" }
<html dir="LTR"> <head> <meta http-equiv="Content-Type" content="text/html; charset=Windows-1252" /> <meta name="vs_targetSchema" content="http://schemas.microsoft.com/intellisense/ie5" /> <title>Category Property</title> <xml> </xml> <link rel="stylesheet" type="text/css" href="MSDN.css" /> </head> <body id="bodyID" class="dtBODY"> <div id="nsbanner"> <div id="bannerrow1"> <table class="bannerparthead" cellspacing="0"> <tr id="hdr"> <td class="runninghead">log4net SDK Documentation - Microsoft .NET Framework 4.0</td> <td class="product"> </td> </tr> </table> </div> <div id="TitleRow"> <h1 class="dtH1">EventLogAppender.Category Property</h1> </div> </div> <div id="nstext"> <p> Gets or sets the <code>Category</code> to use unless one is explicitly specified via the <code>LoggingEvent</code>'s properties. </p> <div class="syntax"> <span class="lang">[Visual Basic]</span> <br />Public Property Category As <a href="ms-help://MS.NETFrameworkSDKv1.1/cpref/html/frlrfSystemInt16ClassTopic.htm">Short</a></div> <div class="syntax"> <span class="lang">[C#]</span> <br />public <a href="ms-help://MS.NETFrameworkSDKv1.1/cpref/html/frlrfSystemInt16ClassTopic.htm">short</a> Category {get; set;}</div> <p> </p> <h4 class="dtH4">Remarks</h4> <p> The <code>Category</code> of the event log entry will normally be set using the <code>Category</code> property (<a href="log4net.Core.LoggingEvent.Properties.html">Properties</a>) on the <a href="log4net.Core.LoggingEvent.html">LoggingEvent</a>. This property provides the fallback value which defaults to 0. 
</p> <h4 class="dtH4">See Also</h4><p><a href="log4net.Appender.EventLogAppender.html">EventLogAppender Class</a> | <a href="log4net.Appender.html">log4net.Appender Namespace</a></p><object type="application/x-oleobject" classid="clsid:1e2a7bd0-dab9-11d0-b93a-00c04fc99f9e" viewastext="true" style="display: none;"><param name="Keyword" value="Category property"></param><param name="Keyword" value="Category property, EventLogAppender class"></param><param name="Keyword" value="EventLogAppender.Category property"></param></object><hr /><div id="footer"><p><a href="http://logging.apache.org/log4net">Copyright 2004-2011 The Apache Software Foundation.</a></p><p></p></div></div> </body> </html>
{ "content_hash": "6b16ac7b1650c0f17a76f834ccd09f40", "timestamp": "", "source": "github", "line_count": 39, "max_line_length": 688, "avg_line_length": 63.92307692307692, "alnum_prop": 0.6558363417569194, "repo_name": "npruehs/slash-framework", "id": "766de568ce281cba611f6050a39abc6a8f1a18e0", "size": "2493", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Ext/log4net-1.2.11/doc/release/sdk/log4net.Appender.EventLogAppender.Category.html", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "7145" }, { "name": "C#", "bytes": "4000351" }, { "name": "C++", "bytes": "28426" }, { "name": "CSS", "bytes": "21726" }, { "name": "GLSL", "bytes": "45378" }, { "name": "HTML", "bytes": "10267333" }, { "name": "JavaScript", "bytes": "7017" }, { "name": "Perl", "bytes": "15600" }, { "name": "Shell", "bytes": "24755" }, { "name": "Visual Basic", "bytes": "49736" }, { "name": "XSLT", "bytes": "3323" } ], "symlink_target": "" }
package server // import "github.com/docker/infrakit/pkg/rpc/server" import ( "io/ioutil" "net/http" "path" "testing" rpc_flavor "github.com/docker/infrakit/pkg/rpc/flavor" rpc_instance "github.com/docker/infrakit/pkg/rpc/instance" "github.com/docker/infrakit/pkg/spi/flavor" "github.com/docker/infrakit/pkg/template" testing_flavor "github.com/docker/infrakit/pkg/testing/flavor" testing_instance "github.com/docker/infrakit/pkg/testing/instance" "github.com/stretchr/testify/require" ) func tempSocket() string { dir, err := ioutil.TempDir("", "infrakit-test-") if err != nil { panic(err) } return path.Join(dir, "instance-impl-test") } func TestFetchAPIInfoFromPlugin(t *testing.T) { socketPath := tempSocket() url := "unix://" + socketPath server, err := StartPluginAtPath(socketPath, rpc_instance.PluginServer(&testing_instance.Plugin{})) require.NoError(t, err) buff, err := template.Fetch(url, template.Options{ CustomizeFetch: func(req *http.Request) { req.URL.Path = "/info/api.json" req.URL.Host = "h" }, }) require.NoError(t, err) decoded, err := template.FromJSON(buff) require.NoError(t, err) result, err := template.QueryObject("Implements[].Name | [0]", decoded) require.NoError(t, err) require.Equal(t, "Instance", result) url = "unix://" + socketPath buff, err = template.Fetch(url, template.Options{ CustomizeFetch: func(req *http.Request) { req.URL.Path = "/info/functions.json" req.URL.Host = "h" }, }) require.NoError(t, err) server.Stop() } type exporter struct { flavor.Plugin } func (p *exporter) Funcs() []template.Function { return []template.Function{ { Name: "greater", Description: []string{"Returns true if a is greater than b"}, Func: func(a, b int) bool { return a > b }, }, { Name: "equal", Description: []string{"Returns true if a is same as b"}, Func: func(a, b string) bool { return a == b }, }, { Name: "join_token", Description: []string{"Returns the join token"}, Func: func() string { return "token" }, }, } } func TestFetchFunctionsFromPlugin(t *testing.T) { 
socketPath := tempSocket() url := "unix://" + socketPath server, err := StartPluginAtPath(socketPath, rpc_flavor.PluginServer(&exporter{&testing_flavor.Plugin{}})) require.NoError(t, err) buff, err := template.Fetch(url, template.Options{ CustomizeFetch: func(req *http.Request) { req.URL.Path = "/info/functions.json" req.URL.Host = "d" }, }) require.NoError(t, err) decoded, err := template.FromJSON(buff) require.NoError(t, err) list := decoded.(map[string]interface{})["base"].([]interface{}) require.Equal(t, 3, len(list)) result, err := template.QueryObject("[].Usage | [2]", list) require.NoError(t, err) require.Equal(t, "{{ join_token }}", result) server.Stop() }
{ "content_hash": "07dc312b47d0435e6fc9a2f5ef966870", "timestamp": "", "source": "github", "line_count": 119, "max_line_length": 107, "avg_line_length": 24.016806722689076, "alnum_prop": 0.6665500349895032, "repo_name": "thebsdbox/infrakit", "id": "95d3dbc9ec8c0508eb2f4a0f76426d2964e7c7a0", "size": "2858", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "pkg/rpc/server/info_test.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "419" }, { "name": "Go", "bytes": "3371082" }, { "name": "HCL", "bytes": "2075" }, { "name": "Makefile", "bytes": "14391" }, { "name": "Nix", "bytes": "652" }, { "name": "Ruby", "bytes": "2453" }, { "name": "Shell", "bytes": "30794" } ], "symlink_target": "" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (version 1.7.0_79) on Mon Jul 20 03:32:03 AEST 2015 --> <title>infodynamics.measures.mixed</title> <meta name="date" content="2015-07-20"> <link rel="stylesheet" type="text/css" href="../../../stylesheet.css" title="Style"> </head> <body> <h1 class="bar"><a href="../../../infodynamics/measures/mixed/package-summary.html" target="classFrame">infodynamics.measures.mixed</a></h1> <div class="indexContainer"> <h2 title="Interfaces">Interfaces</h2> <ul title="Interfaces"> <li><a href="ConditionalMutualInfoCalculatorMultiVariateWithDiscrete.html" title="interface in infodynamics.measures.mixed" target="classFrame"><i>ConditionalMutualInfoCalculatorMultiVariateWithDiscrete</i></a></li> <li><a href="ConditionalMutualInfoCalculatorMultiVariateWithDiscreteSource.html" title="interface in infodynamics.measures.mixed" target="classFrame"><i>ConditionalMutualInfoCalculatorMultiVariateWithDiscreteSource</i></a></li> <li><a href="MutualInfoCalculatorMultiVariateWithDiscrete.html" title="interface in infodynamics.measures.mixed" target="classFrame"><i>MutualInfoCalculatorMultiVariateWithDiscrete</i></a></li> </ul> <h2 title="Classes">Classes</h2> <ul title="Classes"> <li><a href="ConditionalMutualInfoCalculatorMultiVariateWithDiscreteSourceCommon.html" title="class in infodynamics.measures.mixed" target="classFrame">ConditionalMutualInfoCalculatorMultiVariateWithDiscreteSourceCommon</a></li> </ul> </div> </body> </html>
{ "content_hash": "27afdc35a56b5064d1787ddb4cce1957", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 228, "avg_line_length": 63.92, "alnum_prop": 0.7715894868585732, "repo_name": "kamir/WikiExplorer.NG", "id": "1c7f5ab10aa5228f54f423b0e2ab42c10c8b5c16", "size": "1598", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "artifacts/infodynamics-dist-1.3/javadocs/infodynamics/measures/mixed/package-frame.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "4056" }, { "name": "CSS", "bytes": "11582" }, { "name": "Clojure", "bytes": "9942" }, { "name": "HTML", "bytes": "6693361" }, { "name": "Java", "bytes": "5259976" }, { "name": "Julia", "bytes": "13274" }, { "name": "MATLAB", "bytes": "160725" }, { "name": "Python", "bytes": "27104" }, { "name": "R", "bytes": "18410" }, { "name": "Shell", "bytes": "6734" } ], "symlink_target": "" }
import nls = require('vs/nls'); import { TPromise } from 'vs/base/common/winjs.base'; import { RunOnceScheduler } from 'vs/base/common/async'; import lifecycle = require('vs/base/common/lifecycle'); import env = require('vs/base/common/platform'); import uri from 'vs/base/common/uri'; import { IAction, Action } from 'vs/base/common/actions'; import { KeyCode } from 'vs/base/common/keyCodes'; import keyboard = require('vs/base/browser/keyboardEvent'); import editorbrowser = require('vs/editor/browser/editorBrowser'); import editorcommon = require('vs/editor/common/editorCommon'); import { DebugHoverWidget } from 'vs/workbench/parts/debug/browser/debugHover'; import debugactions = require('vs/workbench/parts/debug/electron-browser/debugActions'); import debug = require('vs/workbench/parts/debug/common/debug'); import { IWorkspaceContextService } from 'vs/workbench/services/workspace/common/contextService'; import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation'; import { IContextMenuService } from 'vs/platform/contextview/browser/contextView'; import {Range} from 'vs/editor/common/core/range'; const HOVER_DELAY = 300; export class DebugEditorContribution implements debug.IDebugEditorContribution { private toDispose: lifecycle.IDisposable[]; private breakpointHintDecoration: string[]; private hoverWidget: DebugHoverWidget; private showHoverScheduler: RunOnceScheduler; private hideHoverScheduler: RunOnceScheduler; private hoverRange: Range; private hoveringOver: string; static getDebugEditorContribution(editor: editorcommon.ICommonCodeEditor): DebugEditorContribution { return <DebugEditorContribution>editor.getContribution(debug.EDITOR_CONTRIBUTION_ID); } constructor( private editor: editorbrowser.ICodeEditor, @debug.IDebugService private debugService: debug.IDebugService, @IWorkspaceContextService private contextService: IWorkspaceContextService, @IContextMenuService private contextMenuService: IContextMenuService, @IInstantiationService 
private instantiationService: IInstantiationService ) { this.breakpointHintDecoration = []; this.hoverWidget = new DebugHoverWidget(this.editor, this.debugService, this.instantiationService); this.toDispose = [this.hoverWidget]; this.showHoverScheduler = new RunOnceScheduler(() => this.showHover(this.hoverRange, this.hoveringOver, false), HOVER_DELAY); this.hideHoverScheduler = new RunOnceScheduler(() => this.hoverWidget.hide(), HOVER_DELAY); this.registerListeners(); } private getContextMenuActions(breakpoint: debug.IBreakpoint, uri: uri, lineNumber: number): TPromise<IAction[]> { const actions = []; if (breakpoint) { actions.push(this.instantiationService.createInstance(debugactions.RemoveBreakpointAction, debugactions.RemoveBreakpointAction.ID, debugactions.RemoveBreakpointAction.LABEL)); actions.push(this.instantiationService.createInstance(debugactions.EditConditionalBreakpointAction, debugactions.EditConditionalBreakpointAction.ID, debugactions.EditConditionalBreakpointAction.LABEL, this.editor, lineNumber)); actions.push(this.instantiationService.createInstance(debugactions.ToggleEnablementAction, debugactions.ToggleEnablementAction.ID, debugactions.ToggleEnablementAction.LABEL)); } else { actions.push(new Action( 'addBreakpoint', nls.localize('addBreakpoint', "Add Breakpoint"), null, true, () => this.debugService.addBreakpoints([{ uri, lineNumber }]) )); actions.push(this.instantiationService.createInstance(debugactions.AddConditionalBreakpointAction, debugactions.AddConditionalBreakpointAction.ID, debugactions.AddConditionalBreakpointAction.LABEL, this.editor, lineNumber)); } return TPromise.as(actions); } private registerListeners(): void { this.toDispose.push(this.editor.onMouseDown((e: editorbrowser.IEditorMouseEvent) => { if (e.target.type !== editorcommon.MouseTargetType.GUTTER_GLYPH_MARGIN || /* after last line */ e.target.detail) { return; } if (!this.debugService.getConfigurationManager().canSetBreakpointsIn(this.editor.getModel())) { return; } 
const lineNumber = e.target.position.lineNumber; const uri = this.editor.getModel().uri; if (e.event.rightButton || (env.isMacintosh && e.event.leftButton && e.event.ctrlKey)) { const anchor = { x: e.event.posx + 1, y: e.event.posy }; const breakpoint = this.debugService.getModel().getBreakpoints().filter(bp => bp.lineNumber === lineNumber && bp.source.uri.toString() === uri.toString()).pop(); this.contextMenuService.showContextMenu({ getAnchor: () => anchor, getActions: () => this.getContextMenuActions(breakpoint, uri, lineNumber), getActionsContext: () => breakpoint }); } else { const breakpoint = this.debugService.getModel().getBreakpoints() .filter(bp => bp.source.uri.toString() === uri.toString() && bp.lineNumber === lineNumber).pop(); if (breakpoint) { this.debugService.removeBreakpoints(breakpoint.getId()); } else { this.debugService.addBreakpoints([{ uri, lineNumber }]); } } })); this.toDispose.push(this.editor.onMouseMove((e: editorbrowser.IEditorMouseEvent) => { var showBreakpointHintAtLineNumber = -1; if (e.target.type === editorcommon.MouseTargetType.GUTTER_GLYPH_MARGIN && this.debugService.getConfigurationManager().canSetBreakpointsIn(this.editor.getModel())) { if (!e.target.detail) { // is not after last line showBreakpointHintAtLineNumber = e.target.position.lineNumber; } } this.ensureBreakpointHintDecoration(showBreakpointHintAtLineNumber); })); this.toDispose.push(this.editor.onMouseLeave((e: editorbrowser.IEditorMouseEvent) => { this.ensureBreakpointHintDecoration(-1); })); this.toDispose.push(this.debugService.onDidChangeState(state => this.onDebugStateUpdate(state))); // hover listeners & hover widget this.toDispose.push(this.editor.onMouseDown((e: editorbrowser.IEditorMouseEvent) => this.onEditorMouseDown(e))); this.toDispose.push(this.editor.onMouseMove((e: editorbrowser.IEditorMouseEvent) => this.onEditorMouseMove(e))); this.toDispose.push(this.editor.onMouseLeave((e: editorbrowser.IEditorMouseEvent) => this.hoverWidget.hide())); 
this.toDispose.push(this.editor.onKeyDown((e: keyboard.IKeyboardEvent) => this.onKeyDown(e))); this.toDispose.push(this.editor.onDidChangeModel(() => this.hideHoverWidget())); this.toDispose.push(this.editor.onDidScrollChange(() => this.hideHoverWidget)); } public getId(): string { return debug.EDITOR_CONTRIBUTION_ID; } public showHover(range: Range, hoveringOver: string, focus: boolean): TPromise<void> { return this.hoverWidget.showAt(range, hoveringOver, focus); } private ensureBreakpointHintDecoration(showBreakpointHintAtLineNumber: number): void { var newDecoration: editorcommon.IModelDeltaDecoration[] = []; if (showBreakpointHintAtLineNumber !== -1) { newDecoration.push({ options: DebugEditorContribution.BREAKPOINT_HELPER_DECORATION, range: { startLineNumber: showBreakpointHintAtLineNumber, startColumn: 1, endLineNumber: showBreakpointHintAtLineNumber, endColumn: 1 } }); } this.breakpointHintDecoration = this.editor.deltaDecorations(this.breakpointHintDecoration, newDecoration); } private onDebugStateUpdate(state: debug.State): void { if (state !== debug.State.Stopped) { this.hideHoverWidget(); } this.contextService.updateOptions('editor', { hover: state !== debug.State.Stopped }); } private hideHoverWidget(): void { if (!this.hideHoverScheduler.isScheduled() && this.hoverWidget.isVisible) { this.hideHoverScheduler.schedule(); } this.showHoverScheduler.cancel(); this.hoveringOver = null; } // hover business private onEditorMouseDown(mouseEvent: editorbrowser.IEditorMouseEvent): void { if (mouseEvent.target.type === editorcommon.MouseTargetType.CONTENT_WIDGET && mouseEvent.target.detail === DebugHoverWidget.ID) { return; } this.hideHoverWidget(); } private onEditorMouseMove(mouseEvent: editorbrowser.IEditorMouseEvent): void { if (this.debugService.state !== debug.State.Stopped) { return; } const targetType = mouseEvent.target.type; const stopKey = env.isMacintosh ? 
'metaKey' : 'ctrlKey'; if (targetType === editorcommon.MouseTargetType.CONTENT_WIDGET && mouseEvent.target.detail === DebugHoverWidget.ID && !(<any>mouseEvent.event)[stopKey]) { // mouse moved on top of debug hover widget return; } if (targetType === editorcommon.MouseTargetType.CONTENT_TEXT) { const wordAtPosition = this.editor.getModel().getWordAtPosition(mouseEvent.target.range.getStartPosition()); if (wordAtPosition && this.hoveringOver !== wordAtPosition.word) { this.hoverRange = mouseEvent.target.range; this.hoveringOver = wordAtPosition.word; this.showHoverScheduler.schedule(); } } else { this.hideHoverWidget(); } } private onKeyDown(e: keyboard.IKeyboardEvent): void { const stopKey = env.isMacintosh ? KeyCode.Meta : KeyCode.Ctrl; if (e.keyCode !== stopKey) { // do not hide hover when Ctrl/Meta is pressed this.hideHoverWidget(); } } // end hover business private static BREAKPOINT_HELPER_DECORATION: editorcommon.IModelDecorationOptions = { glyphMarginClassName: 'debug-breakpoint-hint-glyph', stickiness: editorcommon.TrackedRangeStickiness.NeverGrowsWhenTypingAtEdges }; public dispose(): void { this.toDispose = lifecycle.dispose(this.toDispose); } }
{ "content_hash": "cc5241eb6ba673d299eb30ba22ceb797", "timestamp": "", "source": "github", "line_count": 224, "max_line_length": 230, "avg_line_length": 42.45982142857143, "alnum_prop": 0.7565976238040164, "repo_name": "bsmr-x-script/vscode", "id": "3b4f88d8d4331b37cb72c0434e3a60ed6763b205", "size": "9862", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/vs/workbench/parts/debug/browser/debugEditorContribution.ts", "mode": "33188", "license": "mit", "language": [ { "name": "AppleScript", "bytes": "2179" }, { "name": "Batchfile", "bytes": "2345" }, { "name": "C", "bytes": "818" }, { "name": "C#", "bytes": "1152" }, { "name": "C++", "bytes": "1000" }, { "name": "CSS", "bytes": "438701" }, { "name": "Clojure", "bytes": "1206" }, { "name": "CoffeeScript", "bytes": "590" }, { "name": "F#", "bytes": "634" }, { "name": "GLSL", "bytes": "330" }, { "name": "Go", "bytes": "572" }, { "name": "Groovy", "bytes": "3928" }, { "name": "HTML", "bytes": "35640" }, { "name": "Java", "bytes": "576" }, { "name": "JavaScript", "bytes": "9023425" }, { "name": "Lua", "bytes": "252" }, { "name": "Makefile", "bytes": "245" }, { "name": "Objective-C", "bytes": "1387" }, { "name": "PHP", "bytes": "802" }, { "name": "Perl", "bytes": "857" }, { "name": "PowerShell", "bytes": "1432" }, { "name": "Python", "bytes": "1531" }, { "name": "R", "bytes": "362" }, { "name": "Ruby", "bytes": "1703" }, { "name": "Rust", "bytes": "275" }, { "name": "Shell", "bytes": "8336" }, { "name": "Swift", "bytes": "220" }, { "name": "TypeScript", "bytes": "10235910" }, { "name": "Visual Basic", "bytes": "893" } ], "symlink_target": "" }
package com.example.young.backup;

import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;

import android.annotation.SuppressLint;
import android.app.Activity;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ProgressBar;
import android.widget.TextView;
import android.widget.Toast;

/**
 * Activity that starts a multi-threaded file transfer and reports its progress
 * in the UI.
 *
 * NOTE(review): despite the "Upload" naming, the visible logic downloads: it
 * reads the Content-Length of a remote APK download URL and spawns
 * FileUploadThread workers against it — confirm intent with the author.
 *
 * @author young
 * @2017-6-30
 */
public class Upload extends Activity implements OnClickListener {

    private static final String TAG = Upload.class.getSimpleName();

    /** TextView that displays the transfer progress percentage. */
    private TextView mMessageView;
    /** ProgressBar that displays the transfer progress. */
    private ProgressBar mProgressbar;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_upload);
        findViewById(R.id.upload_btn).setOnClickListener(this);
        mMessageView = (TextView) findViewById(R.id.upload_message);
        mProgressbar = (ProgressBar) findViewById(R.id.upload_progress);
    }

    @Override
    public void onClick(View v) {
        if (v.getId() == R.id.upload_btn) {
            doUpload();
        }
    }

    /**
     * Handler used to update the UI from the worker thread.
     * NOTE(review): annotated @SuppressLint("HandlerLeak") — the anonymous
     * Handler holds an implicit reference to the Activity and can leak it
     * while the background thread is alive.
     */
    @SuppressLint("HandlerLeak")
    Handler mHandler = new Handler() {

        @Override
        public void handleMessage(Message msg) {
            mProgressbar.setProgress(msg.getData().getInt("size"));
            float temp = (float) mProgressbar.getProgress()
                    / (float) mProgressbar.getMax();
            int progress = (int) (temp * 100);
            if (progress == 100) {
                Toast.makeText(Upload.this, "上传完成!", Toast.LENGTH_LONG).show();
            }
            mMessageView.setText("上传进度:" + progress + " %");
        }
    };

    /**
     * Prepares the transfer: ensures the working directory on external storage
     * exists, resets the progress bar, and starts the worker thread.
     */
    private void doUpload() {
        // Build the working directory path on the SD card.
        String path = Environment.getExternalStorageDirectory() + "/amosdownload/";
        File file = new File(path);
        // Create the directory on the SD card if it does not exist yet.
        if (!file.exists()) {
            file.mkdir();
        }
        // Reset the progress bar before starting.
        mProgressbar.setProgress(0);
        // For simplicity the URL and file name are hard-coded here; in practice
        // they could be derived from the HTTP headers.
        String uploadUrl = "http://gdown.baidu.com/data/wisegame/91319a5a1dfae322/baidu_16785426.apk";
        String fileName = "baidu_16785426.apk";
        int threadNum = 5;
        String filepath = path + fileName;
        Log.d(TAG, "upload file path:" + filepath);
        uploadTask task = new uploadTask(uploadUrl, threadNum, filepath);
        task.start();
    }

    /**
     * Multi-threaded file transfer worker: splits the remote file into
     * {@code threadNum} blocks, hands each block to a FileUploadThread
     * (declared elsewhere in the project), and polls their progress once per
     * second until all are complete.
     */
    class uploadTask extends Thread {

        private String uploadUrl;// remote URL of the transfer
        private int threadNum;// number of worker threads to start
        private String filePath;// local file path
        private int blockSize;// number of bytes handled by each thread

        public uploadTask(String uploadUrl, int threadNum, String fileptah) {
            this.uploadUrl = uploadUrl;
            this.threadNum = threadNum;
            this.filePath = fileptah;
        }

        @Override
        public void run() {
            FileUploadThread[] threads = new FileUploadThread[threadNum];
            try {
                URL url = new URL(uploadUrl);
                Log.d(TAG, "download file http path:" + uploadUrl);
                URLConnection conn = url.openConnection();
                // Read the total size of the remote file.
                int fileSize = conn.getContentLength();
                if (fileSize <= 0) {
                    System.out.println("读取文件失败");
                    return;
                }
                // The progress bar's maximum is the total file size in bytes.
                mProgressbar.setMax(fileSize);
                // Compute how many bytes each thread handles (round up so the
                // blocks cover the whole file).
                blockSize = (fileSize % threadNum) == 0 ? fileSize / threadNum
                        : fileSize / threadNum + 1;
                Log.d(TAG, "fileSize:" + fileSize + " blockSize:"+blockSize);

                File file = new File(filePath);
                for (int i = 0; i < threads.length; i++) {
                    // Start one thread per block; block indices are 1-based.
                    threads[i] = new FileUploadThread(url, file, blockSize, (i + 1));
                    threads[i].setName("Thread:" + i);
                    threads[i].start();
                }
                boolean isfinished = false;
                int uploadedAllSize = 0;
                while (!isfinished) {
                    isfinished = true;
                    // Sum the bytes transferred so far by every thread.
                    uploadedAllSize = 0;
                    for (int i = 0; i < threads.length; i++) {
                        uploadedAllSize += threads[i].getUploadLength();
                        if (!threads[i].UpIsCompleted()) {
                            isfinished = false;
                        }
                    }
                    // Notify the handler so it can refresh the UI widgets.
                    Message msg = new Message();
                    msg.getData().putInt("size", uploadedAllSize);
                    mHandler.sendMessage(msg);
                    // Log.d(TAG, "current uploadSize:" + downloadedAllSize);
                    Thread.sleep(1000);// sleep one second before polling again
                }
                Log.d(TAG, " all of uploadSize:" + uploadedAllSize);
            } catch (MalformedURLException e) {
                e.printStackTrace();
            } catch (IOException e) {
                e.printStackTrace();
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }
}
{ "content_hash": "7f8595e26ce9b0ff032fdc552ee97c25", "timestamp": "", "source": "github", "line_count": 179, "max_line_length": 102, "avg_line_length": 32.055865921787706, "alnum_prop": 0.5428720808644127, "repo_name": "AntikYoung/Backup", "id": "92ea5b970d5da97e4c10596e9496331069f18e9e", "size": "6174", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/main/java/com/example/young/backup/Upload.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "67336" } ], "symlink_target": "" }
package com.techshroom.unplanned.test;

import static org.junit.Assert.*;

import java.util.function.Supplier;

import org.junit.Test;

import com.flowpowered.math.vector.Vector4f;
import com.flowpowered.math.vector.Vector4i;

import com.techshroom.unplanned.core.util.Color;

/**
 * Unit tests for {@link Color}: hex-string parsing (with and without '#'),
 * bounds checking, integer/vector conversions, darker/lighter adjustment,
 * and the HSL round trip.
 */
public class ColorTest {

    @Test
    public void testFromString() throws Exception {
        // Each row: expected r, g, b, a channels for the matching hex input.
        String[] inputs = { "FFFFFF", "123456", "12345678", "FFF", "123", "1234" };
        int[][] channels = {
                { 0xFF, 0xFF, 0xFF, 0xFF },
                { 0x12, 0x34, 0x56, 0xFF },
                { 0x12, 0x34, 0x56, 0x78 },
                { 0xFF, 0xFF, 0xFF, 0xFF },
                { 0x11, 0x22, 0x33, 0xFF },
                { 0x11, 0x22, 0x33, 0x44 } };
        for (int i = 0; i < inputs.length; i++) {
            int[] ch = channels[i];
            assertColorString(ch[0], ch[1], ch[2], ch[3], inputs[i]);
        }
    }

    @Test
    public void testFromStringInvalid() throws Exception {
        // Non-hex characters and unsupported lengths must be rejected.
        for (String bad : new String[] { "FOO", "FOOF", "12345" }) {
            assertCreateFail(() -> Color.fromString(bad));
        }
    }

    @Test
    public void testOutOfBounds() throws Exception {
        // Each channel in turn above 1 and below 0 must be rejected.
        float[][] badChannels = {
                { 5, 1, 1, 1 }, { 1, 5, 1, 1 }, { 1, 1, 5, 1 }, { 1, 1, 1, 5 },
                { -1, 1, 1, 1 }, { 1, -1, 1, 1 }, { 1, 1, -1, 1 }, { 1, 1, 1, -1 } };
        for (float[] ch : badChannels) {
            assertCreateFail(() -> Color.fromFloat(ch[0], ch[1], ch[2], ch[3]));
        }
    }

    @Test
    public void testConversions() throws Exception {
        assertColors(0xFF_FF_FF_FF, Color.WHITE);
        assertColors(0xFF_00_FF_00, Color.GREEN);
        assertColors(0xFF_FF_00_00, Color.BLUE);
        assertColors(0xFF_00_00_00, Color.BLACK);
        assertColors(0x78_56_34_12, Color.fromString("12345678"));
    }

    /** Checks the ABGR int, int-vector and float-vector views of a color. */
    private void assertColors(int abgr, Color c) {
        assertEquals(abgr, c.asABGRInt());
        int alpha = (abgr >>> 24) & 0xFF;
        int blue = (abgr >>> 16) & 0xFF;
        int green = (abgr >>> 8) & 0xFF;
        int red = abgr & 0xFF;
        assertEquals(Vector4i.from(red, green, blue, alpha), c.asVector4i());
        assertEquals(Vector4f.from(red, green, blue, alpha).div(255), c.asVector4f());
    }

    @Test
    public void testFromFloat() throws Exception {
        assertColor(0, 0, 0, 0, Color.fromFloat(0, 0, 0, 0));
        assertColor(0xFF, 0xFF, 0xFF, 0xFF, Color.fromFloat(1, 1, 1, 1));
    }

    @Test
    public void testFromInt() throws Exception {
        assertColor(0, 0, 0, 0, Color.fromInt(0, 0, 0, 0));
        assertColor(0xFF, 0xFF, 0xFF, 0xFF, Color.fromInt(0xFF, 0xFF, 0xFF, 0xFF));
    }

    @Test
    public void testDarker() throws Exception {
        darkerCheck(Color.WHITE);
        darkerCheck(Color.BLUE);
        darkerCheck(Color.RED);
    }

    /** darker() must lower at least one component and raise none. */
    private void darkerCheck(Color color) {
        int[] before = color.asVector4i().toArray();
        int[] after = color.darker().asVector4i().toArray();
        boolean lowered = false;
        for (int idx = 0; idx < before.length; idx++) {
            if (after[idx] > before[idx]) {
                fail("darker() raised values on some components.");
            }
            lowered |= after[idx] < before[idx];
        }
        assertTrue("darker() made no changes", lowered);
    }

    @Test
    public void testLighter() throws Exception {
        lighterCheck(Color.BLACK);
        lighterCheck(Color.BLUE);
        lighterCheck(Color.RED);
    }

    /** lighter() must raise at least one component and lower none. */
    private void lighterCheck(Color color) {
        int[] before = color.asVector4i().toArray();
        int[] after = color.lighter().asVector4i().toArray();
        boolean raised = false;
        for (int idx = 0; idx < before.length; idx++) {
            if (after[idx] < before[idx]) {
                fail("lighter() lowered values on some components.");
            }
            raised |= after[idx] > before[idx];
        }
        assertTrue("lighter() made no changes", raised);
    }

    @Test
    public void testHslCycle() throws Exception {
        // Converting to HSL and back must be lossless for these colors.
        for (Color c : new Color[] { Color.BLACK, Color.BLUE, Color.RED }) {
            assertEquals(c, Color.fromHsl(c.toHsl()));
        }
    }

    /** Parses the hex string both bare and with a leading '#'. */
    private void assertColorString(int r, int g, int b, int a, String hex) {
        assertColor(r, g, b, a, Color.fromString(hex));
        assertColor(r, g, b, a, Color.fromString("#" + hex));
    }

    private void assertColor(int r, int g, int b, int a, Color c) {
        assertEquals(r, c.getRed());
        assertEquals(g, c.getGreen());
        assertEquals(b, c.getBlue());
        assertEquals(a, c.getAlpha());
    }

    /** Expects the factory call to throw IllegalArgumentException. */
    private void assertCreateFail(Supplier<Color> create) {
        try {
            create.get();
        } catch (IllegalArgumentException expected) {
            // ok
            return;
        }
        fail("Got a color.");
    }
}
{ "content_hash": "222e40e1ef1342b6e2b308a405f43a01", "timestamp": "", "source": "github", "line_count": 151, "max_line_length": 83, "avg_line_length": 34.13907284768212, "alnum_prop": 0.5885548011639186, "repo_name": "TechShroom/UnplannedDescent", "id": "0a2facf62c9c89ca745dc01abb1fcc4628db69db", "size": "6411", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "api/src/test/java/com/techshroom/unplanned/test/ColorTest.java", "mode": "33188", "license": "mit", "language": [ { "name": "GLSL", "bytes": "1708" }, { "name": "Java", "bytes": "842852" }, { "name": "Kotlin", "bytes": "14742" }, { "name": "Shell", "bytes": "1107" } ], "symlink_target": "" }
""" Cohorte Web Admin Servlet :authors: Bassem Debbabi :copyright: Copyright 2014, isandlaTech :license: Apache Software License 2.0 """ # iPOPO decorators from pelix.ipopo.decorators import ComponentFactory, Provides, Property, Instantiate, \ Validate, Invalidate, Requires, RequiresMap, Bind, BindField, UnbindField import pelix.remote # Herald import herald import herald.beans as beans # Cohorte import cohorte.composer import cohorte.monitor import logging import threading import json, time, os try: # Python 3 import urllib.parse as urlparse except ImportError: # Python 2 import urlparse _logger = logging.getLogger("webadmin.webadmin") # collecting information SUBJECT_GET_HTTP = "cohorte/shell/agent/get_http" """ Signal to request the ports to access HTTP services """ """ TODO: should have a local cache of all information. """ # Name the component factory @ComponentFactory("cohorte-webadmin-factory") @Provides(['pelix.http.servlet', herald.SERVICE_DIRECTORY_LISTENER]) @Property('_path', 'pelix.http.path', "/admin") # Consume a single Herald Directory service @Requires("_directory", herald.SERVICE_DIRECTORY) @Requires('_herald', herald.SERVICE_HERALD) # Consume an Isolate Composer service @RequiresMap("_icomposers", cohorte.composer.SERVICE_COMPOSER_ISOLATE, 'endpoint.framework.uuid', optional=True, allow_none=False) @Requires("_icomposerlocal", cohorte.composer.SERVICE_COMPOSER_ISOLATE, optional=True, spec_filter="(!(service.imported=*))") @Requires("_isolates", cohorte.composer.SERVICE_COMPOSER_ISOLATE, aggregate=True, optional=True) # Reject the export the servlet specification @Property('_reject', pelix.remote.PROP_EXPORT_REJECT, ['pelix.http.servlet', herald.SERVICE_DIRECTORY_LISTENER]) @Instantiate('webadmin') class WebAdmin(object): """ A component that provides a web interface for check spelling words. 
""" def __init__(self): """ Defines class members """ self._path = None # herald directory service self._directory = None self._herald = None # isolate composer service self._icomposers = {} self._icomposerlocal = None self._isolates = [] # pooling related states self._nodes_list_lastupdate = None self._isolates_list_lastupdate = {} self._components_list_lastupdate = {} self._tabs_list_lastupdate = None """ API ---------------------------------------------------------------------------------------------------------------- """ def get_nodes(self): """ Get the list of nodes of all the system. Return example: { "meta" : { "code": 200, "lastupdate" : "1411979225.65", "count": 2 }, "nodes": [ { "uid": "41110b1d-b510-4e51-9945-a752da04a16d", "name": "central" }, { "uid": "56e5b100-c8a2-4bd8-a818-30edaf9a8fe9", "name": "raspberry-pi-1" } ] }""" nodes = {"meta": {}, "nodes": []} lp = self._directory.get_local_peer() nodes["nodes"].append({"uid": lp.node_uid, "name": lp.node_name}) count = 1 for p in self._directory.get_peers(): # if nodes["nodes"]["uid"] is None: found = False for i in nodes["nodes"]: if i["uid"] == p.node_uid: found = True if found == False: nodes["nodes"].append({"uid": p.node_uid, "name": p.node_name}) count += 1 if self._nodes_list_lastupdate is None: self._nodes_list_lastupdate = time.time() nodes["meta"]["code"] = 200 nodes["meta"]["lastupdate"] = self._nodes_list_lastupdate nodes["meta"]["count"] = count return nodes def get_isolates(self): """ Get the list of isolates of all the system. 
Return example: { "meta" : { "code": 200, "count": 2 }, "isolates": [ { "uid": "6c4cd65b-b501-41db-ab40-d4cf612b2ffe", "name": "webadmin-isolate", "node_uid": "41110b1d-b510-4e51-9945-a752da04a16d", "node_name": "central" }, { "uid": "03ff839a-df24-4bdf-b734-9fac1c886c65", "name": "spellcheck-isolate", "node_uid": "41110b1d-b510-4e51-9945-a752da04a16d", "node_name": "central" } ] }""" isolates = {"meta": {}, "isolates": []} lp = self._directory.get_local_peer() isolates["isolates"].append({"uid": lp.uid, "name": lp.name, "node_uid": lp.node_uid, "node_name": lp.node_name}) count = 1 for p in self._directory.get_peers(): isolates["isolates"].append({"uid": p.uid, "name": p.name, "node_uid": p.node_uid, "node_name": p.node_name}) count += 1 isolates["meta"]["code"] = 200 isolates["meta"]["count"] = count return isolates def get_components(self): """ Get All Components. Return example: { "meta": { "code": 200, "count": 2 } "components": [ { "name": "spell_check_client", "factory": "spell_check_client_factory", "language": "python", "isolate_uid": "03ff839a-df24-4bdf-b734-9fac1c886c65", "isolate_name": "spellcheck-isolate", "node_uid": "41110b1d-b510-4e51-9945-a752da04a16d", "node_name": "central", }, { "name": "spell_dictionray_FR", "factory": "spell_dictionary_FR_factory", "language": "python", "isolate_uid": "03ff839a-df24-4bdf-b734-9fac1c886c65", "isolate_name": "spellcheck-isolate", "node_uid": "41110b1d-b510-4e51-9945-a752da04a16d", "node_name": "central", } ] } """ components = {"meta": {}, "components": []} count = 0 for rcomposer in self._icomposers.values(): uid = rcomposer.get_isolate_uid() info = rcomposer.get_isolate_info() #_logger.critical('Getting info: %s -- %s', info, info.components) for com in info.components: components["components"].append({"name": com.name, "factory": com.factory, "language": com.language, "isolate_uid": uid, "isolate_name": info.name}) count += 1 if self._icomposerlocal is not None: for c in 
self._icomposerlocal.get_isolate_info().components: components["components"].append({"name": c.name, "factory": c.factory, "language": c.language, "isolate_uid": self._icomposerlocal.get_isolate_uid(), "isolate_name": self._icomposerlocal.get_isolate_info().name}) count += 1 components["meta"]["code"] = 200 components["meta"]["count"] = count return components def get_node_detail(self, node_uid): """ Get Node details. Return example: { "meta" : { "node": "41110b1d-b510-4e51-9945-a752da04a16d" "code": 200 }, "node": { "name": "central", "nbr_isolates": 3 } }""" node = {"meta": {}, "node": {}} lp = self._directory.get_local_peer() count = 0 if lp.node_uid == node_uid: node["node"]["name"] = lp.node_name count = 1 for p in self._directory.get_peers(): if p.node_uid == node_uid: node["node"]["name"] = p.node_name count += 1 node["node"]["nbr_isolates"] = count node["meta"]["node"] = node_uid node["meta"]["code"] = 200 return node def get_isolate_http_port(self, uid): msg = beans.Message(SUBJECT_GET_HTTP) reply = self._herald.send(uid, msg) return reply.content['http.port'] def get_isolate_detail(self, isolate_uid): """ Get Isolate details. 
Return example: { "meta" : { "isolate": "03ff839a-df24-4bdf-b734-9fac1c886c65" "code": 200 }, "isolate": { "name": "spellcheck-isolate", "type": "application dynamic isolate", "nbr_components": 3, "node_uid": "", "node_name": "", "http_port": 9000, "http_access" : "localhost", "shell_port": 9001 } }""" isolate = {"meta": {}, "isolate": {}} isolate["isolate"]["type"] = "app-dynamic-isolate" lp = self._directory.get_local_peer() if lp.uid == isolate_uid: isolate["isolate"]["name"] = lp.name isolate["isolate"]["node_uid"] = lp.node_uid isolate["isolate"]["node_name"] = lp.node_name isolate["isolate"]["http_port"] = self.get_isolate_http_port(isolate_uid) try: http_access = self._directory.get_local_peer().get_access("http").host if http_access.startswith("::ffff:"): http_access = http_access[7:] except KeyError: http_access = "" isolate["isolate"]["http_access"] = http_access isolate["isolate"]["shell_port"] = 0 if lp.name == "cohorte.internals.forker": isolate["isolate"]["type"] = "cohorte-isolate" else: for p in self._directory.get_peers(): if p.uid == isolate_uid: isolate["isolate"]["name"] = p.name isolate["isolate"]["node_uid"] = p.node_uid isolate["isolate"]["node_name"] = p.node_name isolate["isolate"]["http_port"] = self.get_isolate_http_port(isolate_uid) try: http_access = p.get_access("http").host if http_access.startswith("::ffff:"): http_access = http_access[7:] except KeyError: http_access = None isolate["isolate"]["http_access"] = http_access isolate["isolate"]["shell_port"] = 0 if p.name == "cohorte.internals.forker": isolate["isolate"]["type"] = "cohorte-isolate" break if isolate["isolate"]["type"] == "cohorte-isolate": isolate["isolate"]["nbr_components"] = -1 else: count = 0 try: for c in self._icomposers.keys(): if self._icomposers.get(c).get_isolate_uid() == isolate_uid: comps = self._icomposers.get(c).get_isolate_info().components for com in comps: count += 1 if self._icomposerlocal is not None: if self._icomposerlocal.get_isolate_uid() == 
isolate_uid: for c in self._icomposerlocal.get_isolate_info().components: count += 1 except: pass isolate["isolate"]["nbr_components"] = count isolate["meta"]["isolate"] = isolate_uid isolate["meta"]["code"] = 200 return isolate def get_component_detail(self, component_name): """ Get Isolate details. Return example: { "meta" : { "component": "spell_check_client" "code": 200 }, "component": { "factory": "spell_check_client_factory", "language": "python", "isolate_uid": "03ff839a-df24-4bdf-b734-9fac1c886c65", "isolate_name": "spellcheck-isolate", "bundle_name": "spell_checker", "bundle_version": "1.0.0", "properties": { "p1": "v1", "p2": "v2" } } }""" component = {"meta": {}, "component": {}} if self._icomposerlocal is not None: for c in self._icomposerlocal.get_isolate_info().components: if c.name == component_name: component["component"]["factory"] = c.factory component["component"]["language"] = c.language component["component"]["isolate_uid"] = self._icomposerlocal.get_isolate_uid() component["component"]["isolate_name"] = self._icomposerlocal.get_isolate_info().name component["component"]["bundle_name"] = c.bundle_name component["component"]["bundle_version"] = c.bundle_version component["component"]["properties"] = {} component["component"]["properties"] = c.properties break for c in self._icomposers.keys(): comps = self._icomposers.get(c).get_isolate_info().components for com in comps: if com.name == component_name: component["component"]["factory"] = com.factory component["component"]["language"] = com.language component["component"]["isolate_uid"] = self._icomposers.get(c).get_isolate_uid() component["component"]["isolate_name"] = self._icomposers.get(c).get_isolate_info().name component["component"]["bundle_name"] = com.bundle_name component["component"]["bundle_version"] = com.bundle_version component["component"]["properties"] = {} component["component"]["properties"] = com.properties break component["meta"]["code"] = 200 component["meta"]["component"] = 
component_name return component def get_node_isolates(self, node_uid): """ Get the list of isolates of one particular Node. Return example: { "meta" : { "node": "41110b1d-b510-4e51-9945-a752da04a16d" "code": 200, "count": 2 }, "isolates": [ { "uid": "6c4cd65b-b501-41db-ab40-d4cf612b2ffe", "name": "webadmin-isolate", "type": "app-dynamic-isolate" }, { "uid": "03ff839a-df24-4bdf-b734-9fac1c886c65", "name": "spellcheck-isolate", "type": "cohorte-isolate" } ] }""" isolates = {"meta": {}, "isolates": []} lp = self._directory.get_local_peer() count = 0 if lp.node_uid == node_uid: if lp.name == "cohorte.internals.forker": itype = "cohorte-isolate" else: itype = "app-dynamic-isolate" isolates["isolates"].append({"uid": lp.uid, "name": lp.name, "type": itype}) count = 1 for p in self._directory.get_peers(): if p.node_uid == node_uid: if p.name == "cohorte.internals.forker": itype = "cohorte-isolate" else: itype = "app-dynamic-isolate" isolates["isolates"].append({"uid": p.uid, "name": p.name, "type": itype}) count += 1 isolates["meta"]["node"] = node_uid isolates["meta"]["code"] = 200 isolates["meta"]["count"] = count return isolates def get_isolate_components(self, isolate_uid): """ Get Components of one particular Isolate { "meta": { "isolate": "50684926acb4387d0f007ced" "code": 200, "count": 3 } "components": [ { "name": "spell_dictionary_FR", "factory": "spell_dictionary_FR_factory", "language": "python" }, { "name": "spell_check", "factory": "spell_check_factory", "language": "python" }, { "name": "spell_client", "factory": "spell_client_factory", "language": "python" } ] } """ components = {"meta": {}, "components": []} count = 0 try: for c in self._icomposers.keys(): if self._icomposers.get(c) is not None: if self._icomposers.get(c).get_isolate_uid() == isolate_uid: comps = self._icomposers.get(c).get_isolate_info().components for com in comps: components["components"].append(dict(name=com.name, factory=com.factory, language=com.language)) count += 1 if 
self._icomposerlocal is not None: if self._icomposerlocal.get_isolate_uid() == isolate_uid: for c in self._icomposerlocal.get_isolate_info().components: components["components"].append(dict(name=c.name, factory=c.factory, language=c.language)) count += 1 except: pass components["meta"]["isolate"] = isolate_uid components["meta"]["code"] = 200 components["meta"]["count"] = count return components """ Actions------------------------------------------------------------------------------------------------------------- """ def killall_nodes(self): """ Safely destroy all nodes { "meta": { "code": 200 }, "status": { "code": 0, "description": "Node successfully destroyed" } } """ status = {"meta": {}, "status": {}} status["meta"]["code"] = 200 msg = beans.Message(cohorte.monitor.SIGNAL_STOP_PLATFORM) try: # send to other monitors self._herald.fire_group('monitors', msg) except: pass try: msg2 = beans.MessageReceived(msg.uid, msg.subject, msg.content, "local", "local", "local") threading.Thread(target=self._herald.handle_message, args=[msg2]).start() status["status"]["code"] = 0 status["status"]["description"] = "All nodes successfully destroyed" except: # send to local monitor status["status"]["code"] = 1 status["status"]["description"] = "Error destroying all nodes" return status def kill_node(self, node_uid): """ Safely destroys the identified node { "meta": { "node": "41110b1d-b510-4e51-9945-a752da04a16d", "code": 200 }, "status": { "code": 0, "description": "Node successfully destroyed" } } """ status = {"meta": {}, "status": {}} status["meta"]["node"] = node_uid status["meta"]["code"] = 200 # get its "cohorte.internals.forker" UID isolates = self.get_node_isolates(node_uid) forker_uid = None for i in isolates["isolates"]: if i["name"] == "cohorte.internals.forker": forker_uid = i["uid"] msg = beans.Message(cohorte.monitor.SIGNAL_STOP_NODE) try: self._herald.fire(forker_uid, msg) except KeyError: # if forker is the local one, send the message locally lp = 
self._directory.get_local_peer() if lp.node_uid == node_uid: if lp.name == "cohorte.internals.forker": msg2 = beans.MessageReceived(msg.uid, msg.subject, msg.content, "local", "local", "local") threading.Thread(target=self._herald.handle_message, args=[msg2]).start() status["status"]["code"] = 0 status["status"]["description"] = "Node successfully destroyed" return status def kill_isolate(self, isolate_uid): """ Safely destroys the identified isolate { "meta": { "isolate": "41110b1d-b510-4e51-9945-a752da04a16d", "code": 200 }, "status": { "code": 0, "description": "Isolate successfully destroyed" } } """ status = {"meta": {}, "status": {}} status["meta"]["isolate"] = isolate_uid status["meta"]["code"] = 200 # STOP ISOLATE # fire (uid, cohorte.monitor.SIGNAL_STOP_ISOLATE) msg = beans.Message(cohorte.monitor.SIGNAL_STOP_ISOLATE) self._herald.fire(isolate_uid, msg) status["status"]["code"] = 0 status["status"]["description"] = "Isolate successfully destroyed" return status """ Polling------------------------------------------------------------------------------------------------------------- """ def get_nodes_lastupdate(self): nodes = {"meta": {}} nodes["meta"]["list"] = "nodes" nodes["meta"]["code"] = 200 nodes["meta"]["lastupdate"] = self._nodes_list_lastupdate return nodes def peer_registered(self, peer): self._nodes_list_lastupdate = time.time() def peer_updated(self, peer, access_id, data, previous): self._nodes_list_lastupdate = time.time() def peer_unregistered(self, peer): self._nodes_list_lastupdate = time.time() """ Resources----------------------------------------------------------------------------------------------------------- """ def root_dir(self): # pragma: no cover return os.path.abspath(os.path.dirname(__file__)) def get_file(self, filename): # pragma: no cover try: src = os.path.join(self.root_dir(), filename) with open(src, 'rb') as fp: return fp.read() except IOError as exc: return str(exc) def load_resource(self, path, request, response): 
mimetypes = { ".css": "text/css", ".html": "text/html", ".js": "application/javascript", ".jpeg": "image/jpeg", ".png": "image/png", ".gif": "image/gif" } complete_path = os.path.join(self.root_dir(), path) ext = os.path.splitext(path)[1] mimetype = mimetypes.get(ext, "text/html") content = self.get_file(complete_path) return response.send_content(200, content, mimetype) """ GUI ---------------------------------------------------------------------------------------------------------------- """ def get_tabs(self): """ Get the list of active tabs of the GUI. Return example: { "meta" : { "code": 200, "lastupdate" : "1411979225.65", "count": 1 }, "tabs": [ { "name": "Dashboard", "icon": "fa-dashboard", "page": "ajax/dashboard.html" } ] }""" tabs = {"meta": {}, "tabs": []} tabs["tabs"].append({"name": "Dashboard", "icon": "fa-dashboard", "page": "ajax/dashboard.html"}) tabs["tabs"].append({"name": "Global view", "icon": "fa-sitemap", "page": "ajax/globalview.html"}) #tabs["tabs"].append({"name": "Log", "icon": "fa-desktop", "page": "ajax/log.html"}) tabs["meta"]["code"] = 200 tabs["meta"]["lastupdate"] = self._nodes_list_lastupdate tabs["meta"]["count"] = 1 return tabs def get_globalview(self): """ { "name": "COHORTE", "children": [ {"name": "gateway", "children": [ {"name": "forker", "size":10, "children": [ ]}, {"name": "web-interface", "size":10, "children": [ {"name": "UI", "size":1} ] } ] }, {"name": "python-sensor-pc", "children": [ {"name": "forker", "size":10, "children": [ ]}, {"name": "py.components", "size":10, "children": [ {"name": "PS", "size":1} ] } ]}, {"name": "java-sensor-pc", "children": [ {"name": "forker", "size":10, "children": [ ]} ]}, {"name": "raspberry-pi", "children": [ {"name": "forker", "size":10, "children": [ ]}, {"name": "rasp-components", "size":10, "children": [ {"name": "PS-rasp", "size":1} ] } ]} ] } """ gv = {"name":"COHORTE", "children": []} nodes = self.get_nodes() for n in nodes["nodes"]: node = {"name": n["name"], "size":100, 
"children": []} isolates = self.get_node_isolates(n["uid"]) for i in isolates["isolates"]: #if i["name"] == "cohorte.internals.forker": # continue isolate = {"name": i["name"], "size":10, "children": []} components = self.get_isolate_components(i["uid"]) for c in components["components"]: component = {"name": c["name"], "size":1} isolate["children"].append(component) node["children"].append(isolate) gv["children"].append(node) return gv """ Pages -------------------------------------------------------------------------------------------------------------- """ def show_admin_page(self, request, response): content = "<html><head><meta http-equiv='refresh' content='0; URL=" + self._path content += "/static/web/index.html'/></head><body></body></html>" response.send_content(200, content) def show_error_page(self, request, response): content = """<html> <head><title>COHORTE</title><head><body><h3>404 This is not the web page you are looking for!</h3></body></html>""" response.send_content(404, content) def show_api_welcome_page(self, request, response): content = """<html> <head><title>COHORTE</title><head><body><h3>Welcome to COHORTE API v1!</h3></body></html>""" response.send_content(200, content) """ SERVLET ------------------------------------------------------------------------------------------------------------ """ def do_GET(self, request, response): """ Handle a GET """ query = request.get_path() # prepare query path: remove first and last '/' if exists if query[0] == '/': query = query[1:] if query[-1] == '/': query = query[:-1] parts = str(query).split('/') if str(parts[0]) == "admin": if len(parts) == 1: self.show_admin_page(request, response) elif len(parts) > 1: if str(parts[1]) == "static": if len(parts) > 2: self.load_resource('/'.join(parts[2:]), request, response) else: self.show_error_page(request, response) elif str(parts[1]) == "api": if len(parts) == 3: self.show_api_welcome_page(request, response) if len(parts) == 4: if str(parts[3]).lower() == 
"nodes": nodes = self.get_nodes() self.sendJson(nodes, response) elif str(parts[3]).lower() == "isolates": isolates = self.get_isolates() self.sendJson(isolates, response) elif str(parts[3]).lower() == "components": components = self.get_components() self.sendJson(components, response) if len(parts) == 5: if str(parts[3]).lower() == "nodes": if str(parts[4]).lower() == "killall": result = self.killall_nodes() self.sendJson(result, response) elif str(parts[4]) == "lastupdate": node = self.get_nodes_lastupdate() self.sendJson(node, response) else: node = self.get_node_detail(str(parts[4])) self.sendJson(node, response) elif str(parts[3]).lower() == "isolates": isolate = self.get_isolate_detail(str(parts[4])) self.sendJson(isolate, response) elif str(parts[3]).lower() == "components": isolate = self.get_component_detail(str(parts[4])) self.sendJson(isolate, response) if len(parts) == 6: if str(parts[3]).lower() == "nodes": if str(parts[5]).lower() == "isolates": isolates = self.get_node_isolates(str(parts[4])) self.sendJson(isolates, response) elif str(parts[5]).lower() == "kill": result = self.kill_node(str(parts[4])) self.sendJson(result, response) if str(parts[3]).lower() == "isolates": if str(parts[5]).lower() == "components": components = self.get_isolate_components(str(parts[4])) self.sendJson(components, response) elif str(parts[5]).lower() == "kill": result = self.kill_isolate(str(parts[4])) self.sendJson(result, response) elif str(parts[1]) == "gui": if len(parts) == 3: if str(parts[2]).lower() == "tabs": tabs = self.get_tabs() self.sendJson(tabs, response) else: self.show_error_page(request, response) elif len(parts) == 4: if str(parts[2]).lower() == "tabs": if str(parts[3]).lower() == "globalview": globalview = self.get_globalview() if globalview is None: self.show_error_page(request, response) result = json.dumps(globalview, sort_keys=False, indent=4, separators=(',', ': ')) response.send_content(200, result, "application/json") else: 
self.show_error_page(request, response) else: self.show_error_page(request, response) else: self.show_error_page(request, response) else: self.show_error_page(request, response) def sendJson(self, data, response): result = json.dumps(data, sort_keys=False, indent=4, separators=(',', ': ')) response.send_content(data["meta"]["code"], result, "application/json") """ OTHER STUFF -------------------------------------------------------------------------------------------------------- """ @Validate def validate(self, context): """ Component validated, just print a trace to visualize the event. Between this call and the call to invalidate, the _spell_checker member will point to a valid spell checker service. """ _logger.info("Webadmin validated") @Invalidate def invalidate(self, context): """ Component invalidated, just print a trace to visualize the event """ _logger.info("Webadmin invalidated") def bound_to(self, path, params): """ Servlet bound to a path """ _logger.info('Bound to ' + path) return True def unbound_from(self, path, params): """ Servlet unbound from a path """ _logger.info('Unbound from ' + path) return None
{ "content_hash": "95196328c13b962d1d170114d9ff1533", "timestamp": "", "source": "github", "line_count": 912, "max_line_length": 124, "avg_line_length": 38.07127192982456, "alnum_prop": 0.44932461622649117, "repo_name": "isandlaTech/cohorte-demos", "id": "b233dd0ff69bbf086fbe6924014564e368b5c8e7", "size": "34771", "binary": false, "copies": "4", "ref": "refs/heads/dev", "path": "led/dump/led-demo-raspberry/cohorte/dist/cohorte-1.0.0-1.0.0-20141201.234602-19-python-distribution/repo/bundles/webadmin/webadmin.py", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "Arduino", "bytes": "4339" }, { "name": "Batchfile", "bytes": "3349" }, { "name": "CSS", "bytes": "722861" }, { "name": "HTML", "bytes": "267983" }, { "name": "Java", "bytes": "22060" }, { "name": "JavaScript", "bytes": "11127825" }, { "name": "Python", "bytes": "16153349" }, { "name": "Shell", "bytes": "33275" } ], "symlink_target": "" }
// Test helper: manages a local TLS HTTP server (a Python script) used by
// osquery integration tests, and swaps the client-side TLS flags so the
// osquery TLS plugins talk to it.

#include <csignal>
#include <ctime>
#include <thread>

#include <osquery/core.h>
#include <osquery/database.h>
#include <osquery/flags.h>
#include <osquery/sql.h>

#include "osquery/core/json.h"
#include "osquery/core/process.h"
#include "osquery/tests/test_additional_util.h"
#include "osquery/tests/test_util.h"

namespace fs = boost::filesystem;

namespace osquery {

// Client-side TLS flags defined elsewhere; saved/restored by
// setClientConfig()/unsetClientConfig() below.
DECLARE_string(tls_hostname);
DECLARE_string(enroll_tls_endpoint);
DECLARE_string(tls_server_certs);
DECLARE_string(enroll_secret_path);
DECLARE_bool(disable_caching);

// Launch the Python TLS test server on a random port, then poll until it is
// observed listening (or ~2 seconds elapse). Idempotent: returns immediately
// if a server is already running.
void TLSServerRunner::start() {
  auto& self = instance();
  if (self.server_ != nullptr) {
    return;
  }

  // Pick a port in an ephemeral range at random.
  // NOTE(review): rand() is not seeded here, so the port sequence is the
  // same across runs unless seeded elsewhere — confirm if collisions matter.
  self.port_ = std::to_string(rand() % 10000 + 20000);

  // Fork then exec a shell.
  auto python_server = (fs::path(kTestDataPath) / "test_http_server.py")
                           .make_preferred()
                           .string() +
                       " --tls " + self.port_;
  self.server_ = PlatformProcess::launchTestPythonScript(python_server);
  if (self.server_ == nullptr) {
    return;
  }

  // Poll the listening_ports table (with results caching temporarily
  // disabled so each query is fresh) until the server binds the chosen
  // port, giving up after 2000ms.
  size_t delay = 0;
  std::string query =
      "select pid from listening_ports where port = '" + self.port_ + "'";
  while (delay < 2 * 1000) {
    auto caching = FLAGS_disable_caching;
    FLAGS_disable_caching = true;
    auto results = SQL(query);
    FLAGS_disable_caching = caching;
    if (!results.rows().empty()) {
      // Re-wrap the process handle around the pid actually listening on the
      // port (presumably the Python server rather than the launcher shell —
      // confirm against launchTestPythonScript's semantics).
      self.server_.reset(
          new PlatformProcess(std::atoi(results.rows()[0].at("pid").c_str())));
      break;
    }
    sleepFor(100);
    delay += 100;
  }
}

// Point the TLS client flags at the local test server, remembering the
// previous values so unsetClientConfig() can restore them.
void TLSServerRunner::setClientConfig() {
  auto& self = instance();

  self.tls_hostname_ = Flag::getValue("tls_hostname");
  Flag::updateValue("tls_hostname", "localhost:" + port());

  self.enroll_tls_endpoint_ = Flag::getValue("enroll_tls_endpoint");
  Flag::updateValue("enroll_tls_endpoint", "/enroll");

  self.tls_server_certs_ = Flag::getValue("tls_server_certs");
  Flag::updateValue("tls_server_certs",
                    (fs::path(kTestDataPath) / "test_server_ca.pem")
                        .make_preferred()
                        .string());

  self.enroll_secret_path_ = Flag::getValue("enroll_secret_path");
  Flag::updateValue("enroll_secret_path",
                    (fs::path(kTestDataPath) / "test_enroll_secret.txt")
                        .make_preferred()
                        .string());
}

// Restore the TLS client flags captured by setClientConfig().
void TLSServerRunner::unsetClientConfig() {
  auto& self = instance();
  Flag::updateValue("tls_hostname", self.tls_hostname_);
  Flag::updateValue("enroll_tls_endpoint", self.enroll_tls_endpoint_);
  Flag::updateValue("tls_server_certs", self.tls_server_certs_);
  Flag::updateValue("enroll_secret_path", self.enroll_secret_path_);
}

// Kill the test server process (if any) and drop the handle so start() can
// launch a fresh instance.
void TLSServerRunner::stop() {
  auto& self = instance();
  if (self.server_ != nullptr) {
    self.server_->kill();
    self.server_.reset();
  }
}
} // namespace osquery
{ "content_hash": "357e6dbee4cba72e61a93f8ec5f242ef", "timestamp": "", "source": "github", "line_count": 103, "max_line_length": 79, "avg_line_length": 28.563106796116504, "alnum_prop": 0.6240652617267165, "repo_name": "jedi22/osquery", "id": "310284e920509e3425a65cf863aebf26bd5ee7ab", "size": "3324", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "osquery/tests/test_additional_util.cpp", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "38093" }, { "name": "C++", "bytes": "2437561" }, { "name": "CMake", "bytes": "78446" }, { "name": "Makefile", "bytes": "7926" }, { "name": "Objective-C++", "bytes": "65363" }, { "name": "Shell", "bytes": "2038" }, { "name": "Thrift", "bytes": "2969" } ], "symlink_target": "" }
#include "pch.hpp" #include "main_application.hpp" #include "python27_interface_test.hpp" #include "core_generic_plugin/interfaces/i_command_line_parser.hpp" #include "core_generic_plugin/interfaces/i_component_context.hpp" #include <cassert> namespace wgt { MainApplication::MainApplication( IComponentContext & contextManager ) : contextManager_( contextManager ) { } int MainApplication::startApplication() /* override */ { // Pass reference to unit tests g_contextManager = &contextManager_; auto clp = contextManager_.queryInterface< ICommandLineParser >(); assert( clp != nullptr ); return BWUnitTest::runTest( "python27_interface_test", clp->argc(), clp->argv() ); } void MainApplication::quitApplication() /* override */ { } } // end namespace wgt
{ "content_hash": "5606e49728b763515d633f292478e800", "timestamp": "", "source": "github", "line_count": 37, "max_line_length": 70, "avg_line_length": 21, "alnum_prop": 0.7361647361647362, "repo_name": "dava/wgtf", "id": "6aa1a6413c4baa8272a235ed2d341691d83a3847", "size": "777", "binary": false, "copies": "1", "ref": "refs/heads/dava/development", "path": "src/core/testing/plg_python27_interface_test/main_application.cpp", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Batchfile", "bytes": "3332" }, { "name": "C", "bytes": "586" }, { "name": "C++", "bytes": "4348584" }, { "name": "CMake", "bytes": "198187" }, { "name": "JavaScript", "bytes": "135317" }, { "name": "Makefile", "bytes": "936" }, { "name": "Python", "bytes": "32510" }, { "name": "QML", "bytes": "1293442" }, { "name": "Shell", "bytes": "8109" } ], "symlink_target": "" }
import _plotly_utils.basevalidators class SizeValidator(_plotly_utils.basevalidators.NumberValidator): def __init__(self, plotly_name="size", parent_name="scatter.marker", **kwargs): super(SizeValidator, self).__init__( plotly_name=plotly_name, parent_name=parent_name, anim=kwargs.pop("anim", True), array_ok=kwargs.pop("array_ok", True), edit_type=kwargs.pop("edit_type", "calc"), min=kwargs.pop("min", 0), role=kwargs.pop("role", "style"), **kwargs )
{ "content_hash": "0f968cef4dd171e6dc7b6aefaa059c79", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 83, "avg_line_length": 38.13333333333333, "alnum_prop": 0.5769230769230769, "repo_name": "plotly/python-api", "id": "60713a89cd124827c549c8bb482971d6b88ec7f3", "size": "572", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "packages/python/plotly/plotly/validators/scatter/marker/_size.py", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "6870" }, { "name": "Makefile", "bytes": "1708" }, { "name": "Python", "bytes": "823245" }, { "name": "Shell", "bytes": "3238" } ], "symlink_target": "" }
package com.example.fragments; //check import support.v4 import android.os.Bundle; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentTransaction; import android.support.v7.app.ActionBarActivity; import android.view.Menu; import android.view.MenuItem; public class MainActivity extends ActionBarActivity implements MyFragment1.Listener{ private final static String TAG_FRAGMENT = "TAG_FRAGMENT"; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); MyFragment1 mf1 = (MyFragment1) MyFragment1.newInstance(1); FragmentManager manager = getSupportFragmentManager(); FragmentTransaction transaction = manager.beginTransaction(); transaction.add(R.id.main_container, mf1); transaction.commit(); } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.menu_main, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle action bar item clicks here. The action bar will // automatically handle clicks on the Home/Up button, so long // as you specify a parent activity in AndroidManifest.xml. 
int id = item.getItemId(); //noinspection SimplifiableIfStatement if (id == R.id.action_settings) { return true; } return super.onOptionsItemSelected(item); } @Override public void onReadyToDoSomething(String url) { // Create new fragment and transaction MyFragment2 mf2 = (MyFragment2) MyFragment2.newInstance(url); FragmentManager manager = getSupportFragmentManager(); FragmentTransaction transaction = manager.beginTransaction(); // Replace whatever is in the main_container view with new fragment transaction.replace(R.id.main_container, mf2, TAG_FRAGMENT); // Add the transaction to a back stack of fragment transactions transaction.addToBackStack(null); // Commit the transaction transaction.commit(); } @Override public void onBackPressed() { super.onBackPressed(); } }
{ "content_hash": "2a5c1d3d6dcf1149d1608cd6f7c20091", "timestamp": "", "source": "github", "line_count": 72, "max_line_length": 84, "avg_line_length": 32.736111111111114, "alnum_prop": 0.6945269410267289, "repo_name": "nishtahir/Mektory-BeginnersAndroid", "id": "784b69e3e34e57c8256aa27769eb0861128d7e78", "size": "2357", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Fragments/app/src/main/java/com/example/fragments/MainActivity.java", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "48303" } ], "symlink_target": "" }
var router = require('express').Router({mergeParams: true}); var app = module.exports = router; app.callbacks = require('./controllers/index'); app.socket = require('./modules/tattler'); // routes app.post('/rooms', app.callbacks.postRooms); app.get('/emit', app.callbacks.getEmit); app.post('/emit', app.callbacks.postEmit);
{ "content_hash": "a8fe74e6c973f03b914aef299e80234e", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 60, "avg_line_length": 27.416666666666668, "alnum_prop": 0.7082066869300911, "repo_name": "grohman/tattler", "id": "9cb6ef3e7df1f1e8bd99a25628c581684876a14a", "size": "329", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/tattler/index.js", "mode": "33261", "license": "mit", "language": [ { "name": "Handlebars", "bytes": "6" }, { "name": "JavaScript", "bytes": "13423" }, { "name": "Shell", "bytes": "9906" } ], "symlink_target": "" }
<?php $this->breadcrumbs = array( Yii::app()->getModule('user')->getCategory() => array(), Yii::t('UserModule.user', 'Users') => array('/user/userBackend/index'), Yii::t('UserModule.user', 'Create'), ); $this->pageTitle = Yii::t('UserModule.user', 'Users - create'); $this->menu = array( array('label' => Yii::t('UserModule.user', 'Users'), 'items' => array( array('icon' => 'list-alt', 'label' => Yii::t('UserModule.user', 'Manage users'), 'url' => array('/user/userBackend/index')), array('icon' => 'plus-sign', 'label' => Yii::t('UserModule.user', 'Create user'), 'url' => array('/user/userBackend/create')), )), array('label' => Yii::t('UserModule.user', 'Tokens'), 'items' => array( array('icon' => 'list-alt', 'label' => Yii::t('UserModule.user', 'Token list'), 'url' => array('/user/tokensBackend/index')), )), ); ?> <div class="page-header"> <h1> <?php echo Yii::t('UserModule.user', 'Users'); ?> <small><?php echo Yii::t('UserModule.user', 'create'); ?></small> </h1> </div> <?php echo $this->renderPartial('_form', array('model' => $model)); ?>
{ "content_hash": "1ee0c16e29d1c232edcef9d84a1cd8c1", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 138, "avg_line_length": 42.642857142857146, "alnum_prop": 0.541038525963149, "repo_name": "itrustam/yode", "id": "f99d80a30ae24f64893fbe8978b71079715768de", "size": "1194", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "protected/modules/user2/views/userBackend/create.php", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
"""Support for HomematicIP Cloud binary sensor."""
import logging

from homematicip.aio.device import (
    AsyncContactInterface,
    AsyncDevice,
    AsyncFullFlushContactInterface,
    AsyncMotionDetectorIndoor,
    AsyncMotionDetectorOutdoor,
    AsyncMotionDetectorPushButton,
    AsyncPresenceDetectorIndoor,
    AsyncRotaryHandleSensor,
    AsyncShutterContact,
    AsyncShutterContactMagnetic,
    AsyncSmokeDetector,
    AsyncWaterSensor,
    AsyncWeatherSensor,
    AsyncWeatherSensorPlus,
    AsyncWeatherSensorPro,
)
from homematicip.aio.group import AsyncSecurityGroup, AsyncSecurityZoneGroup
from homematicip.aio.home import AsyncHome
from homematicip.base.enums import SmokeDetectorAlarmType, WindowState

from homeassistant.components.binary_sensor import (
    DEVICE_CLASS_BATTERY,
    DEVICE_CLASS_DOOR,
    DEVICE_CLASS_LIGHT,
    DEVICE_CLASS_MOISTURE,
    DEVICE_CLASS_MOTION,
    DEVICE_CLASS_OPENING,
    DEVICE_CLASS_PRESENCE,
    DEVICE_CLASS_SAFETY,
    DEVICE_CLASS_SMOKE,
    BinarySensorDevice,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

from . import DOMAIN as HMIPC_DOMAIN, HMIPC_HAPID, HomematicipGenericDevice
from .device import ATTR_GROUP_MEMBER_UNREACHABLE

_LOGGER = logging.getLogger(__name__)

# Keys for the extra state attributes exposed by the group sensors below.
ATTR_LOW_BATTERY = "low_battery"
ATTR_MOTIONDETECTED = "motion detected"
ATTR_PRESENCEDETECTED = "presence detected"
ATTR_POWERMAINSFAILURE = "power mains failure"
ATTR_WINDOWSTATE = "window state"
ATTR_MOISTUREDETECTED = "moisture detected"
ATTR_WATERLEVELDETECTED = "water level detected"
ATTR_SMOKEDETECTORALARM = "smoke detector alarm"
ATTR_TODAY_SUNSHINE_DURATION = "today_sunshine_duration_in_minutes"


async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the HomematicIP Cloud binary sensor devices."""
    # Intentionally empty: entities are created via async_setup_entry below.
    pass


async def async_setup_entry(
    hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities
) -> None:
    """Set up the HomematicIP Cloud binary sensor from a config entry."""
    home = hass.data[HMIPC_DOMAIN][config_entry.data[HMIPC_HAPID]].home
    devices = []
    # Independent "if" checks (not elif): one physical device may map to
    # several entities (e.g. a WeatherSensorPlus matches both the rain and
    # the storm/sunshine branches below).
    for device in home.devices:
        if isinstance(device, (AsyncContactInterface, AsyncFullFlushContactInterface)):
            devices.append(HomematicipContactInterface(home, device))
        if isinstance(
            device,
            (AsyncShutterContact, AsyncShutterContactMagnetic, AsyncRotaryHandleSensor),
        ):
            devices.append(HomematicipShutterContact(home, device))
        if isinstance(
            device,
            (
                AsyncMotionDetectorIndoor,
                AsyncMotionDetectorOutdoor,
                AsyncMotionDetectorPushButton,
            ),
        ):
            devices.append(HomematicipMotionDetector(home, device))
        if isinstance(device, AsyncPresenceDetectorIndoor):
            devices.append(HomematicipPresenceDetector(home, device))
        if isinstance(device, AsyncSmokeDetector):
            devices.append(HomematicipSmokeDetector(home, device))
        if isinstance(device, AsyncWaterSensor):
            devices.append(HomematicipWaterDetector(home, device))
        if isinstance(device, (AsyncWeatherSensorPlus, AsyncWeatherSensorPro)):
            devices.append(HomematicipRainSensor(home, device))
        if isinstance(
            device, (AsyncWeatherSensor, AsyncWeatherSensorPlus, AsyncWeatherSensorPro)
        ):
            devices.append(HomematicipStormSensor(home, device))
            devices.append(HomematicipSunshineSensor(home, device))
        # Any device that reports a battery level also gets a low-battery sensor.
        if isinstance(device, AsyncDevice) and device.lowBat is not None:
            devices.append(HomematicipBatterySensor(home, device))

    for group in home.groups:
        if isinstance(group, AsyncSecurityGroup):
            devices.append(HomematicipSecuritySensorGroup(home, group))
        elif isinstance(group, AsyncSecurityZoneGroup):
            devices.append(HomematicipSecurityZoneSensorGroup(home, group))

    if devices:
        async_add_entities(devices)


class HomematicipContactInterface(HomematicipGenericDevice, BinarySensorDevice):
    """Representation of a HomematicIP Cloud contact interface."""

    @property
    def device_class(self) -> str:
        """Return the class of this sensor."""
        return DEVICE_CLASS_OPENING

    @property
    def is_on(self) -> bool:
        """Return true if the contact interface is on/open."""
        # A set sabotage flag forces the sensor on regardless of window state.
        if hasattr(self._device, "sabotage") and self._device.sabotage:
            return True
        # None means the window state is unknown (entity state stays unknown).
        if self._device.windowState is None:
            return None
        return self._device.windowState != WindowState.CLOSED


class HomematicipShutterContact(HomematicipGenericDevice, BinarySensorDevice):
    """Representation of a HomematicIP Cloud shutter contact."""

    # NOTE(review): body duplicates HomematicipContactInterface.is_on;
    # the two classes differ only in device_class.

    @property
    def device_class(self) -> str:
        """Return the class of this sensor."""
        return DEVICE_CLASS_DOOR

    @property
    def is_on(self) -> bool:
        """Return true if the shutter contact is on/open."""
        if hasattr(self._device, "sabotage") and self._device.sabotage:
            return True
        if self._device.windowState is None:
            return None
        return self._device.windowState != WindowState.CLOSED


class HomematicipMotionDetector(HomematicipGenericDevice, BinarySensorDevice):
    """Representation of a HomematicIP Cloud motion detector."""

    @property
    def device_class(self) -> str:
        """Return the class of this sensor."""
        return DEVICE_CLASS_MOTION

    @property
    def is_on(self) -> bool:
        """Return true if motion is detected."""
        # Sabotage overrides the motion reading, as for the contact sensors.
        if hasattr(self._device, "sabotage") and self._device.sabotage:
            return True
        return self._device.motionDetected


class HomematicipPresenceDetector(HomematicipGenericDevice, BinarySensorDevice):
    """Representation of a HomematicIP Cloud presence detector."""

    @property
    def device_class(self) -> str:
        """Return the class of this sensor."""
        return DEVICE_CLASS_PRESENCE

    @property
    def is_on(self) -> bool:
        """Return true if presence is detected."""
        if hasattr(self._device, "sabotage") and self._device.sabotage:
            return True
        return self._device.presenceDetected


class HomematicipSmokeDetector(HomematicipGenericDevice, BinarySensorDevice):
    """Representation of a HomematicIP Cloud smoke detector."""

    @property
    def device_class(self) -> str:
        """Return the class of this sensor."""
        return DEVICE_CLASS_SMOKE

    @property
    def is_on(self) -> bool:
        """Return true if smoke is detected."""
        # On for any alarm state other than idle/off.
        return self._device.smokeDetectorAlarmType != SmokeDetectorAlarmType.IDLE_OFF


class HomematicipWaterDetector(HomematicipGenericDevice, BinarySensorDevice):
    """Representation of a HomematicIP Cloud water detector."""

    @property
    def device_class(self) -> str:
        """Return the class of this sensor."""
        return DEVICE_CLASS_MOISTURE

    @property
    def is_on(self) -> bool:
        """Return true, if moisture or waterlevel is detected."""
        return self._device.moistureDetected or self._device.waterlevelDetected


class HomematicipStormSensor(HomematicipGenericDevice, BinarySensorDevice):
    """Representation of a HomematicIP Cloud storm sensor."""

    def __init__(self, home: AsyncHome, device) -> None:
        """Initialize storm sensor."""
        super().__init__(home, device, "Storm")

    @property
    def icon(self) -> str:
        """Return the icon."""
        return "mdi:weather-windy" if self.is_on else "mdi:pinwheel-outline"

    @property
    def is_on(self) -> bool:
        """Return true, if storm is detected."""
        return self._device.storm


class HomematicipRainSensor(HomematicipGenericDevice, BinarySensorDevice):
    """Representation of a HomematicIP Cloud rain sensor."""

    def __init__(self, home: AsyncHome, device) -> None:
        """Initialize rain sensor."""
        super().__init__(home, device, "Raining")

    @property
    def device_class(self) -> str:
        """Return the class of this sensor."""
        return DEVICE_CLASS_MOISTURE

    @property
    def is_on(self) -> bool:
        """Return true, if it is raining."""
        return self._device.raining


class HomematicipSunshineSensor(HomematicipGenericDevice, BinarySensorDevice):
    """Representation of a HomematicIP Cloud sunshine sensor."""

    def __init__(self, home: AsyncHome, device) -> None:
        """Initialize sunshine sensor."""
        super().__init__(home, device, "Sunshine")

    @property
    def device_class(self) -> str:
        """Return the class of this sensor."""
        return DEVICE_CLASS_LIGHT

    @property
    def is_on(self) -> bool:
        """Return true if sun is shining."""
        return self._device.sunshine

    @property
    def device_state_attributes(self):
        """Return the state attributes of the illuminance sensor."""
        attr = super().device_state_attributes
        # Only exposed when the device supports and reports a non-zero value.
        if (
            hasattr(self._device, "todaySunshineDuration")
            and self._device.todaySunshineDuration
        ):
            attr[ATTR_TODAY_SUNSHINE_DURATION] = self._device.todaySunshineDuration
        return attr


class HomematicipBatterySensor(HomematicipGenericDevice, BinarySensorDevice):
    """Representation of a HomematicIP Cloud low battery sensor."""

    def __init__(self, home: AsyncHome, device) -> None:
        """Initialize battery sensor."""
        super().__init__(home, device, "Battery")

    @property
    def device_class(self) -> str:
        """Return the class of this sensor."""
        return DEVICE_CLASS_BATTERY

    @property
    def is_on(self) -> bool:
        """Return true if battery is low."""
        return self._device.lowBat


class HomematicipSecurityZoneSensorGroup(HomematicipGenericDevice, BinarySensorDevice):
    """Representation of a HomematicIP Cloud security zone group."""

    def __init__(self, home: AsyncHome, device, post: str = "SecurityZone") -> None:
        """Initialize security zone group."""
        # Groups carry no model type of their own; synthesize one
        # (presumably for the device registry — confirm).
        device.modelType = "HmIP-{}".format(post)
        super().__init__(home, device, post)

    @property
    def device_class(self) -> str:
        """Return the class of this sensor."""
        return DEVICE_CLASS_SAFETY

    @property
    def available(self) -> bool:
        """Security-Group available."""
        # A security-group must be available, and should not be affected by
        # the individual availability of group members.
        return True

    @property
    def device_state_attributes(self):
        """Return the state attributes of the security zone group."""
        attr = super().device_state_attributes

        if self._device.motionDetected:
            attr[ATTR_MOTIONDETECTED] = True
        if self._device.presenceDetected:
            attr[ATTR_PRESENCEDETECTED] = True

        if (
            self._device.windowState is not None
            and self._device.windowState != WindowState.CLOSED
        ):
            attr[ATTR_WINDOWSTATE] = str(self._device.windowState)
        if self._device.unreach:
            attr[ATTR_GROUP_MEMBER_UNREACHABLE] = True

        return attr

    @property
    def is_on(self) -> bool:
        """Return true if security issue detected."""
        # Any of motion, presence, unreachable member, sabotage or an open
        # window trips the zone.
        if (
            self._device.motionDetected
            or self._device.presenceDetected
            or self._device.unreach
            or self._device.sabotage
        ):
            return True
        if (
            self._device.windowState is not None
            and self._device.windowState != WindowState.CLOSED
        ):
            return True
        return False


class HomematicipSecuritySensorGroup(
    HomematicipSecurityZoneSensorGroup, BinarySensorDevice
):
    """Representation of a HomematicIP security group."""

    def __init__(self, home: AsyncHome, device) -> None:
        """Initialize security group."""
        super().__init__(home, device, "Sensors")

    @property
    def device_state_attributes(self):
        """Return the state attributes of the security group."""
        # Extends the zone-group attributes with power/moisture/battery/smoke.
        attr = super().device_state_attributes

        if self._device.powerMainsFailure:
            attr[ATTR_POWERMAINSFAILURE] = True
        if self._device.moistureDetected:
            attr[ATTR_MOISTUREDETECTED] = True
        if self._device.waterlevelDetected:
            attr[ATTR_WATERLEVELDETECTED] = True
        if self._device.lowBat:
            attr[ATTR_LOW_BATTERY] = True

        if (
            self._device.smokeDetectorAlarmType is not None
            and self._device.smokeDetectorAlarmType != SmokeDetectorAlarmType.IDLE_OFF
        ):
            attr[ATTR_SMOKEDETECTORALARM] = str(self._device.smokeDetectorAlarmType)

        return attr

    @property
    def is_on(self) -> bool:
        """Return true if safety issue detected."""
        # Zone conditions (motion/presence/unreach/sabotage/window) plus the
        # group-wide safety conditions below.
        parent_is_on = super().is_on
        if (
            parent_is_on
            or self._device.powerMainsFailure
            or self._device.moistureDetected
            or self._device.waterlevelDetected
            or self._device.lowBat
        ):
            return True
        if (
            self._device.smokeDetectorAlarmType is not None
            and self._device.smokeDetectorAlarmType != SmokeDetectorAlarmType.IDLE_OFF
        ):
            return True
        return False
{ "content_hash": "ce77da853166d70e0369dcb3fe17c125", "timestamp": "", "source": "github", "line_count": 394, "max_line_length": 88, "avg_line_length": 34.15736040609137, "alnum_prop": 0.6613909942041908, "repo_name": "fbradyirl/home-assistant", "id": "7bb7718f0b3584183bea726d763b49f08749de0d", "size": "13458", "binary": false, "copies": "1", "ref": "refs/heads/dev", "path": "homeassistant/components/homematicip_cloud/binary_sensor.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "1175" }, { "name": "Dockerfile", "bytes": "1829" }, { "name": "Python", "bytes": "16494727" }, { "name": "Ruby", "bytes": "745" }, { "name": "Shell", "bytes": "17784" } ], "symlink_target": "" }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta name="viewport" content="width=device-width, initial-scale=1"> <title>Login</title> <!--[if lt IE 9]> <script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script> <script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script> <![endif]--> </head> <body> <div class="container"> <div id="login"></div> </div> </body> </html>
{ "content_hash": "86d960c1620d13e154c70e01ed16531e", "timestamp": "", "source": "github", "line_count": 20, "max_line_length": 83, "avg_line_length": 26.4, "alnum_prop": 0.6041666666666666, "repo_name": "cleggatt/gift-tracker", "id": "80bcbc207247d59f621f55ad7626283c90d65dca", "size": "528", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/index.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "184" }, { "name": "HTML", "bytes": "528" }, { "name": "JavaScript", "bytes": "56422" }, { "name": "Shell", "bytes": "130" } ], "symlink_target": "" }
package repo import ( "github.com/CapillarySoftware/gostat/stat" log "github.com/cihub/seelog" "github.com/gocql/gocql" "time" ) type StatRepo struct { rawStats <-chan *stat.Stat // Stats to be persisted are read from this channel shutdown <-chan bool // signals a graceful shutdown } // NewStatRepo constructs a StatRepo func NewStatRepo(rawStats <-chan *stat.Stat, shutdown <-chan bool) *StatRepo { return &StatRepo{ rawStats: rawStats, shutdown: shutdown, } } // Run is a goroutine that writes stats from the input channel, placing them into // the appropriate bucket. Buckets are published on the output channel at the // specified interval func (s *StatRepo) Run() { done := false for !done { select { case stat := <-s.rawStats: log.Debugf("StatRepo got %+v", *stat) s.insertRawStat(stat) case done = <-s.shutdown: log.Debug("StatRepo shutting down ", time.Now()) case <-time.After(time.Second * 1): log.Debug("StatRepo Run() timeout ", time.Now()) } } log.Info("StatRepo InsertRawStats() exiting ", time.Now()) } func createSession() (session *gocql.Session, err error) { cluster := gocql.NewCluster("localhost") cluster.Keyspace = "gostat" cluster.Consistency = gocql.Quorum return cluster.CreateSession() } func (s *StatRepo) insertRawStat(stat *stat.Stat) { var session *gocql.Session var err error if session, err = createSession(); err != nil { log.Error("error connecting to Cassandra to insert raw stat: ", err) return } defer closeSession(session) if err := session.Query(`INSERT INTO raw_stats (name, ts, value) VALUES (?, ?, ?)`, stat.Name, stat.Timestamp, stat.Value).Exec(); err != nil { log.Error("error inserting raw stat: ", err) } } func closeSession(session *gocql.Session) { if session != nil { session.Close() } } func GetRawStats(name string, start, end time.Time) ([]stat.Stat, error) { var session *gocql.Session var err error rawStats := make([]stat.Stat, 0) if session, err = createSession(); err != nil { log.Error("failed to connect to Cassandra to query raw stats: 
", err) return make([]stat.Stat, 0), err } defer closeSession(session) iter := session.Query(`SELECT ts, value FROM raw_stats WHERE name = ? AND ts >= ? AND ts <= ?`, name, start, end).Iter() var ts time.Time var value float64 for iter.Scan(&ts, &value) { stat := stat.Stat{Name: name, Timestamp: ts, Value: value} rawStats = append(rawStats, stat) } if err := iter.Close(); err != nil { log.Error("error transforming raw stats query results: ", err) return make([]stat.Stat, 0), err } return rawStats, nil } func GetLastNRawStats(name string, last int) ([]stat.Stat, error) { var session *gocql.Session var err error rawStats := make([]stat.Stat, 0) tmp := make([]stat.Stat, 1) if session, err = createSession(); err != nil { log.Error("failed to connect to Cassandra to query last n raw stats: ", err) return make([]stat.Stat, 0), err } defer session.Close() iter := session.Query(`SELECT ts, value FROM raw_stats WHERE name = ? ORDER BY ts DESC LIMIT ?`, name, last).Iter() var ts time.Time var value float64 for iter.Scan(&ts, &value) { stat := stat.Stat{Name: name, Timestamp: ts, Value: value} tmp[0] = stat rawStats = append(tmp, rawStats...) } if err := iter.Close(); err != nil { log.Error("error transforming last n raw stats query results: ", err) return make([]stat.Stat, 0), err } return rawStats, nil }
{ "content_hash": "6725d003ab50aa134f4dd8aa4df745d8", "timestamp": "", "source": "github", "line_count": 128, "max_line_length": 121, "avg_line_length": 27.0234375, "alnum_prop": 0.6808326105810928, "repo_name": "CapillarySoftware/gostat", "id": "ba4a3f30ea210f5ecef124ebc31b1e54adf22a4b", "size": "3459", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "repo/stat_repo.go", "mode": "33188", "license": "mit", "language": [ { "name": "Go", "bytes": "75033" }, { "name": "JavaScript", "bytes": "155214" }, { "name": "Shell", "bytes": "147" } ], "symlink_target": "" }
#ifndef UTILDG_H_INC #define UTILDG_H_INC /* *Give a number interval and returns a random number from that interval *Parameter: int MIN, int MAX *Return: INT */ int Randomizer(int min, int max); /* * Give a text to show, and returns the number the user entered. Only returns numbers, no text * Parameter: char* to show on screen * Return: INT option */ int MenuOpt(char *Question); /* * Don't do anything for X mlseconds * Parameter: int wiht how many mlseconds to wait */ void wait(int mlseconds); /* * Read Text from console */ char* ReadText(char *Question); /* *KeyPressed: *Retorna true se uma tecla for pressionada */ bool KeyPressed(); #endif // !UTILDG_H_INC
{ "content_hash": "1a49eff7a58d8ddbf549acd59bd0231b", "timestamp": "", "source": "github", "line_count": 35, "max_line_length": 93, "avg_line_length": 19.228571428571428, "alnum_prop": 0.7176820208023774, "repo_name": "dicamarques14/ProjectosProgramacao", "id": "5013e3cebfab4d45a3bdd379c24ead7c26cc474b", "size": "673", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Algoritmos Programacao/SDL_Robos/UtilsDG.h", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "6794" }, { "name": "C++", "bytes": "250222" }, { "name": "Java", "bytes": "16847" } ], "symlink_target": "" }
package com.dytech.edge.admin.script.ifmodel; import com.dytech.edge.admin.script.model.Clause; import com.dytech.edge.admin.script.model.Node; public class Block extends Node { protected Clause clause; protected Node parent; public Clause getClause() { return clause; } public void setClause(Clause clause) { this.clause = clause; } public String toScript(boolean first) { StringBuilder script = new StringBuilder(); // We have to add the literals first. if (!first) { script.append("else "); } script.append("if( "); script.append(clause.toScript()); // Add the remaining literals script.append(") \n{ \n bRet = true; \n} \n"); return script.toString(); } public String toEasyRead(boolean first) { if (first) { return "<b>if</b> "; } else { return "<b>else if</b> "; } } }
{ "content_hash": "2497296109eac0a571dbbd6097427821", "timestamp": "", "source": "github", "line_count": 45, "max_line_length": 53, "avg_line_length": 19.666666666666668, "alnum_prop": 0.6305084745762712, "repo_name": "equella/Equella", "id": "d8ea248aad756d632509d3b6bd3c5271009b0284", "size": "1688", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "Source/Plugins/Core/com.equella.admin/src/com/dytech/edge/admin/script/ifmodel/Block.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Awk", "bytes": "402" }, { "name": "Batchfile", "bytes": "38432" }, { "name": "CSS", "bytes": "648823" }, { "name": "Dockerfile", "bytes": "2055" }, { "name": "FreeMarker", "bytes": "370046" }, { "name": "HTML", "bytes": "865667" }, { "name": "Java", "bytes": "27081020" }, { "name": "JavaScript", "bytes": "1673995" }, { "name": "PHP", "bytes": "821" }, { "name": "PLpgSQL", "bytes": "1363" }, { "name": "PureScript", "bytes": "307610" }, { "name": "Python", "bytes": "79871" }, { "name": "Scala", "bytes": "765981" }, { "name": "Shell", "bytes": "64170" }, { "name": "TypeScript", "bytes": "146564" }, { "name": "XSLT", "bytes": "510113" } ], "symlink_target": "" }
<html> <head> <link rel="stylesheet" href="stylesheets/test.css"> </head> <body> <div class="b-text-center">Congratulations! Now this text has been aligned both horizontal center and vertical center running on cross browser!</div> <div class="cs-border-radius" style="padding: 30px; background-color: red; color: #fff; margin-bottom: 32px;">The border radius is working on cross browsers</div> <div class="cs-border-top-left-radius" style="padding: 30px; background-color: blue; color: #fff; margin-bottom: 32px;">The border top-left radius is working on cross browsers</div> <div class="cs-border-top-right-radius" style="padding: 30px; background-color: green; color: #fff; margin-bottom: 32px">The border top-right radius is working on cross browsers</div> <div class="cs-border-bottom-left-radius" style="padding: 30px; background-color: orange; color: #fff; margin-bottom: 32px;">The border bottom-left radius is working on cross browsers</div> <div class="cs-border-bottom-right-radius" style="padding: 30px; background-color: indigo; color: #fff; margin-bottom: 32px;">The border bottom-right radius is working on cross browsers</div> <div class="cs-zoom" style="padding: 30px; background-color: indigo; color: #fff; margin-bottom: 32px;">The zoom is working on cross browsers</div> </body> <script type="text/javascript" src="js/cross-style.js"></script> </html>
{ "content_hash": "9b81a5c4b9db4dce984fc2cc7f221df8", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 195, "avg_line_length": 94.2, "alnum_prop": 0.7289455060155697, "repo_name": "phatly27/cross-style", "id": "9f01cb803bd1eda9a35e3650d250a54bcb69eb26", "size": "1413", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/index.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "7150" }, { "name": "HTML", "bytes": "2826" }, { "name": "JavaScript", "bytes": "27654" } ], "symlink_target": "" }
<?php namespace WP_AMP_Themes\Admin; use \WP_AMP_Themes\Includes\Options; use \WP_AMP_Themes\Core\Themes_Config; /** * Admin_Ajax class for managing Ajax requests from the admin area of the plugin */ class Admin_Ajax { /** * Update theme settings. */ public function settings() { if ( current_user_can( 'manage_options' ) ) { $response = [ 'status' => 0, 'message' => 'There was an error. Please reload the page and try again.', ]; $changed = 0; if ( ! empty( $_POST ) ) { $wp_amp_themes_options = new Options(); if ( isset( $_POST['wp_amp_themes_settings_analyticsid'] ) && isset( $_POST['wp_amp_themes_settings_facebookappid'] ) && isset( $_POST['wp_amp_themes_settings_push_enabled'] ) && is_numeric( $_POST['wp_amp_themes_settings_push_enabled'] ) && ( $_POST['wp_amp_themes_settings_facebookappid'] == '' || is_numeric($_POST['wp_amp_themes_settings_facebookappid']) ) ) { // save analytics id $new_analytics_id = sanitize_text_field( $_POST['wp_amp_themes_settings_analyticsid'] ); if ( $new_analytics_id !== $wp_amp_themes_options->get_setting( 'analytics_id' ) ) { $changed = 1; $wp_amp_themes_options->update_settings( 'analytics_id', $new_analytics_id ); } // save facebook app id $new_facebook_app_id = sanitize_text_field( $_POST['wp_amp_themes_settings_facebookappid'] ); if ( $new_facebook_app_id !== $wp_amp_themes_options->get_setting( 'facebook_app_id' ) ) { $changed = 1; $wp_amp_themes_options->update_settings( 'facebook_app_id', $new_facebook_app_id ); } // save enable push setting $new_push_notifications_enabled = sanitize_text_field( $_POST['wp_amp_themes_settings_push_enabled']); if ( $new_push_notifications_enabled !== $wp_amp_themes_options->get_setting( 'push_notifications_enabled' ) ) { $changed = 1; $wp_amp_themes_options->update_settings( 'push_notifications_enabled', $new_push_notifications_enabled ); } if ( $changed ) { $response['status'] = 1; $response['message'] = 'Your settings have been successfully modified!'; } else { $response['message'] = 'Your 
settings have not changed.'; } } } // End if(). echo wp_json_encode( $response ); } // End if(). exit(); } /** * Mark the user as being subscribed to the mailing list. */ public function subscribe() { if ( current_user_can( 'manage_options' ) ) { $status = 0; if ( isset( $_POST ) && is_array( $_POST ) && ! empty( $_POST ) ) { if ( isset( $_POST['wp_amp_themes_subscribed'] ) && false != $_POST['wp_amp_themes_subscribed'] ) { $wp_amp_themes_options = new Options(); $subscribed = $wp_amp_themes_options->get_setting( 'joined_subscriber_list' ); if ( false == $subscribed ) { $status = 1; $wp_amp_themes_options->update_settings( 'joined_subscriber_list', $_POST['wp_amp_themes_subscribed'] ); } } } echo $status; } exit(); } /** * Switch the user theme. */ public function switch_theme() { if ( current_user_can( 'manage_options' ) ){ $response = 0; if ( ! empty( $_GET ) ) { $wp_amp_themes_config = new Themes_Config(); if ( isset( $_GET['theme'] ) && in_array( $_GET['theme'], $wp_amp_themes_config->allowed_themes, true ) ) { $wp_amp_themes_options = new Options(); $new_theme = sanitize_text_field( $_GET['theme'] ); $wp_amp_themes_options->update_settings( 'customize', [] ); $wp_amp_themes_options->update_settings( 'theme', $new_theme ); $response = 1; } } echo $response; } exit(); } /** * Register installed premium themes in the database. */ public function sync() { if ( current_user_can( 'manage_options' ) ) { $response = [ 'status' => 0, 'message' => 'There was an error. Please reload the page and try again.', ]; $wp_amp_themes_options = new Options(); $installed_themes = $wp_amp_themes_options->get_setting( 'installed_themes' ); $wp_amp_themes_config = new Themes_Config(); $allowed_themes = $wp_amp_themes_config->allowed_themes; $new_installed_themes = []; foreach ( $allowed_themes as $allowed_theme ) { if ( is_dir( WP_AMP_THEMES_PLUGIN_PATH . 
"frontend/themes/$allowed_theme/") ) { $new_installed_themes[] = $allowed_theme; } } if ( $installed_themes === $new_installed_themes ) { $response['message'] = 'No new premium themes installed.'; } else { $wp_amp_themes_options->update_settings( 'installed_themes', $new_installed_themes ); $response['message'] = 'Sync complete. Your new themes have been registered.'; $response['status'] = 1; } echo wp_json_encode( $response ); } exit(); } }
{ "content_hash": "05344fd899f41d48a8026c13bdd89c8f", "timestamp": "", "source": "github", "line_count": 189, "max_line_length": 126, "avg_line_length": 25.343915343915345, "alnum_prop": 0.6050104384133612, "repo_name": "appticles/wp-amp-themes", "id": "9a4b3f59492d99bd0f99d2eb11fe815f346fd0f0", "size": "4790", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "admin/class-admin-ajax.php", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "7844" }, { "name": "JavaScript", "bytes": "58978" }, { "name": "PHP", "bytes": "73468" } ], "symlink_target": "" }
import React from 'react'; import PropTypes from 'prop-types'; import watermark from 'watermark-dom'; import { seafileAPI } from '../../utils/seafile-api'; import { siteName } from '../../utils/constants'; import { Utils } from '../../utils/utils'; import toaster from '../toast'; import FileInfo from './file-info'; import FileToolbar from './file-toolbar'; import CommentPanel from './comment-panel'; import FileDetails from '../dirent-detail/file-details'; import '../../css/file-view.css'; const propTypes = { onSave: PropTypes.func, content: PropTypes.object.isRequired, isSaving: PropTypes.bool, needSave: PropTypes.bool, participants: PropTypes.array, onParticipantsChange: PropTypes.func, }; const { isStarred, isLocked, lockedByMe, repoID, filePath, enableWatermark, userNickName, repoName, parentDir, fileName } = window.app.pageOptions; class FileView extends React.Component { constructor(props) { super(props); this.state = { isStarred: isStarred, isLocked: isLocked, lockedByMe: lockedByMe, isCommentPanelOpen: false, isDetailsPanelOpen: false }; } toggleDetailsPanel = () => { this.setState({isDetailsPanelOpen: !this.state.isDetailsPanelOpen}); } toggleCommentPanel = () => { this.setState({ isCommentPanelOpen: !this.state.isCommentPanelOpen }); } toggleStar = () => { if (this.state.isStarred) { seafileAPI.unstarItem(repoID, filePath).then((res) => { this.setState({ isStarred: false }); }).catch((error) => { const errorMsg = Utils.getErrorMsg(error); toaster.danger(errorMsg); }); } else { seafileAPI.starItem(repoID, filePath).then((res) => { this.setState({ isStarred: true }); }).catch((error) => { const errorMsg = Utils.getErrorMsg(error); toaster.danger(errorMsg); }); } } toggleLockFile = () => { if (this.state.isLocked) { seafileAPI.unlockfile(repoID, filePath).then((res) => { this.setState({ isLocked: false, lockedByMe: false }); }).catch((error) => { const errorMsg = Utils.getErrorMsg(error); toaster.danger(errorMsg); }); } else { seafileAPI.lockfile(repoID, 
filePath).then((res) => { this.setState({ isLocked: true, lockedByMe: true }); }).catch((error) => { const errorMsg = Utils.getErrorMsg(error); toaster.danger(errorMsg); }); } } render() { const { isDetailsPanelOpen } = this.state; return ( <div className="h-100 d-flex flex-column"> <div className="file-view-header d-flex justify-content-between align-items-center"> <FileInfo isStarred={this.state.isStarred} isLocked={this.state.isLocked} toggleStar={this.toggleStar} /> <FileToolbar isLocked={this.state.isLocked} lockedByMe={this.state.lockedByMe} onSave={this.props.onSave} isSaving={this.props.isSaving} needSave={this.props.needSave} toggleLockFile={this.toggleLockFile} toggleCommentPanel={this.toggleCommentPanel} toggleDetailsPanel={this.toggleDetailsPanel} /> </div> <div className="file-view-body flex-auto d-flex o-hidden"> {this.props.content} {this.state.isCommentPanelOpen && <CommentPanel toggleCommentPanel={this.toggleCommentPanel} participants={this.props.participants} onParticipantsChange={this.props.onParticipantsChange} /> } {isDetailsPanelOpen && <FileDetails repoID={repoID} repoName={repoName} path={parentDir} dirent={{'name': fileName, type: 'file'}} togglePanel={this.toggleDetailsPanel} /> } </div> </div> ); } } if (enableWatermark) { watermark.init({ watermark_txt: `${siteName} ${userNickName}`, watermark_alpha: 0.075 }); } FileView.propTypes = propTypes; export default FileView;
{ "content_hash": "32b8d676b425a5028e07d398756521f3", "timestamp": "", "source": "github", "line_count": 153, "max_line_length": 92, "avg_line_length": 27.764705882352942, "alnum_prop": 0.5962806026365348, "repo_name": "miurahr/seahub", "id": "7f0a61ecb1821b8ddd502580be6e958e7539b47e", "size": "4248", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "frontend/src/components/file-view/file-view.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "231001" }, { "name": "HTML", "bytes": "750509" }, { "name": "JavaScript", "bytes": "2430915" }, { "name": "Python", "bytes": "1500021" }, { "name": "Shell", "bytes": "8856" } ], "symlink_target": "" }
package org.cloudfoundry.jvmkill; import java.util.concurrent.CountDownLatch; final class Spawner extends Thread { private final CountDownLatch latch; Spawner(CountDownLatch latch) { this.latch = latch; } @Override @SuppressWarnings("InfiniteLoopStatement") public void run() { try { this.latch.await(); } catch (InterruptedException e) { // suppress } System.out.println("Exhausting threads"); for (; ; ) { try { new Sleeper().start(); System.out.print("."); } catch (Throwable t) { // suppress } } } }
{ "content_hash": "1ded3d65c69094b56dc0436cf0e80efa", "timestamp": "", "source": "github", "line_count": 36, "max_line_length": 49, "avg_line_length": 19.61111111111111, "alnum_prop": 0.5212464589235127, "repo_name": "glyn/jvmkill", "id": "fe74386770541668a4534da4b2c9b1fb21b980ea", "size": "1326", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "resource-exhaustion-generator/src/main/java/org/cloudfoundry/jvmkill/Spawner.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "644" }, { "name": "Dockerfile", "bytes": "459" }, { "name": "Java", "bytes": "5659" }, { "name": "Rust", "bytes": "80391" }, { "name": "Shell", "bytes": "1595" } ], "symlink_target": "" }
package br.com.thiagomoreira.liferay.plugins.notfound.services.model; import com.liferay.portal.model.PersistedModel; /** * The extended model interface for the NotFound service. Represents a row in the &quot;TM_NF_NotFound&quot; database table, with each column mapped to a property of this class. * * @author Thiago Moreira * @see NotFoundModel * @see br.com.thiagomoreira.liferay.plugins.notfound.services.model.impl.NotFoundImpl * @see br.com.thiagomoreira.liferay.plugins.notfound.services.model.impl.NotFoundModelImpl * @generated */ public interface NotFound extends NotFoundModel, PersistedModel { /* * NOTE FOR DEVELOPERS: * * Never modify this interface directly. Add methods to {@link br.com.thiagomoreira.liferay.plugins.notfound.services.model.impl.NotFoundImpl} and rerun ServiceBuilder to automatically copy the method declarations to this interface. */ }
{ "content_hash": "1baeb433cb1980bec9be5b47d8688ef1", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 236, "avg_line_length": 43.142857142857146, "alnum_prop": 0.7748344370860927, "repo_name": "tmoreira2020/liferay-thiagomoreira-plugins", "id": "cbcc76ab1d422efa9c2b7cc2bf95bcf1a5559f13", "size": "1537", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "webs/not-found-services-web/not-found-services-web-service/src/main/java/br/com/thiagomoreira/liferay/plugins/notfound/services/model/NotFound.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "8604" }, { "name": "Java", "bytes": "243322" }, { "name": "JavaScript", "bytes": "631" } ], "symlink_target": "" }
<?php /** * Message translations. * * This file is automatically generated by 'yii message' command. * It contains the localizable messages extracted from source code. * You may modify this file by translating the extracted messages. * * Each array element represents the translation (value) of a message (key). * If the value is empty, the message is considered as not translated. * Messages that no longer need translation will have their translations * enclosed between a pair of '@@' marks. * * Message string can be used with plural forms format. Check i18n section * of the guide for details. * * NOTE: this file must be saved in UTF-8 encoding. */ return [ 'Access denied!' => 'アクセスが拒否されました!', 'Insufficent permissions!' => '権限が不十分です!', ];
{ "content_hash": "59fc4b6cd339f5f2890ca2c6ad36adfb", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 76, "avg_line_length": 35.04545454545455, "alnum_prop": 0.7211413748378729, "repo_name": "LeonidLyalin/vova", "id": "423b20365c2828f4c42d46d3fc55d5747021770f", "size": "815", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "common/humhub/protected/humhub/modules/comment/messages/ja/controllers_CommentController.php", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "ApacheConf", "bytes": "227" }, { "name": "Batchfile", "bytes": "3096" }, { "name": "CSS", "bytes": "824207" }, { "name": "HTML", "bytes": "25309" }, { "name": "JavaScript", "bytes": "1284304" }, { "name": "PHP", "bytes": "8757729" }, { "name": "Ruby", "bytes": "375" }, { "name": "Shell", "bytes": "3256" } ], "symlink_target": "" }
@implementation Amplitude (Test) @dynamic backgroundQueue; @dynamic initializerQueue; @dynamic eventsData; @dynamic initialized; @dynamic sessionId; @dynamic lastEventTime; - (void)flushQueue { [self flushQueueWithQueue:[self backgroundQueue]]; } - (void)flushQueueWithQueue:(NSOperationQueue*) queue { [queue waitUntilAllOperationsAreFinished]; } - (NSDictionary *)getEvent:(NSInteger) fromEnd { NSArray *events = [self eventsData][@"events"]; return [events objectAtIndex:[events count] - fromEnd - 1]; } - (NSDictionary *)getLastEvent { return [[self eventsData][@"events"] lastObject]; } - (NSUInteger)queuedEventCount { return [[self eventsData][@"events"] count]; } - (void)flushUploads:(void (^)())handler { [self performSelector:@selector(uploadEvents)]; [self flushQueue]; // Wait a second for the upload response to get into the queue. dispatch_time_t delay = dispatch_time(DISPATCH_TIME_NOW, 2.0 * NSEC_PER_SEC); dispatch_after(delay, dispatch_get_main_queue(), ^(void){ [self flushQueue]; handler(); }); } @end
{ "content_hash": "e4f101892922a3f39f2a9b306ed550fb", "timestamp": "", "source": "github", "line_count": 44, "max_line_length": 81, "avg_line_length": 24.931818181818183, "alnum_prop": 0.6946216955332726, "repo_name": "lopper/Amplitude-iOS", "id": "b05d4240fa7bcc2b4df41177810bff38f28650bc", "size": "1280", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "AmplitudeTests/Amplitude+Test.m", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "1904" }, { "name": "Objective-C", "bytes": "116943" }, { "name": "Ruby", "bytes": "755" } ], "symlink_target": "" }
ACCEPTED #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
{ "content_hash": "815c886c25c97b674553c34eba4f067a", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 39, "avg_line_length": 10.307692307692308, "alnum_prop": 0.6940298507462687, "repo_name": "mdoering/backbone", "id": "a4a48aef4004dac4feef2e85c740c2a8ce6ebdbd", "size": "185", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Fagales/Fagaceae/Quercus/Quercus pongtungensis/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
""" Control Navigation Features """ import constants from pixy import Pixy class Navigation(): def __init__(self, arduino): self.pixy = Pixy() self.arduino = arduino def stop(self): self.arduino.set_motors(0, 0) def forward(self): self.arduino.set_motors(constants.FORWARD_SPEED_LEFT, constants.FORWARD_SPEED_RIGHT) def reverse(self): self.arduino.set_motors(-1 * constants.FORWARD_SPEED_LEFT, -1 * constants.FORWARD_SPEED_RIGHT) def spin_clockwise(self): self.arduino.set_motors(constants.SPIN_SPEED_LEFT, -1 * constants.SPIN_SPEED_RIGHT) def spin_counterclockwise(self): self.arduino.set_motors(-1 * constants.SPIN_SPEED_LEFT, constants.SPIN_SPEED_RIGHT) def spin_clockwise_fast(self): self.arduino.set_motors(constants.SPIN_SPEED_LEFT_FAST, -1 * constants.SPIN_SPEED_RIGHT_FAST) def spin_counterclockwise_fast(self): self.arduino.set_motors(-1 * constants.SPIN_SPEED_LEFT_FAST, constants.SPIN_SPEED_RIGHT_FAST) def spin_and_search_cone(self): if (self.pixy.signature_name != "cone"): self.pixy.set_signature_cone() block_x = self.pixy.get_pixy_block_x_average(self.arduino.get_pixy_blocks()) if (block_x != None): print("Cone Found") self.stop() return "CONE_FOUND" else: self.spin_clockwise_fast() return None def wander_and_search_cone(self): if (self.pixy.signature_name != "cone"): self.pixy.set_signature_cone() block_x = self.pixy.get_pixy_block_x_average(self.arduino.get_pixy_blocks()) self.arduino.print_ir() if (block_x != None): print("Cone Found") self.stop() return "CONE_FOUND" else: if (self.arduino.ir_wall()): print("Wandered into wall! 
Spinning...") self.spin_clockwise_fast() else: self.forward() return None # Returns "CONE_IN_RANGE" if cone in range, "LOST_CONE" if lose cone, otherwise None def approach_cone(self): if (self.pixy.signature_name != "cone"): self.pixy.set_signature_cone() block_x = self.pixy.get_pixy_block_x_average(self.arduino.get_pixy_blocks()) ping = self.arduino.get_ping() if (block_x == None): print("Lost Cone") self.stop() return "LOST_CONE" if (ping <= constants.PING_CONE_THRESHOLD and ping != 0): print("Cone in Range") self.stop() return "CONE_IN_RANGE" print("x:" + str(block_x)) if (block_x < constants.PIXY_BOUNDARY_LEFT): print("Go left") self.spin_counterclockwise() return None elif(block_x > constants.PIXY_BOUNDARY_RIGHT): print("Go right") self.spin_clockwise() return None else: print("Go forward") self.forward() return None def spin_and_search_target(self): if (self.pixy.signature_name != "target"): self.pixy.set_signature_target() block_x = self.pixy.get_pixy_block_x_average(self.arduino.get_pixy_blocks()) if (block_x != None): print("Target Found") self.stop() return "TARGET_FOUND" else: self.spin_clockwise() return None def wander_and_search_target(self): if (self.pixy.signature_name != "target"): self.pixy.set_signature_target() block_x = self.pixy.get_pixy_block_x_average(self.arduino.get_pixy_blocks()) self.arduino.print_ir() if (block_x != None): print("Target Found") self.stop() return "TARGET_FOUND" else: if (self.arduino.ir_wall_target()): print("Wandered into wall! 
Spinning...") self.spin_clockwise_fast() else: self.forward() return None # Returns "TARGET_IN_RANGE" if target in range, "LOST_TARGET" if lose target, otherwise None def approach_target(self): if (self.pixy.signature_name != "target"): self.pixy.set_signature_target() block_x = self.pixy.get_pixy_block_x_average(self.arduino.get_pixy_blocks()) ping = self.arduino.get_ping() ir_mid = self.arduino.get_ir_mid() if (block_x == None): print("Lost Target") self.stop() return "LOST_TARGET" # if (ping <= constants.PING_TARGET_THRESHOLD and ping != 0): if (ir_mid >= constants.IR_TARGET_THRESHOLD): print("Target in Range") self.stop() return "TARGET_IN_RANGE" print("x:" + str(block_x)) if (block_x < constants.PIXY_BOUNDARY_LEFT): print("Go left") self.spin_counterclockwise() return None elif(block_x > constants.PIXY_BOUNDARY_RIGHT): print("Go right") self.spin_clockwise() return None else: print("Go forward") self.forward() return None
{ "content_hash": "51a960a4c9358e47edb406ec348c7bdd", "timestamp": "", "source": "github", "line_count": 162, "max_line_length": 98, "avg_line_length": 28.65432098765432, "alnum_prop": 0.6331322705730289, "repo_name": "zacharylawrence/ENEE408I-Team-9", "id": "bfc850ea3fe6cbb1e8372d58292bc96fe8a04325", "size": "4682", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pi/navigation.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "2578" }, { "name": "HTML", "bytes": "1602" }, { "name": "JavaScript", "bytes": "2541" }, { "name": "Python", "bytes": "22830" } ], "symlink_target": "" }
<?php namespace <%= project.namespace %>\<%= module.namespace %>\Controllers\View; use \<%= project.namespace %>\<%= module.namespace %>\Library\Controllers\ModuleViewController; /** * Concrete implementation of <%= module.namespace %> module controller * * @RoutePrefix("/<%= module.slug %>/view") */ class <%= controller.name %>Controller extends ModuleViewController { /** * @Route("/<%= controller.slug %>", paths={module="<%= module.slug %>"}, methods={"GET"}, name="<%= module.slug %>-<%= controller.slug %>-index") */ public function indexAction() { } }
{ "content_hash": "7dc03a47295fce9bd8532bd73ccff26f", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 150, "avg_line_length": 28.19047619047619, "alnum_prop": 0.6317567567567568, "repo_name": "michaelkrone/generator-phalcon", "id": "ed33a27a7c4475b93037e8965fd2a106b105414b", "size": "592", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "controller-view/templates/controllers/view/IndexController.php", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "21586" }, { "name": "PHP", "bytes": "44122" } ], "symlink_target": "" }
package android.support.v4.view; import android.content.Context; import android.content.res.Resources; import android.content.res.TypedArray; import android.database.DataSetObserver; import android.graphics.Canvas; import android.graphics.Rect; import android.graphics.drawable.Drawable; import android.os.Build; import android.os.Bundle; import android.os.Parcel; import android.os.Parcelable; import android.os.SystemClock; import android.support.annotation.CallSuper; import android.support.annotation.DrawableRes; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.support.v4.content.ContextCompat; import android.support.v4.os.ParcelableCompat; import android.support.v4.os.ParcelableCompatCreatorCallbacks; import android.support.v4.view.accessibility.AccessibilityEventCompat; import android.support.v4.view.accessibility.AccessibilityNodeInfoCompat; import android.support.v4.view.accessibility.AccessibilityRecordCompat; import android.support.v4.widget.EdgeEffectCompat; import android.util.AttributeSet; import android.util.Log; import android.view.FocusFinder; import android.view.Gravity; import android.view.KeyEvent; import android.view.MotionEvent; import android.view.SoundEffectConstants; import android.view.VelocityTracker; import android.view.View; import android.view.ViewConfiguration; import android.view.ViewGroup; import android.view.ViewParent; import android.view.accessibility.AccessibilityEvent; import android.view.animation.Interpolator; import android.widget.Scroller; import java.lang.annotation.ElementType; import java.lang.annotation.Inherited; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; /** * 作者:weilu on 2017/3/23. 
* Thanks: https://github.com/castorflex/VerticalViewPager */ public class VerticalViewPager extends ViewGroup { private static final String TAG = "ViewPager"; private static final boolean DEBUG = false; private static final boolean USE_CACHE = false; private static final int DEFAULT_OFFSCREEN_PAGES = 1; private static final int MAX_SETTLE_DURATION = 600; // ms private static final int MIN_DISTANCE_FOR_FLING = 25; // dips private static final int DEFAULT_GUTTER_SIZE = 16; // dips private static final int MIN_FLING_VELOCITY = 400; // dips static final int[] LAYOUT_ATTRS = new int[] { android.R.attr.layout_gravity }; /** * Used to track what the expected number of items in the adapter should be. * If the app changes this when we don't expect it, we'll throw a big obnoxious exception. */ private int mExpectedAdapterCount; static class ItemInfo { Object object; int position; boolean scrolling; float heightFactor; float offset; } private static final Comparator<ItemInfo> COMPARATOR = new Comparator<ItemInfo>(){ @Override public int compare(ItemInfo lhs, ItemInfo rhs) { return lhs.position - rhs.position; } }; private static final Interpolator sInterpolator = new Interpolator() { @Override public float getInterpolation(float t) { t -= 1.0f; return t * t * t * t * t + 1.0f; } }; private final ArrayList<ItemInfo> mItems = new ArrayList<ItemInfo>(); private final ItemInfo mTempItem = new ItemInfo(); private final Rect mTempRect = new Rect(); PagerAdapter mAdapter; int mCurItem; // Index of currently displayed page. private int mRestoredCurItem = -1; private Parcelable mRestoredAdapterState = null; private ClassLoader mRestoredClassLoader = null; private Scroller mScroller; private boolean mIsScrollStarted; private PagerObserver mObserver; private int mPageMargin; private Drawable mMarginDrawable; private int mLeftPageBounds; private int mRightPageBounds; // Offsets of the first and last items, if known. 
// Set during population, used to determine if we are at the beginning // or end of the pager data set during touch scrolling. private float mFirstOffset = -Float.MAX_VALUE; private float mLastOffset = Float.MAX_VALUE; private int mChildWidthMeasureSpec; private int mChildHeightMeasureSpec; private boolean mInLayout; private boolean mScrollingCacheEnabled; private boolean mPopulatePending; private int mOffscreenPageLimit = DEFAULT_OFFSCREEN_PAGES; private boolean mIsBeingDragged; private boolean mIsUnableToDrag; private int mDefaultGutterSize; private int mGutterSize; private int mTouchSlop; /** * Position of the last motion event. */ private float mLastMotionX; private float mLastMotionY; private float mInitialMotionX; private float mInitialMotionY; /** * ID of the active pointer. This is used to retain consistency during * drags/flings if multiple pointers are used. */ private int mActivePointerId = INVALID_POINTER; /** * Sentinel value for no current active pointer. * Used by {@link #mActivePointerId}. */ private static final int INVALID_POINTER = -1; /** * Determines speed during touch scrolling */ private VelocityTracker mVelocityTracker; private int mMinimumVelocity; private int mMaximumVelocity; private int mFlingDistance; private int mCloseEnough; // If the pager is at least this close to its final position, complete the scroll // on touch down and let the user interact with the content inside instead of // "catching" the flinging pager. 
private static final int CLOSE_ENOUGH = 2; // dp private boolean mFakeDragging; private long mFakeDragBeginTime; private EdgeEffectCompat mTopEdge; private EdgeEffectCompat mBottomEdge; private boolean mFirstLayout = true; private boolean mNeedCalculatePageOffsets = false; private boolean mCalledSuper; private int mDecorChildCount; private List<OnPageChangeListener> mOnPageChangeListeners; private OnPageChangeListener mOnPageChangeListener; private OnPageChangeListener mInternalPageChangeListener; private List<OnAdapterChangeListener> mAdapterChangeListeners; private PageTransformer mPageTransformer; private int mPageTransformerLayerType; private Method mSetChildrenDrawingOrderEnabled; private static final int DRAW_ORDER_DEFAULT = 0; private static final int DRAW_ORDER_FORWARD = 1; private static final int DRAW_ORDER_REVERSE = 2; private int mDrawingOrder; private ArrayList<View> mDrawingOrderedChildren; private static final ViewPositionComparator sPositionComparator = new ViewPositionComparator(); /** * Indicates that the pager is in an idle, settled state. The current page * is fully in view and no animation is in progress. */ public static final int SCROLL_STATE_IDLE = 0; /** * Indicates that the pager is currently being dragged by the user. */ public static final int SCROLL_STATE_DRAGGING = 1; /** * Indicates that the pager is in the process of settling to a final position. */ public static final int SCROLL_STATE_SETTLING = 2; private final Runnable mEndScrollRunnable = new Runnable() { @Override public void run() { setScrollState(SCROLL_STATE_IDLE); populate(); } }; private int mScrollState = SCROLL_STATE_IDLE; /** * Callback interface for responding to changing state of the selected page. */ public interface OnPageChangeListener { /** * This method will be invoked when the current page is scrolled, either as part * of a programmatically initiated smooth scroll or a user initiated touch scroll. 
* * @param position Position index of the first page currently being displayed. * Page position+1 will be visible if positionOffset is nonzero. * @param positionOffset Value from [0, 1) indicating the offset from the page at position. * @param positionOffsetPixels Value in pixels indicating the offset from position. */ void onPageScrolled(int position, float positionOffset, int positionOffsetPixels); /** * This method will be invoked when a new page becomes selected. Animation is not * necessarily complete. * * @param position Position index of the new selected page. */ void onPageSelected(int position); /** * Called when the scroll state changes. Useful for discovering when the user * begins dragging, when the pager is automatically settling to the current page, * or when it is fully stopped/idle. * * @param state The new scroll state. * @see ViewPager#SCROLL_STATE_IDLE * @see ViewPager#SCROLL_STATE_DRAGGING * @see ViewPager#SCROLL_STATE_SETTLING */ void onPageScrollStateChanged(int state); } /** * Simple implementation of the {@link OnPageChangeListener} interface with stub * implementations of each method. Extend this if you do not intend to override * every method of {@link OnPageChangeListener}. */ public static class SimpleOnPageChangeListener implements OnPageChangeListener { @Override public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) { // This space for rent } @Override public void onPageSelected(int position) { // This space for rent } @Override public void onPageScrollStateChanged(int state) { // This space for rent } } /** * A PageTransformer is invoked whenever a visible/attached page is scrolled. * This offers an opportunity for the application to apply a custom transformation * to the page views using animation properties. 
* * <p>As property animation is only supported as of Android 3.0 and forward, * setting a PageTransformer on a ViewPager on earlier platform versions will * be ignored.</p> */ public interface PageTransformer { /** * Apply a property transformation to the given page. * * @param page Apply the transformation to this page * @param position Position of page relative to the current front-and-center * position of the pager. 0 is front and center. 1 is one full * page position to the right, and -1 is one page position to the left. */ void transformPage(View page, float position); } /** * Callback interface for responding to adapter changes. */ public interface OnAdapterChangeListener { /** * Called when the adapter for the given view pager has changed. * * @param viewPager VerticalViewPager where the adapter change has happened * @param oldAdapter the previously set adapter * @param newAdapter the newly set adapter */ void onAdapterChanged(@NonNull VerticalViewPager viewPager, // <----修改部分 @Nullable PagerAdapter oldAdapter, @Nullable PagerAdapter newAdapter); } /** * Annotation which allows marking of views to be decoration views when added to a view * pager. * * <p>Views marked with this annotation can be added to the view pager with a layout resource. 
* * <p>You can also control whether a view is a decor view but setting * {@link LayoutParams#isDecor} on the child's layout params.</p> */ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.TYPE) @Inherited public @interface DecorView { } public VerticalViewPager(Context context) { super(context); initViewPager(); } public VerticalViewPager(Context context, AttributeSet attrs) { super(context, attrs); initViewPager(); } void initViewPager() { setWillNotDraw(false); setDescendantFocusability(FOCUS_AFTER_DESCENDANTS); setFocusable(true); final Context context = getContext(); mScroller = new Scroller(context, sInterpolator); final ViewConfiguration configuration = ViewConfiguration.get(context); final float density = context.getResources().getDisplayMetrics().density; mTouchSlop = configuration.getScaledPagingTouchSlop(); mMinimumVelocity = (int) (MIN_FLING_VELOCITY * density); mMaximumVelocity = configuration.getScaledMaximumFlingVelocity(); mTopEdge = new EdgeEffectCompat(context); mBottomEdge = new EdgeEffectCompat(context); mFlingDistance = (int) (MIN_DISTANCE_FOR_FLING * density); mCloseEnough = (int) (CLOSE_ENOUGH * density); mDefaultGutterSize = (int) (DEFAULT_GUTTER_SIZE * density); ViewCompat.setAccessibilityDelegate(this, new MyAccessibilityDelegate()); if (ViewCompat.getImportantForAccessibility(this) == ViewCompat.IMPORTANT_FOR_ACCESSIBILITY_AUTO) { ViewCompat.setImportantForAccessibility(this, ViewCompat.IMPORTANT_FOR_ACCESSIBILITY_YES); } ViewCompat.setOnApplyWindowInsetsListener(this, new android.support.v4.view.OnApplyWindowInsetsListener() { private final Rect mTempRect = new Rect(); @Override public WindowInsetsCompat onApplyWindowInsets(final View v, final WindowInsetsCompat originalInsets) { // First let the ViewPager itself try and consume them... 
final WindowInsetsCompat applied = ViewCompat.onApplyWindowInsets(v, originalInsets); if (applied.isConsumed()) { // If the ViewPager consumed all insets, return now return applied; } // Now we'll manually dispatch the insets to our children. Since ViewPager // children are always full-height, we do not want to use the standard // ViewGroup dispatchApplyWindowInsets since if child 0 consumes them, // the rest of the children will not receive any insets. To workaround this // we manually dispatch the applied insets, not allowing children to // consume them from each other. We do however keep track of any insets // which are consumed, returning the union of our children's consumption final Rect res = mTempRect; res.left = applied.getSystemWindowInsetLeft(); res.top = applied.getSystemWindowInsetTop(); res.right = applied.getSystemWindowInsetRight(); res.bottom = applied.getSystemWindowInsetBottom(); for (int i = 0, count = getChildCount(); i < count; i++) { final WindowInsetsCompat childInsets = ViewCompat .dispatchApplyWindowInsets(getChildAt(i), applied); // Now keep track of any consumed by tracking each dimension's min // value res.left = Math.min(childInsets.getSystemWindowInsetLeft(), res.left); res.top = Math.min(childInsets.getSystemWindowInsetTop(), res.top); res.right = Math.min(childInsets.getSystemWindowInsetRight(), res.right); res.bottom = Math.min(childInsets.getSystemWindowInsetBottom(), res.bottom); } // Now return a new WindowInsets, using the consumed window insets return applied.replaceSystemWindowInsets( res.left, res.top, res.right, res.bottom); } }); } @Override protected void onDetachedFromWindow() { removeCallbacks(mEndScrollRunnable); // To be on the safe side, abort the scroller if ((mScroller != null) && !mScroller.isFinished()) { mScroller.abortAnimation(); } super.onDetachedFromWindow(); } void setScrollState(int newState) { if (mScrollState == newState) { return; } mScrollState = newState; if (mPageTransformer != null) { // 
PageTransformers can do complex things that benefit from hardware layers. enableLayers(newState != SCROLL_STATE_IDLE); } dispatchOnScrollStateChanged(newState); } /** * Set a PagerAdapter that will supply views for this pager as needed. * * @param adapter Adapter to use */ public void setAdapter(PagerAdapter adapter) { if (mAdapter != null) { mAdapter.setViewPagerObserver(null); mAdapter.startUpdate(this); for (int i = 0; i < mItems.size(); i++) { final ItemInfo ii = mItems.get(i); mAdapter.destroyItem(this, ii.position, ii.object); } mAdapter.finishUpdate(this); mItems.clear(); removeNonDecorViews(); mCurItem = 0; scrollTo(0, 0); } final PagerAdapter oldAdapter = mAdapter; mAdapter = adapter; mExpectedAdapterCount = 0; if (mAdapter != null) { if (mObserver == null) { mObserver = new PagerObserver(); } mAdapter.setViewPagerObserver(mObserver); mPopulatePending = false; final boolean wasFirstLayout = mFirstLayout; mFirstLayout = true; mExpectedAdapterCount = mAdapter.getCount(); if (mRestoredCurItem >= 0) { mAdapter.restoreState(mRestoredAdapterState, mRestoredClassLoader); setCurrentItemInternal(mRestoredCurItem, false, true); mRestoredCurItem = -1; mRestoredAdapterState = null; mRestoredClassLoader = null; } else if (!wasFirstLayout) { populate(); } else { requestLayout(); } } // Dispatch the change to any listeners if (mAdapterChangeListeners != null && !mAdapterChangeListeners.isEmpty()) { for (int i = 0, count = mAdapterChangeListeners.size(); i < count; i++) { mAdapterChangeListeners.get(i).onAdapterChanged(this, oldAdapter, adapter); } } } private void removeNonDecorViews() { for (int i = 0; i < getChildCount(); i++) { final View child = getChildAt(i); final LayoutParams lp = (LayoutParams) child.getLayoutParams(); if (!lp.isDecor) { removeViewAt(i); i--; } } } /** * Retrieve the current adapter supplying pages. 
* * @return The currently registered PagerAdapter */ public PagerAdapter getAdapter() { return mAdapter; } /** * Add a listener that will be invoked whenever the adapter for this ViewPager changes. * * @param listener listener to add */ public void addOnAdapterChangeListener(@NonNull OnAdapterChangeListener listener) { if (mAdapterChangeListeners == null) { mAdapterChangeListeners = new ArrayList<>(); } mAdapterChangeListeners.add(listener); } /** * Remove a listener that was previously added via * {@link #addOnAdapterChangeListener(OnAdapterChangeListener)}. * * @param listener listener to remove */ public void removeOnAdapterChangeListener(@NonNull OnAdapterChangeListener listener) { if (mAdapterChangeListeners != null) { mAdapterChangeListeners.remove(listener); } } // private int getClientWidth() { // return getMeasuredWidth() - getPaddingLeft() - getPaddingRight(); // } private int getClientHeight() { return getMeasuredHeight() - getPaddingTop() - getPaddingBottom(); // <----修改部分 } /** * Set the currently selected page. If the ViewPager has already been through its first * layout with its current adapter there will be a smooth animated transition between * the current item and the specified item. * * @param item Item index to select */ public void setCurrentItem(int item) { mPopulatePending = false; setCurrentItemInternal(item, !mFirstLayout, false); } /** * Set the currently selected page. 
* * @param item Item index to select * @param smoothScroll True to smoothly scroll to the new item, false to transition immediately */ public void setCurrentItem(int item, boolean smoothScroll) { mPopulatePending = false; setCurrentItemInternal(item, smoothScroll, false); } public int getCurrentItem() { return mCurItem; } void setCurrentItemInternal(int item, boolean smoothScroll, boolean always) { setCurrentItemInternal(item, smoothScroll, always, 0); } void setCurrentItemInternal(int item, boolean smoothScroll, boolean always, int velocity) { if (mAdapter == null || mAdapter.getCount() <= 0) { setScrollingCacheEnabled(false); return; } if (!always && mCurItem == item && mItems.size() != 0) { setScrollingCacheEnabled(false); return; } if (item < 0) { item = 0; } else if (item >= mAdapter.getCount()) { item = mAdapter.getCount() - 1; } final int pageLimit = mOffscreenPageLimit; if (item > (mCurItem + pageLimit) || item < (mCurItem - pageLimit)) { // We are doing a jump by more than one page. To avoid // glitches, we want to keep all current pages in the view // until the scroll ends. for (int i = 0; i < mItems.size(); i++) { mItems.get(i).scrolling = true; } } final boolean dispatchSelected = mCurItem != item; if (mFirstLayout) { // We don't have any idea how big we are yet and shouldn't have any pages either. // Just set things up and let the pending layout handle things. 
mCurItem = item; if (dispatchSelected) { dispatchOnPageSelected(item); } requestLayout(); } else { populate(item); scrollToItem(item, smoothScroll, velocity, dispatchSelected); } } private void scrollToItem(int item, boolean smoothScroll, int velocity, boolean dispatchSelected) { final ItemInfo curInfo = infoForPosition(item); int destY = 0; if (curInfo != null) { final int height = getClientHeight(); destY = (int) (height * Math.max(mFirstOffset, Math.min(curInfo.offset, mLastOffset))); } if (smoothScroll) { smoothScrollTo(0, destY, velocity); if (dispatchSelected) { dispatchOnPageSelected(item); } } else { if (dispatchSelected) { dispatchOnPageSelected(item); } completeScroll(false); scrollTo(0, destY); pageScrolled(destY); } } /** * Set a listener that will be invoked whenever the page changes or is incrementally * scrolled. See {@link OnPageChangeListener}. * * @param listener Listener to set * * @deprecated Use {@link #addOnPageChangeListener(OnPageChangeListener)} * and {@link #removeOnPageChangeListener(OnPageChangeListener)} instead. */ @Deprecated public void setOnPageChangeListener(OnPageChangeListener listener) { mOnPageChangeListener = listener; } /** * Add a listener that will be invoked whenever the page changes or is incrementally * scrolled. See {@link OnPageChangeListener}. * * <p>Components that add a listener should take care to remove it when finished. * Other components that take ownership of a view may call {@link #clearOnPageChangeListeners()} * to remove all attached listeners.</p> * * @param listener listener to add */ public void addOnPageChangeListener(OnPageChangeListener listener) { if (mOnPageChangeListeners == null) { mOnPageChangeListeners = new ArrayList<>(); } mOnPageChangeListeners.add(listener); } /** * Remove a listener that was previously added via * {@link #addOnPageChangeListener(OnPageChangeListener)}. 
* * @param listener listener to remove */ public void removeOnPageChangeListener(OnPageChangeListener listener) { if (mOnPageChangeListeners != null) { mOnPageChangeListeners.remove(listener); } } /** * Remove all listeners that are notified of any changes in scroll state or position. */ public void clearOnPageChangeListeners() { if (mOnPageChangeListeners != null) { mOnPageChangeListeners.clear(); } } /** * Sets a {@link PageTransformer} that will be called for each attached page whenever * the scroll position is changed. This allows the application to apply custom property * transformations to each page, overriding the default sliding behavior. * * <p><em>Note:</em> Prior to Android 3.0 the property animation APIs did not exist. * As a result, setting a PageTransformer prior to Android 3.0 (API 11) will have no effect. * By default, calling this method will cause contained pages to use * {@link ViewCompat#LAYER_TYPE_HARDWARE}. This layer type allows custom alpha transformations, * but it will cause issues if any of your pages contain a {@link android.view.SurfaceView} * and you have not called {@link android.view.SurfaceView#setZOrderOnTop(boolean)} to put that * {@link android.view.SurfaceView} above your app content. To disable this behavior, call * {@link #setPageTransformer(boolean,PageTransformer,int)} and pass * {@link ViewCompat#LAYER_TYPE_NONE} for {@code pageLayerType}.</p> * * @param reverseDrawingOrder true if the supplied PageTransformer requires page views * to be drawn from last to first instead of first to last. * @param transformer PageTransformer that will modify each page's animation properties */ public void setPageTransformer(boolean reverseDrawingOrder, PageTransformer transformer) { setPageTransformer(reverseDrawingOrder, transformer, ViewCompat.LAYER_TYPE_HARDWARE); } /** * Sets a {@link PageTransformer} that will be called for each attached page whenever * the scroll position is changed. 
This allows the application to apply custom property * transformations to each page, overriding the default sliding behavior. * * <p><em>Note:</em> Prior to Android 3.0 ({@link Build.VERSION_CODES#HONEYCOMB API 11}), * the property animation APIs did not exist. As a result, setting a PageTransformer prior * to API 11 will have no effect.</p> * * @param reverseDrawingOrder true if the supplied PageTransformer requires page views * to be drawn from last to first instead of first to last. * @param transformer PageTransformer that will modify each page's animation properties * @param pageLayerType View layer type that should be used for ViewPager pages. It should be * either {@link ViewCompat#LAYER_TYPE_HARDWARE}, * {@link ViewCompat#LAYER_TYPE_SOFTWARE}, or * {@link ViewCompat#LAYER_TYPE_NONE}. */ public void setPageTransformer(boolean reverseDrawingOrder, PageTransformer transformer, int pageLayerType) { if (Build.VERSION.SDK_INT >= 11) { final boolean hasTransformer = transformer != null; final boolean needsPopulate = hasTransformer != (mPageTransformer != null); mPageTransformer = transformer; setChildrenDrawingOrderEnabledCompat(hasTransformer); if (hasTransformer) { mDrawingOrder = reverseDrawingOrder ? 
DRAW_ORDER_REVERSE : DRAW_ORDER_FORWARD; mPageTransformerLayerType = pageLayerType; } else { mDrawingOrder = DRAW_ORDER_DEFAULT; } if (needsPopulate) populate(); } } void setChildrenDrawingOrderEnabledCompat(boolean enable) { if (Build.VERSION.SDK_INT >= 7) { if (mSetChildrenDrawingOrderEnabled == null) { try { mSetChildrenDrawingOrderEnabled = ViewGroup.class.getDeclaredMethod( "setChildrenDrawingOrderEnabled", new Class[] { Boolean.TYPE }); } catch (NoSuchMethodException e) { Log.e(TAG, "Can't find setChildrenDrawingOrderEnabled", e); } } try { mSetChildrenDrawingOrderEnabled.invoke(this, enable); } catch (Exception e) { Log.e(TAG, "Error changing children drawing order", e); } } } @Override protected int getChildDrawingOrder(int childCount, int i) { final int index = mDrawingOrder == DRAW_ORDER_REVERSE ? childCount - 1 - i : i; final int result = ((LayoutParams) mDrawingOrderedChildren.get(index).getLayoutParams()).childIndex; return result; } /** * Set a separate OnPageChangeListener for internal use by the support library. * * @param listener Listener to set * @return The old listener that was set, if any. */ OnPageChangeListener setInternalPageChangeListener(OnPageChangeListener listener) { OnPageChangeListener oldListener = mInternalPageChangeListener; mInternalPageChangeListener = listener; return oldListener; } /** * Returns the number of pages that will be retained to either side of the * current page in the view hierarchy in an idle state. Defaults to 1. * * @return How many pages will be kept offscreen on either side * @see #setOffscreenPageLimit(int) */ public int getOffscreenPageLimit() { return mOffscreenPageLimit; } /** * Set the number of pages that should be retained to either side of the * current page in the view hierarchy in an idle state. Pages beyond this * limit will be recreated from the adapter when needed. * * <p>This is offered as an optimization. 
If you know in advance the number * of pages you will need to support or have lazy-loading mechanisms in place * on your pages, tweaking this setting can have benefits in perceived smoothness * of paging animations and interaction. If you have a small number of pages (3-4) * that you can keep active all at once, less time will be spent in layout for * newly created view subtrees as the user pages back and forth.</p> * * <p>You should keep this limit low, especially if your pages have complex layouts. * This setting defaults to 1.</p> * * @param limit How many pages will be kept offscreen in an idle state. */ public void setOffscreenPageLimit(int limit) { if (limit < DEFAULT_OFFSCREEN_PAGES) { Log.w(TAG, "Requested offscreen page limit " + limit + " too small; defaulting to " + DEFAULT_OFFSCREEN_PAGES); limit = DEFAULT_OFFSCREEN_PAGES; } if (limit != mOffscreenPageLimit) { mOffscreenPageLimit = limit; populate(); } } /** * Set the margin between pages. * * @param marginPixels Distance between adjacent pages in pixels * @see #getPageMargin() * @see #setPageMarginDrawable(Drawable) * @see #setPageMarginDrawable(int) */ public void setPageMargin(int marginPixels) { final int oldMargin = mPageMargin; mPageMargin = marginPixels; final int height = getHeight(); recomputeScrollPosition(height, height, marginPixels, oldMargin); requestLayout(); } /** * Return the margin between pages. * * @return The size of the margin in pixels */ public int getPageMargin() { return mPageMargin; } /** * Set a drawable that will be used to fill the margin between pages. * * @param d Drawable to display between pages */ public void setPageMarginDrawable(Drawable d) { mMarginDrawable = d; if (d != null) refreshDrawableState(); setWillNotDraw(d == null); invalidate(); } /** * Set a drawable that will be used to fill the margin between pages. 
* * @param resId Resource ID of a drawable to display between pages */ public void setPageMarginDrawable(@DrawableRes int resId) { setPageMarginDrawable(ContextCompat.getDrawable(getContext(), resId)); } @Override protected boolean verifyDrawable(Drawable who) { return super.verifyDrawable(who) || who == mMarginDrawable; } @Override protected void drawableStateChanged() { super.drawableStateChanged(); final Drawable d = mMarginDrawable; if (d != null && d.isStateful()) { d.setState(getDrawableState()); } } // We want the duration of the page snap animation to be influenced by the distance that // the screen has to travel, however, we don't want this duration to be effected in a // purely linear fashion. Instead, we use this method to moderate the effect that the distance // of travel has on the overall snap duration. float distanceInfluenceForSnapDuration(float f) { f -= 0.5f; // center the values about 0. f *= 0.3f * Math.PI / 2.0f; return (float) Math.sin(f); } /** * Like {@link View#scrollBy}, but scroll smoothly instead of immediately. * * @param x the number of pixels to scroll by on the X axis * @param y the number of pixels to scroll by on the Y axis */ void smoothScrollTo(int x, int y) { smoothScrollTo(x, y, 0); } /** * Like {@link View#scrollBy}, but scroll smoothly instead of immediately. * * @param x the number of pixels to scroll by on the X axis * @param y the number of pixels to scroll by on the Y axis * @param velocity the velocity associated with a fling, if applicable. (0 otherwise) */ void smoothScrollTo(int x, int y, int velocity) { if (getChildCount() == 0) { // Nothing to do. setScrollingCacheEnabled(false); return; } int sy; boolean wasScrolling = (mScroller != null) && !mScroller.isFinished(); if (wasScrolling) { // We're in the middle of a previously initiated scrolling. 
Check to see // whether that scrolling has actually started (if we always call getStartX // we can get a stale value from the scroller if it hadn't yet had its first // computeScrollOffset call) to decide what is the current scrolling position. sy = mIsScrollStarted ? mScroller.getCurrY() : mScroller.getStartY(); // And abort the current scrolling. mScroller.abortAnimation(); setScrollingCacheEnabled(false); } else { sy = getScrollY(); } int sx = getScrollX(); int dx = x - sx; int dy = y - sy; if (dx == 0 && dy == 0) { completeScroll(false); populate(); setScrollState(SCROLL_STATE_IDLE); return; } setScrollingCacheEnabled(true); setScrollState(SCROLL_STATE_SETTLING); final int height = getClientHeight(); final int halfHeight = height / 2; final float distanceRatio = Math.min(1f, 1.0f * Math.abs(dy) / height); final float distance = halfHeight + halfHeight * distanceInfluenceForSnapDuration(distanceRatio); int duration; velocity = Math.abs(velocity); if (velocity > 0) { duration = 4 * Math.round(1000 * Math.abs(distance / velocity)); } else { final float pageHeight = height * mAdapter.getPageWidth(mCurItem); final float pageDelta = (float) Math.abs(dy) / (pageHeight + mPageMargin); duration = (int) ((pageDelta + 1) * 100); } duration = Math.min(duration, MAX_SETTLE_DURATION); // Reset the "scroll started" flag. It will be flipped to true in all places // where we call computeScrollOffset(). mIsScrollStarted = false; mScroller.startScroll(sx, sy, dx, dy, duration); ViewCompat.postInvalidateOnAnimation(this); } ItemInfo addNewItem(int position, int index) { ItemInfo ii = new ItemInfo(); ii.position = position; ii.object = mAdapter.instantiateItem(this, position); ii.heightFactor = mAdapter.getPageWidth(position); if (index < 0 || index >= mItems.size()) { mItems.add(ii); } else { mItems.add(index, ii); } return ii; } void dataSetChanged() { // This method only gets called if our observer is attached, so mAdapter is non-null. 
final int adapterCount = mAdapter.getCount(); mExpectedAdapterCount = adapterCount; boolean needPopulate = mItems.size() < mOffscreenPageLimit * 2 + 1 && mItems.size() < adapterCount; int newCurrItem = mCurItem; boolean isUpdating = false; for (int i = 0; i < mItems.size(); i++) { final ItemInfo ii = mItems.get(i); final int newPos = mAdapter.getItemPosition(ii.object); if (newPos == PagerAdapter.POSITION_UNCHANGED) { continue; } if (newPos == PagerAdapter.POSITION_NONE) { mItems.remove(i); i--; if (!isUpdating) { mAdapter.startUpdate(this); isUpdating = true; } mAdapter.destroyItem(this, ii.position, ii.object); needPopulate = true; if (mCurItem == ii.position) { // Keep the current item in the valid range newCurrItem = Math.max(0, Math.min(mCurItem, adapterCount - 1)); needPopulate = true; } continue; } if (ii.position != newPos) { if (ii.position == mCurItem) { // Our current item changed position. Follow it. newCurrItem = newPos; } ii.position = newPos; needPopulate = true; } } if (isUpdating) { mAdapter.finishUpdate(this); } Collections.sort(mItems, COMPARATOR); if (needPopulate) { // Reset our known page widths; populate will recompute them. final int childCount = getChildCount(); for (int i = 0; i < childCount; i++) { final View child = getChildAt(i); final LayoutParams lp = (LayoutParams) child.getLayoutParams(); if (!lp.isDecor) { lp.heightFactor = 0.f; } } setCurrentItemInternal(newCurrItem, false, true); requestLayout(); } } void populate() { populate(mCurItem); } void populate(int newCurrentItem) { ItemInfo oldCurInfo = null; if (mCurItem != newCurrentItem) { oldCurInfo = infoForPosition(mCurItem); mCurItem = newCurrentItem; } if (mAdapter == null) { sortChildDrawingOrder(); return; } // Bail now if we are waiting to populate. This is to hold off // on creating views from the time the user releases their finger to // fling to a new position until we have finished the scroll to // that position, avoiding glitches from happening at that point. 
if (mPopulatePending) { if (DEBUG) Log.i(TAG, "populate is pending, skipping for now..."); sortChildDrawingOrder(); return; } // Also, don't populate until we are attached to a window. This is to // avoid trying to populate before we have restored our view hierarchy // state and conflicting with what is restored. if (getWindowToken() == null) { return; } mAdapter.startUpdate(this); final int pageLimit = mOffscreenPageLimit; final int startPos = Math.max(0, mCurItem - pageLimit); final int N = mAdapter.getCount(); final int endPos = Math.min(N - 1, mCurItem + pageLimit); if (N != mExpectedAdapterCount) { String resName; try { resName = getResources().getResourceName(getId()); } catch (Resources.NotFoundException e) { resName = Integer.toHexString(getId()); } throw new IllegalStateException("The application's PagerAdapter changed the adapter's" + " contents without calling PagerAdapter#notifyDataSetChanged!" + " Expected adapter item count: " + mExpectedAdapterCount + ", found: " + N + " Pager id: " + resName + " Pager class: " + getClass() + " Problematic adapter: " + mAdapter.getClass()); } // Locate the currently focused item or add it if needed. int curIndex = -1; ItemInfo curItem = null; for (curIndex = 0; curIndex < mItems.size(); curIndex++) { final ItemInfo ii = mItems.get(curIndex); if (ii.position >= mCurItem) { if (ii.position == mCurItem) curItem = ii; break; } } if (curItem == null && N > 0) { curItem = addNewItem(mCurItem, curIndex); } // Fill 3x the available width or up to the number of offscreen // pages requested to either side, whichever is larger. // If we have no current item we have no work to do. if (curItem != null) { float extraWidthTop = 0.f; int itemIndex = curIndex - 1; ItemInfo ii = itemIndex >= 0 ? mItems.get(itemIndex) : null; final int clientHeight = getClientHeight(); final float topHeightNeeded = clientHeight <= 0 ? 
0 : 2.f - curItem.heightFactor + (float) getPaddingTop() / (float) clientHeight; for (int pos = mCurItem - 1; pos >= 0; pos--) { if (extraWidthTop >= topHeightNeeded && pos < startPos) { if (ii == null) { break; } if (pos == ii.position && !ii.scrolling) { mItems.remove(itemIndex); mAdapter.destroyItem(this, pos, ii.object); if (DEBUG) { Log.i(TAG, "populate() - destroyItem() with pos: " + pos + " view: " + ((View) ii.object)); } itemIndex--; curIndex--; ii = itemIndex >= 0 ? mItems.get(itemIndex) : null; } } else if (ii != null && pos == ii.position) { extraWidthTop += ii.heightFactor; itemIndex--; ii = itemIndex >= 0 ? mItems.get(itemIndex) : null; } else { ii = addNewItem(pos, itemIndex + 1); extraWidthTop += ii.heightFactor; curIndex++; ii = itemIndex >= 0 ? mItems.get(itemIndex) : null; } } float extraHeightBottom = curItem.heightFactor; itemIndex = curIndex + 1; if (extraHeightBottom < 2.f) { ii = itemIndex < mItems.size() ? mItems.get(itemIndex) : null; final float bottomHeightNeeded = clientHeight <= 0 ? 0 : (float) getPaddingBottom() / (float) clientHeight + 2.f; for (int pos = mCurItem + 1; pos < N; pos++) { if (extraHeightBottom >= bottomHeightNeeded && pos > endPos) { if (ii == null) { break; } if (pos == ii.position && !ii.scrolling) { mItems.remove(itemIndex); mAdapter.destroyItem(this, pos, ii.object); if (DEBUG) { Log.i(TAG, "populate() - destroyItem() with pos: " + pos + " view: " + ((View) ii.object)); } ii = itemIndex < mItems.size() ? mItems.get(itemIndex) : null; } } else if (ii != null && pos == ii.position) { extraHeightBottom += ii.heightFactor; itemIndex++; ii = itemIndex < mItems.size() ? mItems.get(itemIndex) : null; } else { ii = addNewItem(pos, itemIndex); itemIndex++; extraHeightBottom += ii.heightFactor; ii = itemIndex < mItems.size() ? 
mItems.get(itemIndex) : null; } } } calculatePageOffsets(curItem, curIndex, oldCurInfo); } if (DEBUG) { Log.i(TAG, "Current page list:"); for (int i = 0; i < mItems.size(); i++) { Log.i(TAG, "#" + i + ": page " + mItems.get(i).position); } } mAdapter.setPrimaryItem(this, mCurItem, curItem != null ? curItem.object : null); mAdapter.finishUpdate(this); // Check width measurement of current pages and drawing sort order. // Update LayoutParams as needed. final int childCount = getChildCount(); for (int i = 0; i < childCount; i++) { final View child = getChildAt(i); final LayoutParams lp = (LayoutParams) child.getLayoutParams(); lp.childIndex = i; if (!lp.isDecor && lp.heightFactor == 0.f) { // 0 means requery the adapter for this, it doesn't have a valid width. final ItemInfo ii = infoForChild(child); if (ii != null) { lp.heightFactor = ii.heightFactor; lp.position = ii.position; } } } sortChildDrawingOrder(); if (hasFocus()) { View currentFocused = findFocus(); ItemInfo ii = currentFocused != null ? infoForAnyChild(currentFocused) : null; if (ii == null || ii.position != mCurItem) { for (int i = 0; i < getChildCount(); i++) { View child = getChildAt(i); ii = infoForChild(child); if (ii != null && ii.position == mCurItem) { if (child.requestFocus(View.FOCUS_FORWARD)) { break; } } } } } } private void sortChildDrawingOrder() { if (mDrawingOrder != DRAW_ORDER_DEFAULT) { if (mDrawingOrderedChildren == null) { mDrawingOrderedChildren = new ArrayList<View>(); } else { mDrawingOrderedChildren.clear(); } final int childCount = getChildCount(); for (int i = 0; i < childCount; i++) { final View child = getChildAt(i); mDrawingOrderedChildren.add(child); } Collections.sort(mDrawingOrderedChildren, sPositionComparator); } } private void calculatePageOffsets(ItemInfo curItem, int curIndex, ItemInfo oldCurInfo) { final int N = mAdapter.getCount(); final int height = getClientHeight(); final float marginOffset = height > 0 ? 
(float) mPageMargin / height : 0; // Fix up offsets for later layout. if (oldCurInfo != null) { final int oldCurPosition = oldCurInfo.position; // Base offsets off of oldCurInfo. if (oldCurPosition < curItem.position) { int itemIndex = 0; ItemInfo ii = null; float offset = oldCurInfo.offset + oldCurInfo.heightFactor + marginOffset; for (int pos = oldCurPosition + 1; pos <= curItem.position && itemIndex < mItems.size(); pos++) { ii = mItems.get(itemIndex); while (pos > ii.position && itemIndex < mItems.size() - 1) { itemIndex++; ii = mItems.get(itemIndex); } while (pos < ii.position) { // We don't have an item populated for this, // ask the adapter for an offset. offset += mAdapter.getPageWidth(pos) + marginOffset; pos++; } ii.offset = offset; offset += ii.heightFactor + marginOffset; } } else if (oldCurPosition > curItem.position) { int itemIndex = mItems.size() - 1; ItemInfo ii = null; float offset = oldCurInfo.offset; for (int pos = oldCurPosition - 1; pos >= curItem.position && itemIndex >= 0; pos--) { ii = mItems.get(itemIndex); while (pos < ii.position && itemIndex > 0) { itemIndex--; ii = mItems.get(itemIndex); } while (pos > ii.position) { // We don't have an item populated for this, // ask the adapter for an offset. offset -= mAdapter.getPageWidth(pos) + marginOffset; pos--; } offset -= ii.heightFactor + marginOffset; ii.offset = offset; } } } // Base all offsets off of curItem. final int itemCount = mItems.size(); float offset = curItem.offset; int pos = curItem.position - 1; mFirstOffset = curItem.position == 0 ? curItem.offset : -Float.MAX_VALUE; mLastOffset = curItem.position == N - 1 ? 
curItem.offset + curItem.heightFactor - 1 : Float.MAX_VALUE; // Previous pages for (int i = curIndex - 1; i >= 0; i--, pos--) { final ItemInfo ii = mItems.get(i); while (pos > ii.position) { offset -= mAdapter.getPageWidth(pos--) + marginOffset; } offset -= ii.heightFactor + marginOffset; ii.offset = offset; if (ii.position == 0) mFirstOffset = offset; } offset = curItem.offset + curItem.heightFactor + marginOffset; pos = curItem.position + 1; // Next pages for (int i = curIndex + 1; i < itemCount; i++, pos++) { final ItemInfo ii = mItems.get(i); while (pos < ii.position) { offset += mAdapter.getPageWidth(pos++) + marginOffset; } if (ii.position == N - 1) { mLastOffset = offset + ii.heightFactor - 1; } ii.offset = offset; offset += ii.heightFactor + marginOffset; } mNeedCalculatePageOffsets = false; } /** * This is the persistent state that is saved by ViewPager. Only needed * if you are creating a sublass of ViewPager that must save its own * state, in which case it should implement a subclass of this which * contains that state. 
*/ public static class SavedState extends AbsSavedState { int position; Parcelable adapterState; ClassLoader loader; public SavedState(Parcelable superState) { super(superState); } @Override public void writeToParcel(Parcel out, int flags) { super.writeToParcel(out, flags); out.writeInt(position); out.writeParcelable(adapterState, flags); } @Override public String toString() { return "FragmentPager.SavedState{" + Integer.toHexString(System.identityHashCode(this)) + " position=" + position + "}"; } public static final Parcelable.Creator<SavedState> CREATOR = ParcelableCompat.newCreator( new ParcelableCompatCreatorCallbacks<SavedState>() { @Override public SavedState createFromParcel(Parcel in, ClassLoader loader) { return new SavedState(in, loader); } @Override public SavedState[] newArray(int size) { return new SavedState[size]; } }); SavedState(Parcel in, ClassLoader loader) { super(in, loader); if (loader == null) { loader = getClass().getClassLoader(); } position = in.readInt(); adapterState = in.readParcelable(loader); this.loader = loader; } } @Override public Parcelable onSaveInstanceState() { Parcelable superState = super.onSaveInstanceState(); SavedState ss = new SavedState(superState); ss.position = mCurItem; if (mAdapter != null) { ss.adapterState = mAdapter.saveState(); } return ss; } @Override public void onRestoreInstanceState(Parcelable state) { if (!(state instanceof SavedState)) { super.onRestoreInstanceState(state); return; } SavedState ss = (SavedState) state; super.onRestoreInstanceState(ss.getSuperState()); if (mAdapter != null) { mAdapter.restoreState(ss.adapterState, ss.loader); setCurrentItemInternal(ss.position, false, true); } else { mRestoredCurItem = ss.position; mRestoredAdapterState = ss.adapterState; mRestoredClassLoader = ss.loader; } } @Override public void addView(View child, int index, ViewGroup.LayoutParams params) { if (!checkLayoutParams(params)) { params = generateLayoutParams(params); } final LayoutParams lp = (LayoutParams) 
params; // Any views added via inflation should be classed as part of the decor lp.isDecor |= isDecorView(child); if (mInLayout) { if (lp != null && lp.isDecor) { throw new IllegalStateException("Cannot add pager decor view during layout"); } lp.needsMeasure = true; addViewInLayout(child, index, params); } else { super.addView(child, index, params); } if (USE_CACHE) { if (child.getVisibility() != GONE) { child.setDrawingCacheEnabled(mScrollingCacheEnabled); } else { child.setDrawingCacheEnabled(false); } } } private static boolean isDecorView(@NonNull View view) { Class<?> clazz = view.getClass(); return clazz.getAnnotation(DecorView.class) != null; } @Override public void removeView(View view) { if (mInLayout) { removeViewInLayout(view); } else { super.removeView(view); } } ItemInfo infoForChild(View child) { for (int i = 0; i < mItems.size(); i++) { ItemInfo ii = mItems.get(i); if (mAdapter.isViewFromObject(child, ii.object)) { return ii; } } return null; } ItemInfo infoForAnyChild(View child) { ViewParent parent; while ((parent = child.getParent()) != this) { if (parent == null || !(parent instanceof View)) { return null; } child = (View) parent; } return infoForChild(child); } ItemInfo infoForPosition(int position) { for (int i = 0; i < mItems.size(); i++) { ItemInfo ii = mItems.get(i); if (ii.position == position) { return ii; } } return null; } @Override protected void onAttachedToWindow() { super.onAttachedToWindow(); mFirstLayout = true; } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { // For simple implementation, our internal size is always 0. // We depend on the container to specify the layout size of // our view. We can't really know what it is since we will be // adding and removing different arbitrary views and do not // want the layout to change as this happens. 
setMeasuredDimension(getDefaultSize(0, widthMeasureSpec), getDefaultSize(0, heightMeasureSpec)); final int measuredWidth = getMeasuredWidth(); final int maxGutterSize = measuredWidth / 10; mGutterSize = Math.min(maxGutterSize, mDefaultGutterSize); // Children are just made to fill our space. int childWidthSize = measuredWidth - getPaddingLeft() - getPaddingRight(); int childHeightSize = getMeasuredHeight() - getPaddingTop() - getPaddingBottom(); /* * Make sure all children have been properly measured. Decor views first. * Right now we cheat and make this less complicated by assuming decor * views won't intersect. We will pin to edges based on gravity. */ int size = getChildCount(); for (int i = 0; i < size; ++i) { final View child = getChildAt(i); if (child.getVisibility() != GONE) { final LayoutParams lp = (LayoutParams) child.getLayoutParams(); if (lp != null && lp.isDecor) { final int hgrav = lp.gravity & Gravity.HORIZONTAL_GRAVITY_MASK; final int vgrav = lp.gravity & Gravity.VERTICAL_GRAVITY_MASK; int widthMode = MeasureSpec.AT_MOST; int heightMode = MeasureSpec.AT_MOST; boolean consumeVertical = vgrav == Gravity.TOP || vgrav == Gravity.BOTTOM; boolean consumeHorizontal = hgrav == Gravity.LEFT || hgrav == Gravity.RIGHT; if (consumeVertical) { widthMode = MeasureSpec.EXACTLY; } else if (consumeHorizontal) { heightMode = MeasureSpec.EXACTLY; } int widthSize = childWidthSize; int heightSize = childHeightSize; if (lp.width != LayoutParams.WRAP_CONTENT) { widthMode = MeasureSpec.EXACTLY; if (lp.width != LayoutParams.MATCH_PARENT) { widthSize = lp.width; } } if (lp.height != LayoutParams.WRAP_CONTENT) { heightMode = MeasureSpec.EXACTLY; if (lp.height != LayoutParams.MATCH_PARENT) { heightSize = lp.height; } } final int widthSpec = MeasureSpec.makeMeasureSpec(widthSize, widthMode); final int heightSpec = MeasureSpec.makeMeasureSpec(heightSize, heightMode); child.measure(widthSpec, heightSpec); if (consumeVertical) { childHeightSize -= child.getMeasuredHeight(); } else 
if (consumeHorizontal) { childWidthSize -= child.getMeasuredWidth(); } } } } mChildWidthMeasureSpec = MeasureSpec.makeMeasureSpec(childWidthSize, MeasureSpec.EXACTLY); mChildHeightMeasureSpec = MeasureSpec.makeMeasureSpec(childHeightSize, MeasureSpec.EXACTLY); // Make sure we have created all fragments that we need to have shown. mInLayout = true; populate(); mInLayout = false; // Page views next. size = getChildCount(); for (int i = 0; i < size; ++i) { final View child = getChildAt(i); if (child.getVisibility() != GONE) { if (DEBUG) { Log.v(TAG, "Measuring #" + i + " " + child + ": " + mChildWidthMeasureSpec); } final LayoutParams lp = (LayoutParams) child.getLayoutParams(); if (lp == null || !lp.isDecor) { final int heightSpec = MeasureSpec.makeMeasureSpec( (int) (childHeightSize * lp.heightFactor), MeasureSpec.EXACTLY); child.measure(mChildWidthMeasureSpec, heightSpec); } } } } @Override protected void onSizeChanged(int w, int h, int oldw, int oldh) { super.onSizeChanged(w, h, oldw, oldh); // Make sure scroll position is set correctly. if (h != oldh) { recomputeScrollPosition(h, oldh, mPageMargin, mPageMargin); } } private void recomputeScrollPosition(int height, int oldHeight, int margin, int oldMargin) { if (oldHeight > 0 && !mItems.isEmpty()) { if (!mScroller.isFinished()) { mScroller.setFinalX(getCurrentItem() * getClientHeight()); } else { final int heightWithMargin = height - getPaddingTop() - getPaddingBottom() + margin; final int oldHeightWithMargin = oldHeight - getPaddingTop() - getPaddingBottom() + oldMargin; final int ypos = getScrollY(); final float pageOffset = (float) ypos / oldHeightWithMargin; final int newOffsetPixels = (int) (pageOffset * heightWithMargin); scrollTo(getScrollX(), newOffsetPixels); } } else { final ItemInfo ii = infoForPosition(mCurItem); final float scrollOffset = ii != null ? 
Math.min(ii.offset, mLastOffset) : 0; final int scrollPos = (int) (scrollOffset * (height - getPaddingTop() - getPaddingBottom())); if (scrollPos != getScrollY()) { completeScroll(false); scrollTo(getScrollX(), scrollPos); } } } @Override protected void onLayout(boolean changed, int l, int t, int r, int b) { final int count = getChildCount(); int width = r - l; int height = b - t; int paddingLeft = getPaddingLeft(); int paddingTop = getPaddingTop(); int paddingRight = getPaddingRight(); int paddingBottom = getPaddingBottom(); final int scrollY = getScrollY(); int decorCount = 0; // First pass - decor views. We need to do this in two passes so that // we have the proper offsets for non-decor views later. for (int i = 0; i < count; i++) { final View child = getChildAt(i); if (child.getVisibility() != GONE) { final LayoutParams lp = (LayoutParams) child.getLayoutParams(); int childLeft = 0; int childTop = 0; if (lp.isDecor) { final int hgrav = lp.gravity & Gravity.HORIZONTAL_GRAVITY_MASK; final int vgrav = lp.gravity & Gravity.VERTICAL_GRAVITY_MASK; switch (hgrav) { default: childLeft = paddingLeft; break; case Gravity.LEFT: childLeft = paddingLeft; paddingLeft += child.getMeasuredWidth(); break; case Gravity.CENTER_HORIZONTAL: childLeft = Math.max((width - child.getMeasuredWidth()) / 2, paddingLeft); break; case Gravity.RIGHT: childLeft = width - paddingRight - child.getMeasuredWidth(); paddingRight += child.getMeasuredWidth(); break; } switch (vgrav) { default: childTop = paddingTop; break; case Gravity.TOP: childTop = paddingTop; paddingTop += child.getMeasuredHeight(); break; case Gravity.CENTER_VERTICAL: childTop = Math.max((height - child.getMeasuredHeight()) / 2, paddingTop); break; case Gravity.BOTTOM: childTop = height - paddingBottom - child.getMeasuredHeight(); paddingBottom += child.getMeasuredHeight(); break; } childTop += scrollY; child.layout(childLeft, childTop, childLeft + child.getMeasuredWidth(), childTop + child.getMeasuredHeight()); decorCount++; 
} } } final int childHeight = height - paddingTop - paddingBottom; // Page views. Do this once we have the right padding offsets from above. for (int i = 0; i < count; i++) { final View child = getChildAt(i); if (child.getVisibility() != GONE) { final LayoutParams lp = (LayoutParams) child.getLayoutParams(); ItemInfo ii; if (!lp.isDecor && (ii = infoForChild(child)) != null) { int toff = (int) (childHeight * ii.offset); int childLeft = paddingLeft; int childTop = paddingTop + toff; if (lp.needsMeasure) { // This was added during layout and needs measurement. // Do it now that we know what we're working with. lp.needsMeasure = false; final int widthSpec = MeasureSpec.makeMeasureSpec( (int) (width - paddingLeft - paddingRight), MeasureSpec.EXACTLY); final int heightSpec = MeasureSpec.makeMeasureSpec( (int) (childHeight * lp.heightFactor), MeasureSpec.EXACTLY); child.measure(widthSpec, heightSpec); } if (DEBUG) { Log.v(TAG, "Positioning #" + i + " " + child + " f=" + ii.object + ":" + childLeft + "," + childTop + " " + child.getMeasuredWidth() + "x" + child.getMeasuredHeight()); } child.layout(childLeft, childTop, childLeft + child.getMeasuredWidth(), childTop + child.getMeasuredHeight()); } } } mLeftPageBounds = paddingLeft; mRightPageBounds = width - paddingRight; mDecorChildCount = decorCount; if (mFirstLayout) { scrollToItem(mCurItem, false, 0, false); } mFirstLayout = false; } @Override public void computeScroll() { mIsScrollStarted = true; if (!mScroller.isFinished() && mScroller.computeScrollOffset()) { int oldX = getScrollX(); int oldY = getScrollY(); int x = mScroller.getCurrX(); int y = mScroller.getCurrY(); if (oldX != x || oldY != y) { scrollTo(x, y); if (!pageScrolled(y)) { mScroller.abortAnimation(); scrollTo(x, 0); } } // Keep on drawing until the animation has finished. ViewCompat.postInvalidateOnAnimation(this); return; } // Done with scroll, clean up state. 
completeScroll(true); } private boolean pageScrolled(int ypos) { if (mItems.size() == 0) { if (mFirstLayout) { // If we haven't been laid out yet, we probably just haven't been populated yet. // Let's skip this call since it doesn't make sense in this state return false; } mCalledSuper = false; onPageScrolled(0, 0, 0); if (!mCalledSuper) { throw new IllegalStateException( "onPageScrolled did not call superclass implementation"); } return false; } final ItemInfo ii = infoForCurrentScrollPosition(); final int height = getClientHeight(); final int heightWithMargin = height + mPageMargin; final float marginOffset = (float) mPageMargin / height; final int currentPage = ii.position; final float pageOffset = (((float) ypos / height) - ii.offset) / (ii.heightFactor + marginOffset); final int offsetPixels = (int) (pageOffset * heightWithMargin); mCalledSuper = false; onPageScrolled(currentPage, pageOffset, offsetPixels); if (!mCalledSuper) { throw new IllegalStateException( "onPageScrolled did not call superclass implementation"); } return true; } /** * This method will be invoked when the current page is scrolled, either as part * of a programmatically initiated smooth scroll or a user initiated touch scroll. * If you override this method you must call through to the superclass implementation * (e.g. super.onPageScrolled(position, offset, offsetPixels)) before onPageScrolled * returns. * * @param position Position index of the first page currently being displayed. * Page position+1 will be visible if positionOffset is nonzero. * @param offset Value from [0, 1) indicating the offset from the page at position. * @param offsetPixels Value in pixels indicating the offset from position. */ @CallSuper protected void onPageScrolled(int position, float offset, int offsetPixels) { // Offset any decor views if needed - keep them on-screen at all times. 
if (mDecorChildCount > 0) { final int scrollY = getScrollY(); int paddingTop = getPaddingTop(); int paddingBottom = getPaddingBottom(); final int height = getHeight(); final int childCount = getChildCount(); for (int i = 0; i < childCount; i++) { final View child = getChildAt(i); final LayoutParams lp = (LayoutParams) child.getLayoutParams(); if (!lp.isDecor) continue; final int vgrav = lp.gravity & Gravity.VERTICAL_GRAVITY_MASK; int childTop = 0; switch (vgrav) { default: childTop = paddingTop; break; case Gravity.TOP: childTop = paddingTop; paddingTop += child.getHeight(); break; case Gravity.CENTER_VERTICAL: childTop = Math.max((height - child.getMeasuredHeight()) / 2, paddingTop); break; case Gravity.BOTTOM: childTop = height - paddingBottom - child.getMeasuredHeight(); paddingBottom += child.getMeasuredHeight(); break; } childTop += scrollY; final int childOffset = childTop - child.getTop(); if (childOffset != 0) { child.offsetTopAndBottom(childOffset); } } } dispatchOnPageScrolled(position, offset, offsetPixels); if (mPageTransformer != null) { final int scrollY = getScrollY(); final int childCount = getChildCount(); for (int i = 0; i < childCount; i++) { final View child = getChildAt(i); final LayoutParams lp = (LayoutParams) child.getLayoutParams(); if (lp.isDecor) continue; final float transformPos = (float) (child.getTop() - scrollY) / getClientHeight(); mPageTransformer.transformPage(child, transformPos); } } mCalledSuper = true; } private void dispatchOnPageScrolled(int position, float offset, int offsetPixels) { if (mOnPageChangeListener != null) { mOnPageChangeListener.onPageScrolled(position, offset, offsetPixels); } if (mOnPageChangeListeners != null) { for (int i = 0, z = mOnPageChangeListeners.size(); i < z; i++) { OnPageChangeListener listener = mOnPageChangeListeners.get(i); if (listener != null) { listener.onPageScrolled(position, offset, offsetPixels); } } } if (mInternalPageChangeListener != null) { 
mInternalPageChangeListener.onPageScrolled(position, offset, offsetPixels); } } private void dispatchOnPageSelected(int position) { if (mOnPageChangeListener != null) { mOnPageChangeListener.onPageSelected(position); } if (mOnPageChangeListeners != null) { for (int i = 0, z = mOnPageChangeListeners.size(); i < z; i++) { OnPageChangeListener listener = mOnPageChangeListeners.get(i); if (listener != null) { listener.onPageSelected(position); } } } if (mInternalPageChangeListener != null) { mInternalPageChangeListener.onPageSelected(position); } } private void dispatchOnScrollStateChanged(int state) { if (mOnPageChangeListener != null) { mOnPageChangeListener.onPageScrollStateChanged(state); } if (mOnPageChangeListeners != null) { for (int i = 0, z = mOnPageChangeListeners.size(); i < z; i++) { OnPageChangeListener listener = mOnPageChangeListeners.get(i); if (listener != null) { listener.onPageScrollStateChanged(state); } } } if (mInternalPageChangeListener != null) { mInternalPageChangeListener.onPageScrollStateChanged(state); } } private void completeScroll(boolean postEvents) { boolean needPopulate = mScrollState == SCROLL_STATE_SETTLING; if (needPopulate) { // Done with scroll, no longer want to cache view drawing. 
setScrollingCacheEnabled(false); boolean wasScrolling = !mScroller.isFinished(); if (wasScrolling) { mScroller.abortAnimation(); int oldX = getScrollX(); int oldY = getScrollY(); int x = mScroller.getCurrX(); int y = mScroller.getCurrY(); if (oldX != x || oldY != y) { scrollTo(x, y); if (y != oldY) { pageScrolled(y); } } } } mPopulatePending = false; for (int i = 0; i < mItems.size(); i++) { ItemInfo ii = mItems.get(i); if (ii.scrolling) { needPopulate = true; ii.scrolling = false; } } if (needPopulate) { if (postEvents) { ViewCompat.postOnAnimation(this, mEndScrollRunnable); } else { mEndScrollRunnable.run(); } } } private boolean isGutterDrag(float y, float dy) { return (y < mGutterSize && dy > 0) || (y > getHeight() - mGutterSize && dy < 0); } private void enableLayers(boolean enable) { final int childCount = getChildCount(); for (int i = 0; i < childCount; i++) { final int layerType = enable ? mPageTransformerLayerType : ViewCompat.LAYER_TYPE_NONE; ViewCompat.setLayerType(getChildAt(i), layerType, null); } } @Override public boolean onInterceptTouchEvent(MotionEvent ev) { /* * This method JUST determines whether we want to intercept the motion. * If we return true, onMotionEvent will be called and we do the actual * scrolling there. */ final int action = ev.getAction() & MotionEventCompat.ACTION_MASK; // Always take care of the touch gesture being complete. if (action == MotionEvent.ACTION_CANCEL || action == MotionEvent.ACTION_UP) { // Release the drag. if (DEBUG) Log.v(TAG, "Intercept done!"); resetTouch(); return false; } // Nothing more to do here if we have decided whether or not we // are dragging. if (action != MotionEvent.ACTION_DOWN) { if (mIsBeingDragged) { if (DEBUG) Log.v(TAG, "Intercept returning true!"); return true; } if (mIsUnableToDrag) { if (DEBUG) Log.v(TAG, "Intercept returning false!"); return false; } } switch (action) { case MotionEvent.ACTION_MOVE: { /* * mIsBeingDragged == false, otherwise the shortcut would have caught it. 
Check
                     * whether the user has moved far enough from his original down touch.
                     */

                    /*
                     * Locally do absolute value. mLastMotionY is set to the y value
                     * of the down event.
                     */
                    final int activePointerId = mActivePointerId;
                    if (activePointerId == INVALID_POINTER) {
                        // If we don't have a valid id, the touch down wasn't on content.
                        break;
                    }

                    final int pointerIndex = ev.findPointerIndex(activePointerId);
                    final float y = ev.getY(pointerIndex);
                    final float dy = y - mLastMotionY;
                    final float yDiff = Math.abs(dy);
                    final float x = ev.getX(pointerIndex);
                    final float xDiff = Math.abs(x - mInitialMotionX);
                    if (DEBUG) Log.v(TAG, "Moved x to " + x + "," + y + " diff=" + xDiff + "," + yDiff);

                    if (dy != 0 && !isGutterDrag(mLastMotionY, dy)
                            && canScroll(this, false, (int) dy, (int) x, (int) y)) {
                        // Nested view has scrollable area under this point. Let it be handled there.
                        mLastMotionX = x;
                        mLastMotionY = y;
                        mIsUnableToDrag = true;
                        return false;
                    }
                    // Mostly-vertical movement past the touch slop starts a page drag.
                    if (yDiff > mTouchSlop && yDiff * 0.5f > xDiff) {
                        if (DEBUG) Log.v(TAG, "Starting drag!");
                        mIsBeingDragged = true;
                        requestParentDisallowInterceptTouchEvent(true);
                        setScrollState(SCROLL_STATE_DRAGGING);
                        // Start the drag mTouchSlop past the initial touch so the page
                        // doesn't jump by the slop distance on the first move.
                        mLastMotionY = dy > 0
                                ? mInitialMotionY + mTouchSlop : mInitialMotionY - mTouchSlop;
                        mLastMotionX = x;
                        setScrollingCacheEnabled(true);
                    } else if (xDiff > mTouchSlop) {
                        // The finger has moved enough in the horizontal
                        // direction to be counted as a drag...  abort
                        // any attempt to drag vertically, to work correctly
                        // with children that have scrolling containers.
                        if (DEBUG) Log.v(TAG, "Starting unable to drag!");
                        mIsUnableToDrag = true;
                    }
                    if (mIsBeingDragged) {
                        // Scroll to follow the motion event
                        if (performDrag(y)) {
                            ViewCompat.postInvalidateOnAnimation(this);
                        }
                    }
                    break;
                }

                case MotionEvent.ACTION_DOWN: {
                    /*
                     * Remember location of down touch.
                     * ACTION_DOWN always refers to pointer index 0.
*/ mLastMotionX = mInitialMotionX = ev.getX(); mLastMotionY = mInitialMotionY = ev.getY(); mActivePointerId = ev.getPointerId(0); mIsUnableToDrag = false; mIsScrollStarted = true; mScroller.computeScrollOffset(); if (mScrollState == SCROLL_STATE_SETTLING && Math.abs(mScroller.getFinalY() - mScroller.getCurrY()) > mCloseEnough) { // Let the user 'catch' the pager as it animates. mScroller.abortAnimation(); mPopulatePending = false; populate(); mIsBeingDragged = true; requestParentDisallowInterceptTouchEvent(true); setScrollState(SCROLL_STATE_DRAGGING); } else { completeScroll(false); mIsBeingDragged = false; } if (DEBUG) { Log.v(TAG, "Down at " + mLastMotionX + "," + mLastMotionY + " mIsBeingDragged=" + mIsBeingDragged + "mIsUnableToDrag=" + mIsUnableToDrag); } break; } case MotionEventCompat.ACTION_POINTER_UP: onSecondaryPointerUp(ev); break; } if (mVelocityTracker == null) { mVelocityTracker = VelocityTracker.obtain(); } mVelocityTracker.addMovement(ev); /* * The only time we want to intercept motion events is if we are in the * drag mode. */ return mIsBeingDragged; } @Override public boolean onTouchEvent(MotionEvent ev) { if (mFakeDragging) { // A fake drag is in progress already, ignore this real one // but still eat the touch events. // (It is likely that the user is multi-touching the screen.) return true; } if (ev.getAction() == MotionEvent.ACTION_DOWN && ev.getEdgeFlags() != 0) { // Don't handle edge touches immediately -- they may actually belong to one of our // descendants. return false; } if (mAdapter == null || mAdapter.getCount() == 0) { // Nothing to present or scroll; nothing to touch. 
return false; } if (mVelocityTracker == null) { mVelocityTracker = VelocityTracker.obtain(); } mVelocityTracker.addMovement(ev); final int action = ev.getAction(); boolean needsInvalidate = false; switch (action & MotionEventCompat.ACTION_MASK) { case MotionEvent.ACTION_DOWN: { mScroller.abortAnimation(); mPopulatePending = false; populate(); // Remember where the motion event started mLastMotionX = mInitialMotionX = ev.getX(); mLastMotionY = mInitialMotionY = ev.getY(); mActivePointerId = ev.getPointerId(0); break; } case MotionEvent.ACTION_MOVE: if (!mIsBeingDragged) { final int pointerIndex = ev.findPointerIndex(mActivePointerId); if (pointerIndex == -1) { // A child has consumed some touch events and put us into an inconsistent // state. needsInvalidate = resetTouch(); break; } final float y = ev.getY(pointerIndex); final float yDiff = Math.abs(y - mLastMotionY); final float x = ev.getX(pointerIndex); final float xDiff = Math.abs(x - mLastMotionX); if (DEBUG) { Log.v(TAG, "Moved x to " + x + "," + y + " diff=" + xDiff + "," + yDiff); } if (yDiff > mTouchSlop && yDiff > xDiff) { if (DEBUG) Log.v(TAG, "Starting drag!"); mIsBeingDragged = true; requestParentDisallowInterceptTouchEvent(true); mLastMotionY = y - mInitialMotionY > 0 ? mInitialMotionY + mTouchSlop : mInitialMotionY - mTouchSlop; mLastMotionX = x; setScrollState(SCROLL_STATE_DRAGGING); setScrollingCacheEnabled(true); // Disallow Parent Intercept, just in case ViewParent parent = getParent(); if (parent != null) { parent.requestDisallowInterceptTouchEvent(true); } } } // Not else! Note that mIsBeingDragged can be set above. 
if (mIsBeingDragged) { // Scroll to follow the motion event final int activePointerIndex = ev.findPointerIndex(mActivePointerId); final float y = ev.getY(activePointerIndex); needsInvalidate |= performDrag(y); } break; case MotionEvent.ACTION_UP: if (mIsBeingDragged) { final VelocityTracker velocityTracker = mVelocityTracker; velocityTracker.computeCurrentVelocity(1000, mMaximumVelocity); int initialVelocity = (int) VelocityTrackerCompat.getYVelocity( velocityTracker, mActivePointerId); mPopulatePending = true; final int height = getClientHeight(); final int scrollY = getScrollY(); final ItemInfo ii = infoForCurrentScrollPosition(); final float marginOffset = (float) mPageMargin / height; final int currentPage = ii.position; final float pageOffset = (((float) scrollY / height) - ii.offset) / (ii.heightFactor + marginOffset); final int activePointerIndex = ev.findPointerIndex(mActivePointerId); final float y = ev.getY(activePointerIndex); final int totalDelta = (int) (y - mInitialMotionY); int nextPage = determineTargetPage(currentPage, pageOffset, initialVelocity, totalDelta); setCurrentItemInternal(nextPage, true, true, initialVelocity); needsInvalidate = resetTouch(); } break; case MotionEvent.ACTION_CANCEL: if (mIsBeingDragged) { scrollToItem(mCurItem, true, 0, false); needsInvalidate = resetTouch(); } break; case MotionEventCompat.ACTION_POINTER_DOWN: { final int index = MotionEventCompat.getActionIndex(ev); final float y = ev.getY(index); mLastMotionY = y; mActivePointerId = ev.getPointerId(index); break; } case MotionEventCompat.ACTION_POINTER_UP: onSecondaryPointerUp(ev); mLastMotionY = ev.getY(ev.findPointerIndex(mActivePointerId)); break; } if (needsInvalidate) { ViewCompat.postInvalidateOnAnimation(this); } return true; } private boolean resetTouch() { boolean needsInvalidate; mActivePointerId = INVALID_POINTER; endDrag(); needsInvalidate = mTopEdge.onRelease() | mBottomEdge.onRelease(); return needsInvalidate; } private void 
requestParentDisallowInterceptTouchEvent(boolean disallowIntercept) { final ViewParent parent = getParent(); if (parent != null) { parent.requestDisallowInterceptTouchEvent(disallowIntercept); } } private boolean performDrag(float y) { boolean needsInvalidate = false; final float deltaY = mLastMotionY - y; mLastMotionY = y; float oldScrollY = getScrollY(); float scrollY = oldScrollY + deltaY; final int height = getClientHeight(); float topBound = height * mFirstOffset; float bottomBound = height * mLastOffset; boolean topAbsolute = true; boolean bottomAbsolute = true; final ItemInfo firstItem = mItems.get(0); final ItemInfo lastItem = mItems.get(mItems.size() - 1); if (firstItem.position != 0) { topAbsolute = false; topBound = firstItem.offset * height; } if (lastItem.position != mAdapter.getCount() - 1) { bottomAbsolute = false; bottomBound = lastItem.offset * height; } if (scrollY < topBound) { if (topAbsolute) { float over = topBound - scrollY; needsInvalidate = mTopEdge.onPull(Math.abs(over) / height); } scrollY = topBound; } else if (scrollY > bottomBound) { if (bottomAbsolute) { float over = scrollY - bottomBound; needsInvalidate = mBottomEdge.onPull(Math.abs(over) / height); } scrollY = bottomBound; } // Don't lose the rounded component mLastMotionX += scrollY - (int) scrollY; scrollTo(getScrollX(), (int) scrollY); pageScrolled((int) scrollY); return needsInvalidate; } /** * @return Info about the page at the current scroll position. * This can be synthetic for a missing middle page; the 'object' field can be null. */ private ItemInfo infoForCurrentScrollPosition() { final int height = getClientHeight(); final float scrollOffset = height > 0 ? (float) getScrollY() / height : 0; final float marginOffset = height > 0 ? 
(float) mPageMargin / height : 0; int lastPos = -1; float lastOffset = 0.f; float lastHeight = 0.f; boolean first = true; ItemInfo lastItem = null; for (int i = 0; i < mItems.size(); i++) { ItemInfo ii = mItems.get(i); float offset; if (!first && ii.position != lastPos + 1) { // Create a synthetic item for a missing page. ii = mTempItem; ii.offset = lastOffset + lastHeight + marginOffset; ii.position = lastPos + 1; ii.heightFactor = mAdapter.getPageWidth(ii.position); i--; } offset = ii.offset; final float topBound = offset; final float bottomBound = offset + ii.heightFactor + marginOffset; if (first || scrollOffset >= topBound) { if (scrollOffset < bottomBound || i == mItems.size() - 1) { return ii; } } else { return lastItem; } first = false; lastPos = ii.position; lastOffset = offset; lastHeight = ii.heightFactor; lastItem = ii; } return lastItem; } private int determineTargetPage(int currentPage, float pageOffset, int velocity, int deltaY) { int targetPage; if (Math.abs(deltaY) > mFlingDistance && Math.abs(velocity) > mMinimumVelocity) { targetPage = velocity > 0 ? currentPage : currentPage + 1; } else { final float truncator = currentPage >= mCurItem ? 
0.4f : 0.6f; targetPage = currentPage + (int) (pageOffset + truncator); } if (mItems.size() > 0) { final ItemInfo firstItem = mItems.get(0); final ItemInfo lastItem = mItems.get(mItems.size() - 1); // Only let the user target pages we have items for targetPage = Math.max(firstItem.position, Math.min(targetPage, lastItem.position)); } return targetPage; } @Override public void draw(Canvas canvas) { super.draw(canvas); boolean needsInvalidate = false; final int overScrollMode = getOverScrollMode(); if (overScrollMode == View.OVER_SCROLL_ALWAYS || (overScrollMode == View.OVER_SCROLL_IF_CONTENT_SCROLLS && mAdapter != null && mAdapter.getCount() > 1)) { if (!mTopEdge.isFinished()) { final int restoreCount = canvas.save(); final int height = getHeight(); final int width = getWidth() - getPaddingLeft() - getPaddingRight(); canvas.translate(getPaddingLeft(), mFirstOffset * height); mTopEdge.setSize(width, height); needsInvalidate |= mTopEdge.draw(canvas); canvas.restoreToCount(restoreCount); } if (!mBottomEdge.isFinished()) { final int restoreCount = canvas.save(); final int height = getHeight(); final int width = getWidth() - getPaddingLeft() - getPaddingRight(); canvas.rotate(180); canvas.translate(-width - getPaddingLeft(), -(mLastOffset + 1) * height); mBottomEdge.setSize(width, height); needsInvalidate |= mBottomEdge.draw(canvas); canvas.restoreToCount(restoreCount); } } else { mTopEdge.finish(); mBottomEdge.finish(); } if (needsInvalidate) { // Keep animating ViewCompat.postInvalidateOnAnimation(this); } } @Override protected void onDraw(Canvas canvas) { super.onDraw(canvas); // Draw the margin drawable between pages if needed. 
if (mPageMargin > 0 && mMarginDrawable != null && mItems.size() > 0 && mAdapter != null) { final int scrollY = getScrollY(); final int height = getHeight(); final float marginOffset = (float) mPageMargin / height; int itemIndex = 0; ItemInfo ii = mItems.get(0); float offset = ii.offset; final int itemCount = mItems.size(); final int firstPos = ii.position; final int lastPos = mItems.get(itemCount - 1).position; for (int pos = firstPos; pos < lastPos; pos++) { while (pos > ii.position && itemIndex < itemCount) { ii = mItems.get(++itemIndex); } float drawAt; if (pos == ii.position) { drawAt = (ii.offset + ii.heightFactor) * height; offset = ii.offset + ii.heightFactor + marginOffset; } else { float heightFactor = mAdapter.getPageWidth(pos); drawAt = (offset + heightFactor) * height; offset += heightFactor + marginOffset; } if (drawAt + mPageMargin > scrollY) { mMarginDrawable.setBounds(mLeftPageBounds, Math.round(drawAt), mRightPageBounds, Math.round(drawAt + mPageMargin)); mMarginDrawable.draw(canvas); } if (drawAt > scrollY + height) { break; // No more visible, no sense in continuing } } } } /** * Start a fake drag of the pager. * * <p>A fake drag can be useful if you want to synchronize the motion of the ViewPager * with the touch scrolling of another view, while still letting the ViewPager * control the snapping motion and fling behavior. (e.g. parallax-scrolling tabs.) * Call {@link #fakeDragBy(float)} to simulate the actual drag motion. Call * {@link #endFakeDrag()} to complete the fake drag and fling as necessary. * * <p>During a fake drag the ViewPager will ignore all touch events. If a real drag * is already in progress, this method will return false. * * @return true if the fake drag began successfully, false if it could not be started. 
* * @see #fakeDragBy(float) * @see #endFakeDrag() */ public boolean beginFakeDrag() { if (mIsBeingDragged) { return false; } mFakeDragging = true; setScrollState(SCROLL_STATE_DRAGGING); mInitialMotionY = mLastMotionY = 0; if (mVelocityTracker == null) { mVelocityTracker = VelocityTracker.obtain(); } else { mVelocityTracker.clear(); } final long time = SystemClock.uptimeMillis(); final MotionEvent ev = MotionEvent.obtain(time, time, MotionEvent.ACTION_DOWN, 0, 0, 0); mVelocityTracker.addMovement(ev); ev.recycle(); mFakeDragBeginTime = time; return true; } /** * End a fake drag of the pager. * * @see #beginFakeDrag() * @see #fakeDragBy(float) */ public void endFakeDrag() { if (!mFakeDragging) { throw new IllegalStateException("No fake drag in progress. Call beginFakeDrag first."); } if (mAdapter != null) { final VelocityTracker velocityTracker = mVelocityTracker; velocityTracker.computeCurrentVelocity(1000, mMaximumVelocity); int initialVelocity = (int) VelocityTrackerCompat.getXVelocity( velocityTracker, mActivePointerId); mPopulatePending = true; final int height = getClientHeight(); final int scrollY = getScrollY(); final ItemInfo ii = infoForCurrentScrollPosition(); final int currentPage = ii.position; final float pageOffset = (((float) scrollY / height) - ii.offset) / ii.heightFactor; final int totalDelta = (int) (mLastMotionY - mInitialMotionY); int nextPage = determineTargetPage(currentPage, pageOffset, initialVelocity, totalDelta); setCurrentItemInternal(nextPage, true, true, initialVelocity); } endDrag(); mFakeDragging = false; } /** * Fake drag by an offset in pixels. You must have called {@link #beginFakeDrag()} first. * * @param yOffset Offset in pixels to drag by. * @see #beginFakeDrag() * @see #endFakeDrag() */ public void fakeDragBy(float yOffset) { if (!mFakeDragging) { throw new IllegalStateException("No fake drag in progress. 
Call beginFakeDrag first."); } if (mAdapter == null) { return; } mLastMotionY += yOffset; float oldScrollY = getScrollY(); float scrollY = oldScrollY - yOffset; final int height = getClientHeight(); float topBound = height * mFirstOffset; float bottomBound = height * mLastOffset; final ItemInfo firstItem = mItems.get(0); final ItemInfo lastItem = mItems.get(mItems.size() - 1); if (firstItem.position != 0) { topBound = firstItem.offset * height; } if (lastItem.position != mAdapter.getCount() - 1) { bottomBound = lastItem.offset * height; } if (scrollY < topBound) { scrollY = topBound; } else if (scrollY > bottomBound) { scrollY = bottomBound; } // Don't lose the rounded component mLastMotionY += scrollY - (int) scrollY; scrollTo(getScrollX(), (int) scrollY); pageScrolled((int) scrollY); // Synthesize an event for the VelocityTracker. final long time = SystemClock.uptimeMillis(); final MotionEvent ev = MotionEvent.obtain(mFakeDragBeginTime, time, MotionEvent.ACTION_MOVE, 0, mLastMotionY, 0); mVelocityTracker.addMovement(ev); ev.recycle(); } /** * Returns true if a fake drag is in progress. * * @return true if currently in a fake drag, false otherwise. * * @see #beginFakeDrag() * @see #fakeDragBy(float) * @see #endFakeDrag() */ public boolean isFakeDragging() { return mFakeDragging; } private void onSecondaryPointerUp(MotionEvent ev) { final int pointerIndex = MotionEventCompat.getActionIndex(ev); final int pointerId = ev.getPointerId(pointerIndex); if (pointerId == mActivePointerId) { // This was our active pointer going up. Choose a new // active pointer and adjust accordingly. final int newPointerIndex = pointerIndex == 0 ? 
1 : 0; mLastMotionY = ev.getY(newPointerIndex); mActivePointerId = ev.getPointerId(newPointerIndex); if (mVelocityTracker != null) { mVelocityTracker.clear(); } } } private void endDrag() { mIsBeingDragged = false; mIsUnableToDrag = false; if (mVelocityTracker != null) { mVelocityTracker.recycle(); mVelocityTracker = null; } } private void setScrollingCacheEnabled(boolean enabled) { if (mScrollingCacheEnabled != enabled) { mScrollingCacheEnabled = enabled; if (USE_CACHE) { final int size = getChildCount(); for (int i = 0; i < size; ++i) { final View child = getChildAt(i); if (child.getVisibility() != GONE) { child.setDrawingCacheEnabled(enabled); } } } } } /** * Check if this ViewPager can be scrolled horizontally in a certain direction. * * @param direction Negative to check scrolling left, positive to check scrolling right. * @return Whether this ViewPager can be scrolled in the specified direction. It will always * return false if the specified direction is 0. */ public boolean internalCanScrollVertically(int direction) { if (mAdapter == null) { return false; } final int height = getClientHeight(); final int scrollY = getScrollY(); if (direction < 0) { return (scrollY > (int) (height * mFirstOffset)); } else if (direction > 0) { return (scrollY < (int) (height * mLastOffset)); } else { return false; } } /** * Tests scrollability within child views of v given a delta of dx. * * @param v View to test for horizontal scrollability * @param checkV Whether the view v passed should itself be checked for scrollability (true), * or just its children (false). * @param dy Delta scrolled in pixels * @param x X coordinate of the active touch point * @param y Y coordinate of the active touch point * @return true if child views of v can be scrolled by delta of dx. 
*/ protected boolean canScroll(View v, boolean checkV, int dy, int x, int y) { if (v instanceof ViewGroup) { final ViewGroup group = (ViewGroup) v; final int scrollX = v.getScrollX(); final int scrollY = v.getScrollY(); final int count = group.getChildCount(); // Count backwards - let topmost views consume scroll distance first. for (int i = count - 1; i >= 0; i--) { // TODO: Add versioned support here for transformed views. // This will not work for transformed views in Honeycomb+ final View child = group.getChildAt(i); if (y + scrollY >= child.getTop() && y + scrollY < child.getBottom() && x + scrollX >= child.getLeft() && x + scrollX < child.getRight() && canScroll(child, true, dy, x + scrollX - child.getLeft(), y + scrollY - child.getTop())) { return true; } } } return checkV && ViewCompat.canScrollVertically(v, -dy); } @Override public boolean dispatchKeyEvent(KeyEvent event) { // Let the focused view and/or our descendants get the key first return super.dispatchKeyEvent(event) || executeKeyEvent(event); } /** * You can call this function yourself to have the scroll view perform * scrolling from a key event, just as if the event had been dispatched to * it by the view hierarchy. * * @param event The key event to execute. * @return Return true if the event was handled, else false. */ public boolean executeKeyEvent(KeyEvent event) { boolean handled = false; if (event.getAction() == KeyEvent.ACTION_DOWN) { switch (event.getKeyCode()) { case KeyEvent.KEYCODE_DPAD_LEFT: handled = arrowScroll(FOCUS_LEFT); break; case KeyEvent.KEYCODE_DPAD_RIGHT: handled = arrowScroll(FOCUS_RIGHT); break; case KeyEvent.KEYCODE_TAB: if (Build.VERSION.SDK_INT >= 11) { // The focus finder had a bug handling FOCUS_FORWARD and FOCUS_BACKWARD // before Android 3.0. Ignore the tab key on those devices. 
if (KeyEventCompat.hasNoModifiers(event)) { handled = arrowScroll(FOCUS_FORWARD); } else if (KeyEventCompat.hasModifiers(event, KeyEvent.META_SHIFT_ON)) { handled = arrowScroll(FOCUS_BACKWARD); } } break; } } return handled; } /** * Handle scrolling in response to a left or right arrow click. * * @param direction The direction corresponding to the arrow key that was pressed. It should be * either {@link View#FOCUS_LEFT} or {@link View#FOCUS_RIGHT}. * @return Whether the scrolling was handled successfully. */ public boolean arrowScroll(int direction) { View currentFocused = findFocus(); if (currentFocused == this) { currentFocused = null; } else if (currentFocused != null) { boolean isChild = false; for (ViewParent parent = currentFocused.getParent(); parent instanceof ViewGroup; parent = parent.getParent()) { if (parent == this) { isChild = true; break; } } if (!isChild) { // This would cause the focus search down below to fail in fun ways. final StringBuilder sb = new StringBuilder(); sb.append(currentFocused.getClass().getSimpleName()); for (ViewParent parent = currentFocused.getParent(); parent instanceof ViewGroup; parent = parent.getParent()) { sb.append(" => ").append(parent.getClass().getSimpleName()); } Log.e(TAG, "arrowScroll tried to find focus based on non-child " + "current focused view " + sb.toString()); currentFocused = null; } } boolean handled = false; View nextFocused = FocusFinder.getInstance().findNextFocus(this, currentFocused, direction); if (nextFocused != null && nextFocused != currentFocused) { if (direction == View.FOCUS_UP) { // If there is nothing to the left, or this is causing us to // jump to the right, then what we really want to do is page left. 
final int nextTop = getChildRectInPagerCoordinates(mTempRect, nextFocused).top; final int currTop = getChildRectInPagerCoordinates(mTempRect, currentFocused).top; if (currentFocused != null && nextTop >= currTop) { handled = pageUp(); } else { handled = nextFocused.requestFocus(); } } else if (direction == View.FOCUS_DOWN) { // If there is nothing to the right, or this is causing us to // jump to the left, then what we really want to do is page right. final int nextDown = getChildRectInPagerCoordinates(mTempRect, nextFocused).bottom; final int currDown = getChildRectInPagerCoordinates(mTempRect, currentFocused).bottom; if (currentFocused != null && nextDown <= currDown) { handled = pageDown(); } else { handled = nextFocused.requestFocus(); } } } else if (direction == FOCUS_UP || direction == FOCUS_BACKWARD) { // Trying to move left and nothing there; try to page. handled = pageUp(); } else if (direction == FOCUS_DOWN || direction == FOCUS_FORWARD) { // Trying to move right and nothing there; try to page. 
handled = pageDown(); } if (handled) { playSoundEffect(SoundEffectConstants.getContantForFocusDirection(direction)); } return handled; } private Rect getChildRectInPagerCoordinates(Rect outRect, View child) { if (outRect == null) { outRect = new Rect(); } if (child == null) { outRect.set(0, 0, 0, 0); return outRect; } outRect.left = child.getLeft(); outRect.right = child.getRight(); outRect.top = child.getTop(); outRect.bottom = child.getBottom(); ViewParent parent = child.getParent(); while (parent instanceof ViewGroup && parent != this) { final ViewGroup group = (ViewGroup) parent; outRect.left += group.getLeft(); outRect.right += group.getRight(); outRect.top += group.getTop(); outRect.bottom += group.getBottom(); parent = group.getParent(); } return outRect; } boolean pageUp() { if (mCurItem > 0) { setCurrentItem(mCurItem - 1, true); return true; } return false; } boolean pageDown() { if (mAdapter != null && mCurItem < (mAdapter.getCount() - 1)) { setCurrentItem(mCurItem + 1, true); return true; } return false; } /** * We only want the current page that is being shown to be focusable. */ @Override public void addFocusables(ArrayList<View> views, int direction, int focusableMode) { final int focusableCount = views.size(); final int descendantFocusability = getDescendantFocusability(); if (descendantFocusability != FOCUS_BLOCK_DESCENDANTS) { for (int i = 0; i < getChildCount(); i++) { final View child = getChildAt(i); if (child.getVisibility() == VISIBLE) { ItemInfo ii = infoForChild(child); if (ii != null && ii.position == mCurItem) { child.addFocusables(views, direction, focusableMode); } } } } // we add ourselves (if focusable) in all cases except for when we are // FOCUS_AFTER_DESCENDANTS and there are some descendants focusable. this is // to avoid the focus search finding layouts when a more precise search // among the focusable children would be more interesting. 
if (descendantFocusability != FOCUS_AFTER_DESCENDANTS || (focusableCount == views.size())) { // No focusable descendants // Note that we can't call the superclass here, because it will // add all views in. So we need to do the same thing View does. if (!isFocusable()) { return; } if ((focusableMode & FOCUSABLES_TOUCH_MODE) == FOCUSABLES_TOUCH_MODE && isInTouchMode() && !isFocusableInTouchMode()) { return; } if (views != null) { views.add(this); } } } /** * We only want the current page that is being shown to be touchable. */ @Override public void addTouchables(ArrayList<View> views) { // Note that we don't call super.addTouchables(), which means that // we don't call View.addTouchables(). This is okay because a ViewPager // is itself not touchable. for (int i = 0; i < getChildCount(); i++) { final View child = getChildAt(i); if (child.getVisibility() == VISIBLE) { ItemInfo ii = infoForChild(child); if (ii != null && ii.position == mCurItem) { child.addTouchables(views); } } } } /** * We only want the current page that is being shown to be focusable. */ @Override protected boolean onRequestFocusInDescendants(int direction, Rect previouslyFocusedRect) { int index; int increment; int end; int count = getChildCount(); if ((direction & FOCUS_FORWARD) != 0) { index = 0; increment = 1; end = count; } else { index = count - 1; increment = -1; end = -1; } for (int i = index; i != end; i += increment) { View child = getChildAt(i); if (child.getVisibility() == VISIBLE) { ItemInfo ii = infoForChild(child); if (ii != null && ii.position == mCurItem) { if (child.requestFocus(direction, previouslyFocusedRect)) { return true; } } } } return false; } @Override public boolean dispatchPopulateAccessibilityEvent(AccessibilityEvent event) { // Dispatch scroll events from this ViewPager. if (event.getEventType() == AccessibilityEventCompat.TYPE_VIEW_SCROLLED) { return super.dispatchPopulateAccessibilityEvent(event); } // Dispatch all other accessibility events from the current page. 
final int childCount = getChildCount(); for (int i = 0; i < childCount; i++) { final View child = getChildAt(i); if (child.getVisibility() == VISIBLE) { final ItemInfo ii = infoForChild(child); if (ii != null && ii.position == mCurItem && child.dispatchPopulateAccessibilityEvent(event)) { return true; } } } return false; } @Override protected ViewGroup.LayoutParams generateDefaultLayoutParams() { return new LayoutParams(); } @Override protected ViewGroup.LayoutParams generateLayoutParams(ViewGroup.LayoutParams p) { return generateDefaultLayoutParams(); } @Override protected boolean checkLayoutParams(ViewGroup.LayoutParams p) { return p instanceof LayoutParams && super.checkLayoutParams(p); } @Override public ViewGroup.LayoutParams generateLayoutParams(AttributeSet attrs) { return new LayoutParams(getContext(), attrs); } class MyAccessibilityDelegate extends AccessibilityDelegateCompat { @Override public void onInitializeAccessibilityEvent(View host, AccessibilityEvent event) { super.onInitializeAccessibilityEvent(host, event); event.setClassName(ViewPager.class.getName()); final AccessibilityRecordCompat recordCompat = AccessibilityEventCompat.asRecord(event); recordCompat.setScrollable(canScroll()); if (event.getEventType() == AccessibilityEventCompat.TYPE_VIEW_SCROLLED && mAdapter != null) { recordCompat.setItemCount(mAdapter.getCount()); recordCompat.setFromIndex(mCurItem); recordCompat.setToIndex(mCurItem); } } @Override public void onInitializeAccessibilityNodeInfo(View host, AccessibilityNodeInfoCompat info) { super.onInitializeAccessibilityNodeInfo(host, info); info.setClassName(ViewPager.class.getName()); info.setScrollable(canScroll()); if (internalCanScrollVertically(1)) { info.addAction(AccessibilityNodeInfoCompat.ACTION_SCROLL_FORWARD); } if (internalCanScrollVertically(-1)) { info.addAction(AccessibilityNodeInfoCompat.ACTION_SCROLL_BACKWARD); } } @Override public boolean performAccessibilityAction(View host, int action, Bundle args) { if 
(super.performAccessibilityAction(host, action, args)) { return true; } switch (action) { case AccessibilityNodeInfoCompat.ACTION_SCROLL_FORWARD: { if (internalCanScrollVertically(1)) { setCurrentItem(mCurItem + 1); return true; } } return false; case AccessibilityNodeInfoCompat.ACTION_SCROLL_BACKWARD: { if (internalCanScrollVertically(-1)) { setCurrentItem(mCurItem - 1); return true; } } return false; } return false; } private boolean canScroll() { return (mAdapter != null) && (mAdapter.getCount() > 1); } } private class PagerObserver extends DataSetObserver { PagerObserver() { } @Override public void onChanged() { dataSetChanged(); } @Override public void onInvalidated() { dataSetChanged(); } } /** * Layout parameters that should be supplied for views added to a * ViewPager. */ public static class LayoutParams extends ViewGroup.LayoutParams { /** * true if this view is a decoration on the pager itself and not * a view supplied by the adapter. */ public boolean isDecor; /** * Gravity setting for use on decor views only: * Where to position the view page within the overall ViewPager * container; constants are defined in {@link android.view.Gravity}. */ public int gravity; /** * Width as a 0-1 multiplier of the measured pager width */ float heightFactor = 0.f; /** * true if this view was added during layout and needs to be measured * before being positioned. 
*/ boolean needsMeasure; /** * Adapter position this view is for if !isDecor */ int position; /** * Current child index within the ViewPager that this view occupies */ int childIndex; public LayoutParams() { super(MATCH_PARENT, MATCH_PARENT); } public LayoutParams(Context context, AttributeSet attrs) { super(context, attrs); final TypedArray a = context.obtainStyledAttributes(attrs, LAYOUT_ATTRS); gravity = a.getInteger(0, Gravity.TOP); a.recycle(); } } static class ViewPositionComparator implements Comparator<View> { @Override public int compare(View lhs, View rhs) { final LayoutParams llp = (LayoutParams) lhs.getLayoutParams(); final LayoutParams rlp = (LayoutParams) rhs.getLayoutParams(); if (llp.isDecor != rlp.isDecor) { return llp.isDecor ? 1 : -1; } return llp.position - rlp.position; } } }
{ "content_hash": "c61fffe2d1f8cfe93ee8496817b0f23a", "timestamp": "", "source": "github", "line_count": 3139, "max_line_length": 108, "avg_line_length": 39.37145587766805, "alnum_prop": 0.5574858197059561, "repo_name": "simplezhli/ChangeTabLayout", "id": "2bb0eb6a1231c539475438ba0e9ed363160a7a81", "size": "124228", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "library/src/main/java/android/support/v4/view/VerticalViewPager.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "176444" } ], "symlink_target": "" }
-- Down migration: reverts the SignatureInfo type changes in a single transaction.
BEGIN;

-- Shrink the id sequence back to a 32-bit integer range.
ALTER SEQUENCE signatureinfo_id_seq AS INT;

-- Restore the earlier, narrower column types on SignatureInfo.
ALTER TABLE SignatureInfo ALTER id TYPE INT, ALTER signing_key TYPE VARCHAR(500), ALTER signing_key_version TYPE VARCHAR(100), ALTER signing_key_id TYPE VARCHAR(50);

-- Drop the thru_timestamp index; presumably added by the matching up
-- migration -- TODO confirm. IF EXISTS keeps this safe to re-run.
DROP INDEX IF EXISTS signatureinfo_thru_timestamp;

END;
{ "content_hash": "38cd1a5419dedaa5fc7e8755bc03dace", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 50, "avg_line_length": 23.666666666666668, "alnum_prop": 0.7711267605633803, "repo_name": "google/exposure-notifications-server", "id": "e62d7b02b1ea233fce49f49fc8f6d81733953658", "size": "903", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "migrations/000075_SignatureInfoTypes.down.sql", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "2052" }, { "name": "Go", "bytes": "1534524" }, { "name": "HCL", "bytes": "161902" }, { "name": "HTML", "bytes": "54041" }, { "name": "Makefile", "bytes": "2717" }, { "name": "PLpgSQL", "bytes": "149518" }, { "name": "Shell", "bytes": "13245" } ], "symlink_target": "" }
Originally published: 2012-04-27 02:10:55
Last updated: 2012-04-27 02:10:56
Author: James Coliins

A method for allocating costs 'fairly' amongst a group of friends who cooperate to their mutual advantage.
{ "content_hash": "79aab902df76200b3ff5a6048e387af3", "timestamp": "", "source": "github", "line_count": 5, "max_line_length": 106, "avg_line_length": 42.6, "alnum_prop": 0.7652582159624414, "repo_name": "ActiveState/code", "id": "fafcee41ce01404b095bc4caa4ce9fe95a9109d4", "size": "237", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "recipes/Python/578115_Taxi_Fare_Splitter/README.md", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "35894" }, { "name": "C", "bytes": "56048" }, { "name": "C++", "bytes": "90880" }, { "name": "HTML", "bytes": "11656" }, { "name": "Java", "bytes": "57468" }, { "name": "JavaScript", "bytes": "181218" }, { "name": "PHP", "bytes": "250144" }, { "name": "Perl", "bytes": "37296" }, { "name": "Perl 6", "bytes": "9914" }, { "name": "Python", "bytes": "17387779" }, { "name": "Ruby", "bytes": "40233" }, { "name": "Shell", "bytes": "190732" }, { "name": "Tcl", "bytes": "674650" } ], "symlink_target": "" }
#include <linux/netdevice.h> #include "igb.h" /* This is the only thing that needs to be changed to adjust the * maximum number of ports that the driver can manage. */ #define IGB_MAX_NIC 32 #define OPTION_UNSET -1 #define OPTION_DISABLED 0 #define OPTION_ENABLED 1 #define MAX_NUM_LIST_OPTS 15 /* All parameters are treated the same, as an integer array of values. * This macro just reduces the need to repeat the same declaration code * over and over (plus this helps to avoid typo bugs). */ #define IGB_PARAM_INIT { [0 ... IGB_MAX_NIC] = OPTION_UNSET } #ifndef module_param_array /* Module Parameters are always initialized to -1, so that the driver * can tell the difference between no user specified value or the * user asking for the default value. * The true default values are loaded in when igb_check_options is called. * * This is a GCC extension to ANSI C. * See the item "Labeled Elements in Initializers" in the section * "Extensions to the C Language Family" of the GCC documentation. */ #define IGB_PARAM(X, desc) \ static const int X[IGB_MAX_NIC+1] = IGB_PARAM_INIT; \ MODULE_PARM(X, "1-" __MODULE_STRING(IGB_MAX_NIC) "i"); \ MODULE_PARM_DESC(X, desc); #else #define IGB_PARAM(X, desc) \ static int X[IGB_MAX_NIC+1] = IGB_PARAM_INIT; \ static unsigned int num_##X; \ module_param_array_named(X, X, int, &num_##X, 0); \ MODULE_PARM_DESC(X, desc); #endif /* Interrupt Throttle Rate (interrupts/sec) * * Valid Range: 100-100000 (0=off, 1=dynamic, 3=dynamic conservative) */ IGB_PARAM(InterruptThrottleRate, "Maximum interrupts per second, per vector, (max 100000), default 3=adaptive"); #define DEFAULT_ITR 3 #define MAX_ITR 100000 /* #define MIN_ITR 120 */ #define MIN_ITR 0 /* IntMode (Interrupt Mode) * * Valid Range: 0 - 2 * * Default Value: 2 (MSI-X) */ IGB_PARAM(IntMode, "Change Interrupt Mode (0=Legacy, 1=MSI, 2=MSI-X), default 2"); #define MAX_INTMODE IGB_INT_MODE_MSIX #define MIN_INTMODE IGB_INT_MODE_LEGACY IGB_PARAM(Node, "set the starting node to allocate memory on, 
default -1"); /* LLIPort (Low Latency Interrupt TCP Port) * * Valid Range: 0 - 65535 * * Default Value: 0 (disabled) */ IGB_PARAM(LLIPort, "Low Latency Interrupt TCP Port (0-65535), default 0=off"); #define DEFAULT_LLIPORT 0 #define MAX_LLIPORT 0xFFFF #define MIN_LLIPORT 0 /* LLIPush (Low Latency Interrupt on TCP Push flag) * * Valid Range: 0, 1 * * Default Value: 0 (disabled) */ IGB_PARAM(LLIPush, "Low Latency Interrupt on TCP Push flag (0,1), default 0=off"); #define DEFAULT_LLIPUSH 0 #define MAX_LLIPUSH 1 #define MIN_LLIPUSH 0 /* LLISize (Low Latency Interrupt on Packet Size) * * Valid Range: 0 - 1500 * * Default Value: 0 (disabled) */ IGB_PARAM(LLISize, "Low Latency Interrupt on Packet Size (0-1500), default 0=off"); #define DEFAULT_LLISIZE 0 #define MAX_LLISIZE 1500 #define MIN_LLISIZE 0 /* RSS (Enable RSS multiqueue receive) * * Valid Range: 0 - 8 * * Default Value: 1 */ IGB_PARAM(RSS, "Number of Receive-Side Scaling Descriptor Queues (0-8), default 1, 0=number of cpus"); #define DEFAULT_RSS 1 #define MAX_RSS 8 #define MIN_RSS 0 /* VMDQ (Enable VMDq multiqueue receive) * * Valid Range: 0 - 8 * * Default Value: 0 */ IGB_PARAM(VMDQ, "Number of Virtual Machine Device Queues: 0-1 = disable, 2-8 enable, default 0"); #define DEFAULT_VMDQ 0 #define MAX_VMDQ MAX_RSS #define MIN_VMDQ 0 /* max_vfs (Enable SR-IOV VF devices) * * Valid Range: 0 - 7 * * Default Value: 0 */ IGB_PARAM(max_vfs, "Number of Virtual Functions: 0 = disable, 1-7 enable, default 0"); #define DEFAULT_SRIOV 0 #define MAX_SRIOV 7 #define MIN_SRIOV 0 /* MDD (Enable Malicious Driver Detection) * * Only available when SR-IOV is enabled - max_vfs is greater than 0 * * Valid Range: 0, 1 * * Default Value: 1 */ IGB_PARAM(MDD, "Malicious Driver Detection (0/1), default 1 = enabled. " "Only available when max_vfs is greater than 0"); #ifdef DEBUG /* Disable Hardware Reset on Tx Hang * * Valid Range: 0, 1 * * Default Value: 0 (disabled, i.e. 
h/w will reset) */ IGB_PARAM(DisableHwReset, "Disable reset of hardware on Tx hang"); /* Dump Transmit and Receive buffers * * Valid Range: 0, 1 * * Default Value: 0 */ IGB_PARAM(DumpBuffers, "Dump Tx/Rx buffers on Tx hang or by request"); #endif /* DEBUG */ /* QueuePairs (Enable TX/RX queue pairs for interrupt handling) * * Valid Range: 0 - 1 * * Default Value: 1 */ IGB_PARAM(QueuePairs, "Enable Tx/Rx queue pairs for interrupt handling (0,1), default 1=on"); #define DEFAULT_QUEUE_PAIRS 1 #define MAX_QUEUE_PAIRS 1 #define MIN_QUEUE_PAIRS 0 /* Enable/disable EEE (a.k.a. IEEE802.3az) * * Valid Range: 0, 1 * * Default Value: 1 */ IGB_PARAM(EEE, "Enable/disable on parts that support the feature"); /* Enable/disable DMA Coalescing * * Valid Values: 0(off), 1000, 2000, 3000, 4000, 5000, 6000, 7000, 8000, * 9000, 10000(msec), 250(usec), 500(usec) * * Default Value: 0 */ IGB_PARAM(DMAC, "Disable or set latency for DMA Coalescing ((0=off, 1000-10000(msec), 250, 500 (usec))"); #ifndef IGB_NO_LRO /* Enable/disable Large Receive Offload * * Valid Values: 0(off), 1(on) * * Default Value: 0 */ IGB_PARAM(LRO, "Large Receive Offload (0,1), default 0=off"); #endif struct igb_opt_list { int i; char *str; }; struct igb_option { enum { enable_option, range_option, list_option } type; const char *name; const char *err; int def; union { struct { /* range_option info */ int min; int max; } r; struct { /* list_option info */ int nr; struct igb_opt_list *p; } l; } arg; }; static int igb_validate_option(unsigned int *value, struct igb_option *opt, struct igb_adapter *adapter) { if (*value == OPTION_UNSET) { *value = opt->def; return 0; } switch (opt->type) { case enable_option: switch (*value) { case OPTION_ENABLED: DPRINTK(PROBE, INFO, "%s Enabled\n", opt->name); return 0; case OPTION_DISABLED: DPRINTK(PROBE, INFO, "%s Disabled\n", opt->name); return 0; } break; case range_option: if (*value >= opt->arg.r.min && *value <= opt->arg.r.max) { DPRINTK(PROBE, INFO, "%s set to %d\n", opt->name, 
*value); return 0; } break; case list_option: { int i; struct igb_opt_list *ent; for (i = 0; i < opt->arg.l.nr; i++) { ent = &opt->arg.l.p[i]; if (*value == ent->i) { if (ent->str[0] != '\0') DPRINTK(PROBE, INFO, "%s\n", ent->str); return 0; } } } break; default: BUG(); } DPRINTK(PROBE, INFO, "Invalid %s value specified (%d) %s\n", opt->name, *value, opt->err); *value = opt->def; return -1; } /** * igb_check_options - Range Checking for Command Line Parameters * @adapter: board private structure * * This routine checks all command line parameters for valid user * input. If an invalid value is given, or if no user specified * value exists, a default value is used. The final value is stored * in a variable in the adapter structure. **/ void igb_check_options(struct igb_adapter *adapter) { int bd = adapter->bd_number; struct e1000_hw *hw = &adapter->hw; if (bd >= IGB_MAX_NIC) { DPRINTK(PROBE, NOTICE, "Warning: no configuration for board #%d\n", bd); DPRINTK(PROBE, NOTICE, "Using defaults for all values\n"); #ifndef module_param_array bd = IGB_MAX_NIC; #endif } { /* Interrupt Throttling Rate */ struct igb_option opt = { .type = range_option, .name = "Interrupt Throttling Rate (ints/sec)", .err = "using default of " __MODULE_STRING(DEFAULT_ITR), .def = DEFAULT_ITR, .arg = { .r = { .min = MIN_ITR, .max = MAX_ITR } } }; #ifdef module_param_array if (num_InterruptThrottleRate > bd) { #endif unsigned int itr = InterruptThrottleRate[bd]; switch (itr) { case 0: DPRINTK(PROBE, INFO, "%s turned off\n", opt.name); if (hw->mac.type >= e1000_i350) adapter->dmac = IGB_DMAC_DISABLE; adapter->rx_itr_setting = itr; break; case 1: DPRINTK(PROBE, INFO, "%s set to dynamic mode\n", opt.name); adapter->rx_itr_setting = itr; break; case 3: DPRINTK(PROBE, INFO, "%s set to dynamic conservative mode\n", opt.name); adapter->rx_itr_setting = itr; break; default: igb_validate_option(&itr, &opt, adapter); /* Save the setting, because the dynamic bits * change itr. 
In case of invalid user value, * default to conservative mode, else need to * clear the lower two bits because they are * used as control */ if (itr == 3) { adapter->rx_itr_setting = itr; } else { adapter->rx_itr_setting = 1000000000 / (itr * 256); adapter->rx_itr_setting &= ~3; } break; } #ifdef module_param_array } else { adapter->rx_itr_setting = opt.def; } #endif adapter->tx_itr_setting = adapter->rx_itr_setting; } { /* Interrupt Mode */ struct igb_option opt = { .type = range_option, .name = "Interrupt Mode", .err = "defaulting to 2 (MSI-X)", .def = IGB_INT_MODE_MSIX, .arg = { .r = { .min = MIN_INTMODE, .max = MAX_INTMODE } } }; #ifdef module_param_array if (num_IntMode > bd) { #endif unsigned int int_mode = IntMode[bd]; igb_validate_option(&int_mode, &opt, adapter); adapter->int_mode = int_mode; #ifdef module_param_array } else { adapter->int_mode = opt.def; } #endif } { /* Low Latency Interrupt TCP Port */ struct igb_option opt = { .type = range_option, .name = "Low Latency Interrupt TCP Port", .err = "using default of " __MODULE_STRING(DEFAULT_LLIPORT), .def = DEFAULT_LLIPORT, .arg = { .r = { .min = MIN_LLIPORT, .max = MAX_LLIPORT } } }; #ifdef module_param_array if (num_LLIPort > bd) { #endif adapter->lli_port = LLIPort[bd]; if (adapter->lli_port) { igb_validate_option(&adapter->lli_port, &opt, adapter); } else { DPRINTK(PROBE, INFO, "%s turned off\n", opt.name); } #ifdef module_param_array } else { adapter->lli_port = opt.def; } #endif } { /* Low Latency Interrupt on Packet Size */ struct igb_option opt = { .type = range_option, .name = "Low Latency Interrupt on Packet Size", .err = "using default of " __MODULE_STRING(DEFAULT_LLISIZE), .def = DEFAULT_LLISIZE, .arg = { .r = { .min = MIN_LLISIZE, .max = MAX_LLISIZE } } }; #ifdef module_param_array if (num_LLISize > bd) { #endif adapter->lli_size = LLISize[bd]; if (adapter->lli_size) { igb_validate_option(&adapter->lli_size, &opt, adapter); } else { DPRINTK(PROBE, INFO, "%s turned off\n", opt.name); } #ifdef 
module_param_array } else { adapter->lli_size = opt.def; } #endif } { /* Low Latency Interrupt on TCP Push flag */ struct igb_option opt = { .type = enable_option, .name = "Low Latency Interrupt on TCP Push flag", .err = "defaulting to Disabled", .def = OPTION_DISABLED }; #ifdef module_param_array if (num_LLIPush > bd) { #endif unsigned int lli_push = LLIPush[bd]; igb_validate_option(&lli_push, &opt, adapter); adapter->flags |= lli_push ? IGB_FLAG_LLI_PUSH : 0; #ifdef module_param_array } else { adapter->flags |= opt.def ? IGB_FLAG_LLI_PUSH : 0; } #endif } { /* SRIOV - Enable SR-IOV VF devices */ struct igb_option opt = { .type = range_option, .name = "max_vfs - SR-IOV VF devices", .err = "using default of " __MODULE_STRING(DEFAULT_SRIOV), .def = DEFAULT_SRIOV, .arg = { .r = { .min = MIN_SRIOV, .max = MAX_SRIOV } } }; #ifdef module_param_array if (num_max_vfs > bd) { #endif adapter->vfs_allocated_count = max_vfs[bd]; igb_validate_option(&adapter->vfs_allocated_count, &opt, adapter); #ifdef module_param_array } else { adapter->vfs_allocated_count = opt.def; } #endif if (adapter->vfs_allocated_count) { switch (hw->mac.type) { case e1000_82575: case e1000_82580: case e1000_i210: case e1000_i211: case e1000_i354: adapter->vfs_allocated_count = 0; DPRINTK(PROBE, INFO, "SR-IOV option max_vfs not supported.\n"); default: break; } } } { /* VMDQ - Enable VMDq multiqueue receive */ struct igb_option opt = { .type = range_option, .name = "VMDQ - VMDq multiqueue queue count", .err = "using default of " __MODULE_STRING(DEFAULT_VMDQ), .def = DEFAULT_VMDQ, .arg = { .r = { .min = MIN_VMDQ, .max = (MAX_VMDQ - adapter->vfs_allocated_count) } } }; if ((hw->mac.type != e1000_i210) || (hw->mac.type != e1000_i211)) { #ifdef module_param_array if (num_VMDQ > bd) { #endif adapter->vmdq_pools = (VMDQ[bd] == 1 ? 
0 : VMDQ[bd]); if (adapter->vfs_allocated_count && !adapter->vmdq_pools) { DPRINTK(PROBE, INFO, "Enabling SR-IOV requires VMDq be set to at least 1\n"); adapter->vmdq_pools = 1; } igb_validate_option(&adapter->vmdq_pools, &opt, adapter); #ifdef module_param_array } else { if (!adapter->vfs_allocated_count) adapter->vmdq_pools = (opt.def == 1 ? 0 : opt.def); else adapter->vmdq_pools = 1; } #endif #ifdef CONFIG_IGB_VMDQ_NETDEV if (hw->mac.type == e1000_82575 && adapter->vmdq_pools) { DPRINTK(PROBE, INFO, "VMDq not supported on this part.\n"); adapter->vmdq_pools = 0; } #endif } else { DPRINTK(PROBE, INFO, "VMDq option is not supported.\n"); adapter->vmdq_pools = opt.def; } } { /* RSS - Enable RSS multiqueue receives */ struct igb_option opt = { .type = range_option, .name = "RSS - RSS multiqueue receive count", .err = "using default of " __MODULE_STRING(DEFAULT_RSS), .def = DEFAULT_RSS, .arg = { .r = { .min = MIN_RSS, .max = MAX_RSS } } }; switch (hw->mac.type) { case e1000_82575: #ifndef CONFIG_IGB_VMDQ_NETDEV if (!!adapter->vmdq_pools) { if (adapter->vmdq_pools <= 2) { if (adapter->vmdq_pools == 2) opt.arg.r.max = 3; } else { opt.arg.r.max = 1; } } else { opt.arg.r.max = 4; } #else opt.arg.r.max = !!adapter->vmdq_pools ? 
1 : 4; #endif /* CONFIG_IGB_VMDQ_NETDEV */ break; case e1000_i210: opt.arg.r.max = 4; break; case e1000_i211: opt.arg.r.max = 2; break; case e1000_82576: #ifndef CONFIG_IGB_VMDQ_NETDEV if (!!adapter->vmdq_pools) opt.arg.r.max = 2; break; #endif /* CONFIG_IGB_VMDQ_NETDEV */ case e1000_82580: case e1000_i350: case e1000_i354: default: if (!!adapter->vmdq_pools) opt.arg.r.max = 1; break; } if (adapter->int_mode != IGB_INT_MODE_MSIX) { DPRINTK(PROBE, INFO, "RSS is not supported when in MSI/Legacy Interrupt mode, %s\n", opt.err); opt.arg.r.max = 1; } #ifdef module_param_array if (num_RSS > bd) { #endif adapter->rss_queues = RSS[bd]; switch (adapter->rss_queues) { case 1: break; default: igb_validate_option(&adapter->rss_queues, &opt, adapter); if (adapter->rss_queues) break; case 0: adapter->rss_queues = min_t(u32, opt.arg.r.max, num_online_cpus()); break; } #ifdef module_param_array } else { adapter->rss_queues = opt.def; } #endif } { /* QueuePairs - Enable Tx/Rx queue pairs for interrupt handling */ struct igb_option opt = { .type = enable_option, .name = "QueuePairs - Tx/Rx queue pairs for interrupt handling", .err = "defaulting to Enabled", .def = OPTION_ENABLED }; #ifdef module_param_array if (num_QueuePairs > bd) { #endif unsigned int qp = QueuePairs[bd]; /* * We must enable queue pairs if the number of queues * exceeds the number of available interrupts. We are * limited to 10, or 3 per unallocated vf. On I210 and * I211 devices, we are limited to 5 interrupts. * However, since I211 only supports 2 queues, we do not * need to check and override the user option. 
*/ if (qp == OPTION_DISABLED) { if (adapter->rss_queues > 4) qp = OPTION_ENABLED; if (adapter->vmdq_pools > 4) qp = OPTION_ENABLED; if (adapter->rss_queues > 1 && (adapter->vmdq_pools > 3 || adapter->vfs_allocated_count > 6)) qp = OPTION_ENABLED; if (hw->mac.type == e1000_i210 && adapter->rss_queues > 2) qp = OPTION_ENABLED; if (qp == OPTION_ENABLED) DPRINTK(PROBE, INFO, "Number of queues exceeds available interrupts, %s\n", opt.err); } igb_validate_option(&qp, &opt, adapter); adapter->flags |= qp ? IGB_FLAG_QUEUE_PAIRS : 0; #ifdef module_param_array } else { adapter->flags |= opt.def ? IGB_FLAG_QUEUE_PAIRS : 0; } #endif } { /* EEE - Enable EEE for capable adapters */ if (hw->mac.type >= e1000_i350) { struct igb_option opt = { .type = enable_option, .name = "EEE Support", .err = "defaulting to Enabled", .def = OPTION_ENABLED }; #ifdef module_param_array if (num_EEE > bd) { #endif unsigned int eee = EEE[bd]; igb_validate_option(&eee, &opt, adapter); adapter->flags |= eee ? IGB_FLAG_EEE : 0; if (eee) hw->dev_spec._82575.eee_disable = false; else hw->dev_spec._82575.eee_disable = true; #ifdef module_param_array } else { adapter->flags |= opt.def ? 
IGB_FLAG_EEE : 0; if (adapter->flags & IGB_FLAG_EEE) hw->dev_spec._82575.eee_disable = false; else hw->dev_spec._82575.eee_disable = true; } #endif } } { /* DMAC - Enable DMA Coalescing for capable adapters */ if (hw->mac.type >= e1000_i350) { struct igb_opt_list list [] = { { IGB_DMAC_DISABLE, "DMAC Disable"}, { IGB_DMAC_MIN, "DMAC 250 usec"}, { IGB_DMAC_500, "DMAC 500 usec"}, { IGB_DMAC_EN_DEFAULT, "DMAC 1000 usec"}, { IGB_DMAC_2000, "DMAC 2000 usec"}, { IGB_DMAC_3000, "DMAC 3000 usec"}, { IGB_DMAC_4000, "DMAC 4000 usec"}, { IGB_DMAC_5000, "DMAC 5000 usec"}, { IGB_DMAC_6000, "DMAC 6000 usec"}, { IGB_DMAC_7000, "DMAC 7000 usec"}, { IGB_DMAC_8000, "DMAC 8000 usec"}, { IGB_DMAC_9000, "DMAC 9000 usec"}, { IGB_DMAC_MAX, "DMAC 10000 usec"} }; struct igb_option opt = { .type = list_option, .name = "DMA Coalescing", .err = "using default of "__MODULE_STRING(IGB_DMAC_DISABLE), .def = IGB_DMAC_DISABLE, .arg = { .l = { .nr = 13, .p = list } } }; #ifdef module_param_array if (num_DMAC > bd) { #endif unsigned int dmac = DMAC[bd]; if (adapter->rx_itr_setting == IGB_DMAC_DISABLE) dmac = IGB_DMAC_DISABLE; igb_validate_option(&dmac, &opt, adapter); switch (dmac) { case IGB_DMAC_DISABLE: adapter->dmac = dmac; break; case IGB_DMAC_MIN: adapter->dmac = dmac; break; case IGB_DMAC_500: adapter->dmac = dmac; break; case IGB_DMAC_EN_DEFAULT: adapter->dmac = dmac; break; case IGB_DMAC_2000: adapter->dmac = dmac; break; case IGB_DMAC_3000: adapter->dmac = dmac; break; case IGB_DMAC_4000: adapter->dmac = dmac; break; case IGB_DMAC_5000: adapter->dmac = dmac; break; case IGB_DMAC_6000: adapter->dmac = dmac; break; case IGB_DMAC_7000: adapter->dmac = dmac; break; case IGB_DMAC_8000: adapter->dmac = dmac; break; case IGB_DMAC_9000: adapter->dmac = dmac; break; case IGB_DMAC_MAX: adapter->dmac = dmac; break; default: adapter->dmac = opt.def; DPRINTK(PROBE, INFO, "Invalid DMAC setting, " "resetting DMAC to %d\n", opt.def); } #ifdef module_param_array } else adapter->dmac = opt.def; #endif } } 
#ifndef IGB_NO_LRO { /* LRO - Enable Large Receive Offload */ struct igb_option opt = { .type = enable_option, .name = "LRO - Large Receive Offload", .err = "defaulting to Disabled", .def = OPTION_DISABLED }; struct net_device *netdev = adapter->netdev; #ifdef module_param_array if (num_LRO > bd) { #endif unsigned int lro = LRO[bd]; igb_validate_option(&lro, &opt, adapter); netdev->features |= lro ? NETIF_F_LRO : 0; #ifdef module_param_array } else if (opt.def == OPTION_ENABLED) { netdev->features |= NETIF_F_LRO; } #endif } #endif /* IGB_NO_LRO */ { /* MDD - Enable Malicious Driver Detection. Only available when SR-IOV is enabled. */ struct igb_option opt = { .type = enable_option, .name = "Malicious Driver Detection", .err = "defaulting to 1", .def = OPTION_ENABLED, .arg = { .r = { .min = OPTION_DISABLED, .max = OPTION_ENABLED } } }; #ifdef module_param_array if (num_MDD > bd) { #endif adapter->mdd = MDD[bd]; igb_validate_option((uint *)&adapter->mdd, &opt, adapter); #ifdef module_param_array } else { adapter->mdd = opt.def; } #endif } }
{ "content_hash": "0f80d9645bc64b1a45ec8259177a8009", "timestamp": "", "source": "github", "line_count": 822, "max_line_length": 106, "avg_line_length": 25.570559610705597, "alnum_prop": 0.6101146581664209, "repo_name": "ipdcode/containerdns", "id": "c922ca2fc9dae4f3adffd33fe4d281a0a2d79e87", "size": "22186", "binary": false, "copies": "17", "ref": "refs/heads/master", "path": "kdns/dpdk-17.02/lib/librte_eal/linuxapp/kni/ethtool/igb/igb_param.c", "mode": "33188", "license": "mit", "language": [ { "name": "Go", "bytes": "245554" }, { "name": "Shell", "bytes": "3360" } ], "symlink_target": "" }
package ca.uhn.fhir.rest.param; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.ICompositeDatatype; import ca.uhn.fhir.model.api.IElement; import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.model.api.annotation.Child; import ca.uhn.fhir.model.api.annotation.Description; import ca.uhn.fhir.model.base.composite.BaseCodingDt; import ca.uhn.fhir.model.base.composite.BaseResourceReferenceDt; import ca.uhn.fhir.model.primitive.BooleanDt; import ca.uhn.fhir.model.primitive.CodeDt; import ca.uhn.fhir.model.primitive.StringDt; import ca.uhn.fhir.model.primitive.UriDt; import ca.uhn.fhir.util.CoverageIgnore; import java.util.List; @CoverageIgnore public class InternalCodingDt extends BaseCodingDt implements ICompositeDatatype { private static final long serialVersionUID = 993056016725918652L; /** * Constructor */ public InternalCodingDt() { super(); } /** * Creates a new Coding with the given system and code */ public InternalCodingDt(String theSystem, String theCode) { setSystem(theSystem); setCode(theCode); } @Child(name = "system", type = UriDt.class, order = 0, min = 0, max = 1) @Description(shortDefinition = "Identity of the terminology system", formalDefinition = "The identification of the code system that defines the meaning of the symbol in the code.") private UriDt mySystem; @Child(name = "version", type = StringDt.class, order = 1, min = 0, max = 1) @Description(shortDefinition = "Version of the system - if relevant", formalDefinition = "The version of the code system which was used when choosing this code. Note that a well-maintained code system does not need the version reported, because the meaning of codes is consistent across versions. However this cannot consistently be assured. 
and When the meaning is not guaranteed to be consistent, the version SHOULD be exchanged") private StringDt myVersion; @Child(name = "code", type = CodeDt.class, order = 2, min = 0, max = 1) @Description(shortDefinition = "Symbol in syntax defined by the system", formalDefinition = "A symbol in syntax defined by the system. The symbol may be a predefined code or an expression in a syntax defined by the coding system (e.g. post-coordination)") private CodeDt myCode; @Child(name = "display", type = StringDt.class, order = 3, min = 0, max = 1) @Description(shortDefinition = "Representation defined by the system", formalDefinition = "A representation of the meaning of the code in the system, following the rules of the system.") private StringDt myDisplay; @Child(name = "primary", type = BooleanDt.class, order = 4, min = 0, max = 1) @Description(shortDefinition = "If this code was chosen directly by the user", formalDefinition = "Indicates that this code was chosen by a user directly - i.e. off a pick list of available items (codes or displays)") private BooleanDt myPrimary; @Override public boolean isEmpty() { return super.isBaseEmpty() && ca.uhn.fhir.util.ElementUtil.isEmpty(mySystem, myVersion, myCode, myDisplay, myPrimary); } @Deprecated //override deprecated method @Override public <T extends IElement> List<T> getAllPopulatedChildElementsOfType(Class<T> theType) { return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements(theType, mySystem, myVersion, myCode, myDisplay, myPrimary); } /** * Gets the value(s) for <b>system</b> (Identity of the terminology system). creating it if it does not exist. Will not return <code>null</code>. * * <p> * <b>Definition:</b> The identification of the code system that defines the meaning of the symbol in the code. 
* </p> */ @Override public UriDt getSystemElement() { if (mySystem == null) { mySystem = new UriDt(); } return mySystem; } /** * Sets the value(s) for <b>system</b> (Identity of the terminology system) * * <p> * <b>Definition:</b> The identification of the code system that defines the meaning of the symbol in the code. * </p> */ public InternalCodingDt setSystem(UriDt theValue) { mySystem = theValue; return this; } /** * Sets the value for <b>system</b> (Identity of the terminology system) * * <p> * <b>Definition:</b> The identification of the code system that defines the meaning of the symbol in the code. * </p> */ @Override public InternalCodingDt setSystem(String theUri) { mySystem = new UriDt(theUri); return this; } /** * Gets the value(s) for <b>version</b> (Version of the system - if relevant). creating it if it does not exist. Will not return <code>null</code>. * * <p> * <b>Definition:</b> The version of the code system which was used when choosing this code. Note that a well-maintained code system does not need the version reported, because the meaning of codes * is consistent across versions. However this cannot consistently be assured. and When the meaning is not guaranteed to be consistent, the version SHOULD be exchanged * </p> */ public StringDt getVersion() { if (myVersion == null) { myVersion = new StringDt(); } return myVersion; } /** * Sets the value(s) for <b>version</b> (Version of the system - if relevant) * * <p> * <b>Definition:</b> The version of the code system which was used when choosing this code. Note that a well-maintained code system does not need the version reported, because the meaning of codes * is consistent across versions. However this cannot consistently be assured. 
and When the meaning is not guaranteed to be consistent, the version SHOULD be exchanged * </p> */ public InternalCodingDt setVersion(StringDt theValue) { myVersion = theValue; return this; } /** * Sets the value for <b>version</b> (Version of the system - if relevant) * * <p> * <b>Definition:</b> The version of the code system which was used when choosing this code. Note that a well-maintained code system does not need the version reported, because the meaning of codes * is consistent across versions. However this cannot consistently be assured. and When the meaning is not guaranteed to be consistent, the version SHOULD be exchanged * </p> */ public InternalCodingDt setVersion(String theString) { myVersion = new StringDt(theString); return this; } /** * Gets the value(s) for <b>code</b> (Symbol in syntax defined by the system). creating it if it does not exist. Will not return <code>null</code>. * * <p> * <b>Definition:</b> A symbol in syntax defined by the system. The symbol may be a predefined code or an expression in a syntax defined by the coding system (e.g. post-coordination) * </p> */ @Override public CodeDt getCodeElement() { if (myCode == null) { myCode = new CodeDt(); } return myCode; } /** * Sets the value(s) for <b>code</b> (Symbol in syntax defined by the system) * * <p> * <b>Definition:</b> A symbol in syntax defined by the system. The symbol may be a predefined code or an expression in a syntax defined by the coding system (e.g. post-coordination) * </p> */ public InternalCodingDt setCode(CodeDt theValue) { myCode = theValue; return this; } /** * Sets the value for <b>code</b> (Symbol in syntax defined by the system) * * <p> * <b>Definition:</b> A symbol in syntax defined by the system. The symbol may be a predefined code or an expression in a syntax defined by the coding system (e.g. 
post-coordination) * </p> */ @Override public InternalCodingDt setCode(String theCode) { myCode = new CodeDt(theCode); return this; } /** * Gets the value(s) for <b>display</b> (Representation defined by the system). creating it if it does not exist. Will not return <code>null</code>. * * <p> * <b>Definition:</b> A representation of the meaning of the code in the system, following the rules of the system. * </p> */ public StringDt getDisplay() { if (myDisplay == null) { myDisplay = new StringDt(); } return myDisplay; } /** * Sets the value(s) for <b>display</b> (Representation defined by the system) * * <p> * <b>Definition:</b> A representation of the meaning of the code in the system, following the rules of the system. * </p> */ public InternalCodingDt setDisplay(StringDt theValue) { myDisplay = theValue; return this; } /** * Sets the value for <b>display</b> (Representation defined by the system) * * <p> * <b>Definition:</b> A representation of the meaning of the code in the system, following the rules of the system. * </p> */ @Override public InternalCodingDt setDisplay(String theString) { myDisplay = new StringDt(theString); return this; } /** * Gets the value(s) for <b>primary</b> (If this code was chosen directly by the user). creating it if it does not exist. Will not return <code>null</code>. * * <p> * <b>Definition:</b> Indicates that this code was chosen by a user directly - i.e. off a pick list of available items (codes or displays) * </p> */ public BooleanDt getPrimary() { if (myPrimary == null) { myPrimary = new BooleanDt(); } return myPrimary; } /** * Sets the value(s) for <b>primary</b> (If this code was chosen directly by the user) * * <p> * <b>Definition:</b> Indicates that this code was chosen by a user directly - i.e. 
off a pick list of available items (codes or displays) * </p> */ public InternalCodingDt setPrimary(BooleanDt theValue) { myPrimary = theValue; return this; } /** * Sets the value for <b>primary</b> (If this code was chosen directly by the user) * * <p> * <b>Definition:</b> Indicates that this code was chosen by a user directly - i.e. off a pick list of available items (codes or displays) * </p> */ public InternalCodingDt setPrimary(boolean theBoolean) { myPrimary = new BooleanDt(theBoolean); return this; } /** * Gets the value(s) for <b>valueSet</b> (Set this coding was chosen from). creating it if it does not exist. Will not return <code>null</code>. * * <p> * <b>Definition:</b> The set of possible coded values this coding was chosen from or constrained by * </p> */ public BaseResourceReferenceDt getValueSet() { throw new UnsupportedOperationException(Msg.code(1949)); } @Override public StringDt getDisplayElement() { return getDisplay(); } @Deprecated //override deprecated method @Override public Boolean getMissing() { throw new UnsupportedOperationException(Msg.code(1950)); } @Deprecated //override deprecated method @Override public IQueryParameterType setMissing(Boolean theMissing) { throw new UnsupportedOperationException(Msg.code(1951)); } }
{ "content_hash": "f35e496a12ae5702f410a9e0055aca4f", "timestamp": "", "source": "github", "line_count": 298, "max_line_length": 433, "avg_line_length": 35.43288590604027, "alnum_prop": 0.7126621839189318, "repo_name": "aemay2/hapi-fhir", "id": "c5f8c725e5ad9a123c24ca4f2902c50d962386f2", "size": "11220", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/InternalCodingDt.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "3861" }, { "name": "CSS", "bytes": "9608" }, { "name": "HTML", "bytes": "213468" }, { "name": "Java", "bytes": "25723741" }, { "name": "JavaScript", "bytes": "31583" }, { "name": "Kotlin", "bytes": "3951" }, { "name": "Ruby", "bytes": "230677" }, { "name": "Shell", "bytes": "46167" } ], "symlink_target": "" }
@echo off
:: Runs the MS-ASCMD S16_TC02 (Settings DevicePassword Status2) test case via MSTest.
:: Switch to this script's directory so the relative paths below resolve correctly.
pushd %~dp0
:: Launch MSTest from the Visual Studio 2013 common tools folder (VS120COMNTOOLS),
:: pointing it at the compiled MS-ASCMD test suite DLL and its run settings file.
"%VS120COMNTOOLS%..\IDE\mstest" /test:Microsoft.Protocols.TestSuites.MS_ASCMD.S16_Settings.MSASCMD_S16_TC02_Settings_DevicePassword_Status2 /testcontainer:..\..\MS-ASCMD\TestSuite\bin\Debug\MS-ASCMD_TestSuite.dll /runconfig:..\..\MS-ASCMD\MS-ASCMD.testsettings /unique
:: Keep the console window open so the test result can be read.
pause
{ "content_hash": "228b7ff1b2ea236b31580e878fd932d0", "timestamp": "", "source": "github", "line_count": 4, "max_line_length": 268, "avg_line_length": 74, "alnum_prop": 0.7871621621621622, "repo_name": "XinwLi/Interop-TestSuites-1", "id": "13be80a1d303082ef652762728c3ba9b9291d0ea", "size": "296", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "ExchangeActiveSync/Source/Scripts/MS-ASCMD/RunMSASCMD_S16_TC02_Settings_DevicePassword_Status2.cmd", "mode": "33188", "license": "mit", "language": [ { "name": "ASP", "bytes": "1421" }, { "name": "Batchfile", "bytes": "1149773" }, { "name": "C", "bytes": "154398" }, { "name": "C#", "bytes": "160448942" }, { "name": "C++", "bytes": "26321" }, { "name": "PowerShell", "bytes": "1499733" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <parent> <artifactId>geomesa-arrow_2.11</artifactId> <groupId>org.locationtech.geomesa</groupId> <version>2.1.0-SNAPSHOT</version> </parent> <modelVersion>4.0.0</modelVersion> <name>GeoMesa Arrow JTS Geometry Vectors</name> <artifactId>geomesa-arrow-jts</artifactId> <dependencies> <dependency> <groupId>org.apache.arrow</groupId> <artifactId>arrow-vector</artifactId> <exclusions> <exclusion> <groupId>org.slf4j</groupId> <artifactId>log4j-over-slf4j</artifactId> </exclusion> <exclusion> <groupId>joda-time</groupId> <artifactId>joda-time</artifactId> </exclusion> </exclusions> </dependency> <dependency> <groupId>com.vividsolutions</groupId> <artifactId>jts-core</artifactId> </dependency> <!-- not used, but we can't exclude direct inherited dependencies, so just mark it provided --> <dependency> <groupId>org.scala-lang</groupId> <artifactId>scala-library</artifactId> <scope>provided</scope> </dependency> <!-- test deps --> <dependency> <groupId>junit</groupId> <artifactId>junit</artifactId> <version>4.12</version> <scope>test</scope> </dependency> <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-log4j12</artifactId> <version>1.7.5</version> <scope>test</scope> </dependency> </dependencies> <build> <pluginManagement> <plugins> <!-- skip all scala life cycles, as there are no scala sources in this module --> <plugin> <groupId>net.alchim31.maven</groupId> <artifactId>scala-maven-plugin</artifactId> <configuration> <skip>true</skip> </configuration> </plugin> </plugins> </pluginManagement> </build> </project>
{ "content_hash": "ef2d91d4ea4b0d444b7db12ed79ea91c", "timestamp": "", "source": "github", "line_count": 72, "max_line_length": 204, "avg_line_length": 34.52777777777778, "alnum_prop": 0.5374094931617055, "repo_name": "ddseapy/geomesa", "id": "698265f9e27aefd4110594fb8cb3fbb2cc3e7464", "size": "2486", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "geomesa-arrow/geomesa-arrow-jts/pom.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "2900" }, { "name": "Java", "bytes": "333097" }, { "name": "JavaScript", "bytes": "140" }, { "name": "Python", "bytes": "11981" }, { "name": "R", "bytes": "2716" }, { "name": "Scala", "bytes": "7136591" }, { "name": "Scheme", "bytes": "3143" }, { "name": "Shell", "bytes": "136402" } ], "symlink_target": "" }
A Wrapper for Box2D API inside Cocos2D-X V3, Use Box2D in Cocos2DX Easily # Why You Need This? As you know Cocos2DX uses chipmunk 7 as built-in physics engine, also they provide a nice API to simplify usage of chipmunk physics Engine But What if you love Box2D, Just Like me :), This Wrapper Will Give you what you want... This API Will let you use Box2D in Cocos2dX Scene, Plus A Nice API, just like Cocos2dx api for chipmunk. # Installation of XCBox2D Clone , or Download ZIP file of XCBox2D from github and Place (XCBox2DHelper) into Classes Folder of your Cocos2D-X Project Simply, Add This Folder into your xcode project and now you ready to use XCBox2D Note: Select Create Groups and Add them into Target of Project # XCBox2D Class Creator <img src="https://i.imgbox.com/QlcQNZGX.png" alt="image host"/> This Creator will help easily create XCScene class, no need to dublicate HelloWorld Scene to Add Everything, Also this Creator will help to Create Subclass of XCSprite Download XCBox2D Creator - Mac OS X ( <a href="#">Download</a> ) - Windows ( <a href="#">Download</a> ) - Linux ( <a href="#">Download</a> ) How Using XCBox2D Class Creator First of all, you jave to create your XCScene 1- Write your Class Name (Example: GameScene ) 2- Choose The Super as XCScene 3- if you Want add settings like Enable Debug and Boundary and Contact events kust check them for World Gravity, just write your values 4- Choose Where To Save and Create Your Class 5- Move .h and .cpp files to Your Classes Folder # **_Documentations_** # XCScene in Depth What you should know about this class, This class is inherit from cocos2d::Layer So, you should forgot cocos2d::Layer, because without XCScene you will not able to use the API **The Functions of XCScene** XCScene has many functions to help you using Box2D more easier than normal Box2D API + **Set World Graviry** ``` gamescene->setBox2DWorldGravity( Vec2(0.0f, -9.8f) ); ``` This Line for settting the World Gravity + **Enable Debug Mode** ``` 
gamescene->setBox2DWorldDebug(true); ``` if you want Enable Debug Mode, of course false will disable + **Boundary** ``` gamescene->setBoundary(BoundaryType::ALLSIDES); ``` Boundary in the Scene you have more Options for this, you can choose from the enum another types **BoundaryType enum** - BoundaryType::ALLSIDES) - BoundaryType::LEFT) - BoundaryType::RIGHT) - BoundaryType::UP) - BoundaryType::DOWN) This will set Boundary around the Scene, good to prototyping + **World Update** Each XCScene should have update function So, don't forgot and don't remove it from this Scene Important: don't remove calling the super inside Update Function ``` XCScene::update(dt); ``` Without this line, the world will not able to update itself. # Your First Body Now , lets try to add XCBox2D Body into our Scene With using this code - Put the code in initWithBox2D Function in your scene // Creating Sprite auto sprite = XCSprite::create("CloseNormal.png"); sprite->setPosition( Vec2(visibleSize.width / 2, visibleSize.height / 2)); this->addChild(sprite); // Creating Body auto body = XCBox2DPhysicsBody::createCircle(sprite->getContentSize().width / 2, BOX2DFIXTURE_DEFAULT); body->setBodyType(BodyType::Dynamic); sprite->setBox2DBody(body); let's discuss the code, first of all we should create Sprite and add it into our Scene the important part is **XCSprite Class** which you always use to add your Sprites into scene XCSprite Class usage same as cocos2d::Sprite Class but with API to use XCBox2D So, We created the Sprite and position it on center and addit into our scene Now, we will be able to to create our Box2D Body, don't think it's will be painful as Box2D API, long andan confused :) XCBox2D will help you to Create Body Easily **XCBox2DPhysicsBody Class**
{ "content_hash": "a899b1f57370590c0116765750c4ca39", "timestamp": "", "source": "github", "line_count": 137, "max_line_length": 193, "avg_line_length": 28.26277372262774, "alnum_prop": 0.7425103305785123, "repo_name": "xCodeSoul/XCBox2D", "id": "1be984304cf59b776453ba0e274cbf8184488bfd", "size": "3882", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
// dtPathQueue: a fixed-size pool (MAX_QUEUE slots) of asynchronous path requests,
// advanced incrementally by update() using a single shared sliced navmesh query.
dtPathQueue::dtPathQueue() :
	m_nextHandle(1),
	m_maxPathSize(0),
	m_queueHead(0),
	m_navquery(0)
{
	// No per-slot path buffers yet; init() allocates them.
	for (int i = 0; i < MAX_QUEUE; ++i)
		m_queue[i].path = 0;
}

dtPathQueue::~dtPathQueue()
{
	purge();
}

// Releases the navmesh query and every per-slot path buffer.
// Safe to call repeatedly; pointers are nulled after freeing.
void dtPathQueue::purge()
{
	dtFreeNavMeshQuery(m_navquery);
	m_navquery = 0;
	for (int i = 0; i < MAX_QUEUE; ++i)
	{
		dtFree(m_queue[i].path);
		m_queue[i].path = 0;
	}
}

// Allocates the sliced query and one path buffer of maxPathSize polys per slot.
// Returns false on any allocation or query-initialization failure.
bool dtPathQueue::init(const int maxPathSize, const int maxSearchNodeCount, dtNavMesh* nav)
{
	// Drop any previous state before re-initializing.
	purge();

	m_navquery = dtAllocNavMeshQuery();
	if (!m_navquery)
		return false;
	if (dtStatusFailed(m_navquery->init(nav, maxSearchNodeCount)))
		return false;

	m_maxPathSize = maxPathSize;
	for (int i = 0; i < MAX_QUEUE; ++i)
	{
		m_queue[i].ref = DT_PATHQ_INVALID;
		m_queue[i].path = (dtPolyRef*)dtAlloc(sizeof(dtPolyRef)*m_maxPathSize, DT_ALLOC_PERM);
		if (!m_queue[i].path)
			return false;
	}

	m_queueHead = 0;

	return true;
}

// Advances pending requests round-robin (starting at m_queueHead), spending at
// most maxIters pathfinder iterations in total across all requests this call.
void dtPathQueue::update(const int maxIters)
{
	static const int MAX_KEEP_ALIVE = 2; // in update ticks.

	// Update path request until there is nothing to update
	// or upto maxIters pathfinder iterations has been consumed.
	int iterCount = maxIters;

	for (int i = 0; i < MAX_QUEUE; ++i)
	{
		// m_queueHead keeps growing; modulo maps it into the slot array.
		PathQuery& q = m_queue[m_queueHead % MAX_QUEUE];

		// Skip inactive requests.
		if (q.ref == DT_PATHQ_INVALID)
		{
			m_queueHead++;
			continue;
		}

		// Handle completed request.
		if (dtStatusSucceed(q.status) || dtStatusFailed(q.status))
		{
			// If the path result has not been read in few frames, free the slot.
			q.keepAlive++;
			if (q.keepAlive > MAX_KEEP_ALIVE)
			{
				q.ref = DT_PATHQ_INVALID;
				q.status = 0;
			}

			m_queueHead++;
			continue;
		}

		// Install this request's special link filter on the shared query
		// before advancing the search.
		m_navquery->updateLinkFilter(q.linkFilter.Get());

		// Handle query start. (status == 0 marks a request not yet begun.)
		if (q.status == 0)
		{
			q.status = m_navquery->initSlicedFindPath(q.startRef, q.endRef, q.startPos, q.endPos, q.filter);
		}

		// Handle query in progress.
		if (dtStatusInProgress(q.status))
		{
			int iters = 0;
			q.status = m_navquery->updateSlicedFindPath(iterCount, &iters);
			// Deduct the iterations actually consumed from this call's budget.
			iterCount -= iters;
		}
		if (dtStatusSucceed(q.status))
		{
			q.status = m_navquery->finalizeSlicedFindPath(q.path, &q.npath, m_maxPathSize);
		}

		// Budget exhausted; remaining requests wait for the next update().
		if (iterCount <= 0)
			break;

		m_queueHead++;
	}
}

// Reserves a free slot for a new path request and returns its handle,
// or DT_PATHQ_INVALID when all slots are occupied. The search itself is
// performed lazily by update().
dtPathQueueRef dtPathQueue::request(dtPolyRef startRef, dtPolyRef endRef,
									const float* startPos, const float* endPos,
									const dtQueryFilter* filter, TSharedPtr<dtQuerySpecialLinkFilter> linkFilter)
{
	// Find empty slot
	int slot = -1;
	for (int i = 0; i < MAX_QUEUE; ++i)
	{
		if (m_queue[i].ref == DT_PATHQ_INVALID)
		{
			slot = i;
			break;
		}
	}
	// Could not find slot.
	if (slot == -1)
		return DT_PATHQ_INVALID;

	// Hand out the next handle, skipping the reserved invalid value on wrap.
	dtPathQueueRef ref = m_nextHandle++;
	if (m_nextHandle == DT_PATHQ_INVALID) m_nextHandle++;

	PathQuery& q = m_queue[slot];
	q.ref = ref;
	dtVcopy(q.startPos, startPos);
	q.startRef = startRef;
	dtVcopy(q.endPos, endPos);
	q.endRef = endRef;

	q.status = 0;	// 0 = not started; update() will call initSlicedFindPath.
	q.npath = 0;
	q.filter = filter;
	q.linkFilter = linkFilter;
	q.keepAlive = 0;

	return ref;
}

// Returns the dtStatus of the request identified by ref,
// or DT_FAILURE when no slot holds that handle.
dtStatus dtPathQueue::getRequestStatus(dtPathQueueRef ref) const
{
	for (int i = 0; i < MAX_QUEUE; ++i)
	{
		if (m_queue[i].ref == ref)
			return m_queue[i].status;
	}
	return DT_FAILURE;
}

// Copies the finished path for ref into 'path' (at most maxPath polys,
// truncating silently) and frees the slot for reuse. Returns DT_FAILURE
// when the handle is unknown (e.g. already collected or expired).
dtStatus dtPathQueue::getPathResult(dtPathQueueRef ref, dtPolyRef* path, int* pathSize, const int maxPath)
{
	for (int i = 0; i < MAX_QUEUE; ++i)
	{
		if (m_queue[i].ref == ref)
		{
			PathQuery& q = m_queue[i];
			// Free request for reuse.
			q.ref = DT_PATHQ_INVALID;
			q.status = 0;
			// Copy path
			int n = dtMin(q.npath, maxPath);
			memcpy(path, q.path, sizeof(dtPolyRef)*n);
			*pathSize = n;
			return DT_SUCCESS;
		}
	}
	return DT_FAILURE;
}
{ "content_hash": "961b978aad4e59c28803821fa67c1cd1", "timestamp": "", "source": "github", "line_count": 176, "max_line_length": 106, "avg_line_length": 21.068181818181817, "alnum_prop": 0.6445523193096009, "repo_name": "PopCap/GameIdea", "id": "b499ba25485204be3e150b33eda00d39eb12b7ea", "size": "4943", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Engine/Source/Runtime/Navmesh/Private/DetourCrowd/DetourPathQueue.cpp", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "ASP", "bytes": "238055" }, { "name": "Assembly", "bytes": "184134" }, { "name": "Batchfile", "bytes": "116983" }, { "name": "C", "bytes": "84264210" }, { "name": "C#", "bytes": "9612596" }, { "name": "C++", "bytes": "242290999" }, { "name": "CMake", "bytes": "548754" }, { "name": "CSS", "bytes": "134910" }, { "name": "GLSL", "bytes": "96780" }, { "name": "HLSL", "bytes": "124014" }, { "name": "HTML", "bytes": "4097051" }, { "name": "Java", "bytes": "757767" }, { "name": "JavaScript", "bytes": "2742822" }, { "name": "Makefile", "bytes": "1976144" }, { "name": "Objective-C", "bytes": "75778979" }, { "name": "Objective-C++", "bytes": "312592" }, { "name": "PAWN", "bytes": "2029" }, { "name": "PHP", "bytes": "10309" }, { "name": "PLSQL", "bytes": "130426" }, { "name": "Pascal", "bytes": "23662" }, { "name": "Perl", "bytes": "218656" }, { "name": "Python", "bytes": "21593012" }, { "name": "SAS", "bytes": "1847" }, { "name": "Shell", "bytes": "2889614" }, { "name": "Tcl", "bytes": "1452" } ], "symlink_target": "" }
// Model object for a collection point ("punto de acopio"), populated from a
// JSON-style dictionary. Initialization fails (returns nil) when the payload
// has no "id" field; every other field falls back to 0 / empty string.
@implementation PuntoAcopio

- (instancetype)initWithData:(NSDictionary *)data
{
    self = [super init];
    if (self) {
        // "id" is the only mandatory field; without it the object is unusable.
        if (!data[@"id"]) {
            return nil;
        } else {
            _uid = data[@"id"];
        }
        // Optional scalar/string fields: default to @(0) or @"" so callers
        // never receive nil for these properties.
        _latitude = data[@"latitud"] ? data [@"latitud"] : @(0);
        _longitude = data[@"longitud"] ? data [@"longitud"] : @(0);
        _departamento = data[@"departamento"] ? data[@"departamento"] : @"";
        _provincia = data[@"provincia"] ? data[@"provincia"] : @"";
        _distrito = data[@"distrito"] ? data[@"distrito"] : @"";
        _address = data[@"direccion"] ? data[@"direccion"] : @"";
        _reference = data[@"referencia"] ? data[@"referencia"] : @"";
        // "necesidades": expected to be an array of dictionaries; each valid
        // entry is mapped to a Need. Entries of the wrong type (or Needs that
        // fail to initialize) are skipped silently.
        // NOTE(review): when "necesidades" is absent or not an array, _needs is
        // left unset (nil) rather than an empty array — confirm callers expect that.
        NSArray *needs = data[@"necesidades"];
        NSMutableArray *tempNeeds = [NSMutableArray new];
        if ([needs isKindOfClass:[NSArray class]]) {
            for (NSDictionary *dictionaryNeed in needs) {
                if ([dictionaryNeed isKindOfClass:[NSDictionary class]]) {
                    Need *need = [[Need alloc] initWithData:dictionaryNeed];
                    if (need) {
                        [tempNeeds addObject:need];
                    }
                }
            }
            _needs = [NSArray arrayWithArray:tempNeeds];
        }
        // "contacto": optional nested dictionary mapped to a Contact.
        NSDictionary *contactData = data[@"contacto"];
        if ([contactData isKindOfClass:[NSDictionary class]]) {
            _contact = [[Contact alloc] initWithData:contactData];
        }
    }
    return self;
}

@end
{ "content_hash": "70ff404b96daa8f8c2aa562b95b2c0e4", "timestamp": "", "source": "github", "line_count": 44, "max_line_length": 76, "avg_line_length": 35, "alnum_prop": 0.5064935064935064, "repo_name": "cegonya/EulaliaiOS", "id": "a3d9dece48667a6d0c628dc014f59104018657f5", "size": "1699", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Eulalia/Eulalia/PuntoAcopio.m", "mode": "33188", "license": "mit", "language": [ { "name": "Objective-C", "bytes": "29277" }, { "name": "Ruby", "bytes": "189" } ], "symlink_target": "" }
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
#if !PORTABLE
using System.Security.Cryptography;
#endif
using System.Text;

namespace Zirpl
{
    /// <summary>
    /// Extension methods for <see cref="String"/>: in-place substitution, Base64
    /// encoding, hashing, and camel/Pascal case conversion and splitting.
    /// </summary>
    public static class StringExtensions
    {
        /// <summary>
        /// Replaces <paramref name="length"/> characters of <paramref name="originalText"/>,
        /// starting at <paramref name="startIndex"/>, with <paramref name="newToken"/>.
        /// </summary>
        /// <param name="originalText">The text to substitute into.</param>
        /// <param name="startIndex">Zero-based index where the replaced span begins.</param>
        /// <param name="length">Number of characters to replace.</param>
        /// <param name="newToken">The replacement text (may differ in length).</param>
        /// <returns>A new string with the span replaced.</returns>
        public static String Substitute(this String originalText, int startIndex, int length, String newToken)
        {
            var sb = new StringBuilder();
            if (startIndex > 0)
            {
                sb.Append(originalText.Substring(0, startIndex));
            }
            sb.Append(newToken);
            if (startIndex + length < originalText.Length)
            {
                sb.Append(originalText.Substring(startIndex + length));
            }
            return sb.ToString();
        }

        /// <summary>
        /// Encodes the string as Base64 over its UTF-8 bytes.
        /// </summary>
        public static string Base64Encode(this string toEncode)
        {
            // Previously this round-tripped through a MemoryStream/StreamWriter
            // (which defaults to UTF-8) and never disposed the writer. Encoding
            // the bytes directly is equivalent and leak-free.
            return Convert.ToBase64String(Encoding.UTF8.GetBytes(toEncode));
        }

        /// <summary>
        /// Returns the last <paramref name="lengthX"/> characters of <paramref name="text"/>.
        /// </summary>
        /// <exception cref="ArgumentNullException">When <paramref name="text"/> is null.</exception>
        /// <exception cref="ArgumentOutOfRangeException">When <paramref name="lengthX"/> exceeds the string length.</exception>
        public static String LastXSubstring(this String text, int lengthX)
        {
            if (text == null)
            {
                throw new ArgumentNullException("text");
            }
            var actualLength = text.Length;
            // if actualLength = 5 (indices 0-4), lastXlength = 3, startIndex = 2
            var startIndex = actualLength - lengthX;
            if (startIndex < 0)
            {
                throw new ArgumentOutOfRangeException("lengthX", "lengthX specified is longer than string");
            }
            return text.Substring(startIndex, lengthX);
        }

#if !PORTABLE
        /// <summary>
        /// Hashes the string's bytes (per <paramref name="encoding"/>) with the given
        /// algorithm and returns the digest as a lowercase hexadecimal string.
        /// </summary>
        /// <remarks>
        /// BUG FIX: the previous implementation returned
        /// <c>encoding.GetString(bs, 0, text.Length)</c>, decoding the raw digest
        /// bytes back through the text encoding and using the *input* length — it
        /// threw <see cref="ArgumentOutOfRangeException"/> whenever the input was
        /// longer than the digest and produced mojibake otherwise. The hex rendering
        /// below matches the intent of the (previously commented-out) hex loop.
        /// </remarks>
        public static String Hash(this String text, HashAlgorithm algorithm, Encoding encoding)
        {
            // http://weblogs.sqlteam.com/mladenp/archive/2009/04/28/Comparing-SQL-Server-HASHBYTES-function-and-.Net-hashing.aspx
            // Encoding choice matters when comparing against SQL Server HASHBYTES:
            //   UTF8    - best for varchar columns
            //   Unicode - best for nchar/nvarchar columns
            //   ASCII/UTF7 - do NOT use for SQL Server comparison
            byte[] bs = encoding.GetBytes(text);
            bs = algorithm.ComputeHash(bs);
            var s = new StringBuilder(bs.Length * 2);
            foreach (byte b in bs)
            {
                s.Append(b.ToString("x2"));
            }
            return s.ToString();
        }
#endif

        /// <summary>
        /// Converts to Camel casing.
        /// "FooBar" becomes "fooBar"
        /// "Foobar" becomes "foobar"
        /// </summary>
        public static string ToCamelCase(this String source)
        {
            if (String.IsNullOrEmpty(source))
            {
                return source;
            }
            else if (source.Length == 1)
            {
                return source.ToLower();
            }
            else
            {
                // (The previous String.Join("", ...) wrapper around the substring
                // was a no-op and has been dropped.)
                return source[0].ToString().ToLower() + source.Substring(1);
            }
        }

#if !PORTABLE
        /// <summary>
        /// Converts to Camel casing using the casing rules of <paramref name="cultureInfo"/>.
        /// "FooBar" becomes "fooBar"
        /// "Foobar" becomes "foobar"
        /// </summary>
        public static string ToCamelCase(this String source, CultureInfo cultureInfo)
        {
            if (String.IsNullOrEmpty(source))
            {
                return source;
            }
            else if (source.Length == 1)
            {
                return source.ToLower(cultureInfo);
            }
            else
            {
                return source[0].ToString().ToLower(cultureInfo) + source.Substring(1);
            }
        }
#endif

        /// <summary>
        /// Converts to Pascal casing.
        /// "fooBar" becomes "FooBar"
        /// "foobar" becomes "Foobar"
        /// </summary>
        public static string ToPascalCase(this String source)
        {
            if (String.IsNullOrEmpty(source))
            {
                return source;
            }
            else if (source.Length == 1)
            {
                return source.ToUpper();
            }
            else
            {
                return source[0].ToString().ToUpper() + source.Substring(1);
            }
        }

#if !PORTABLE
        /// <summary>
        /// Converts to Pascal casing using the casing rules of <paramref name="cultureInfo"/>.
        /// "fooBar" becomes "FooBar"
        /// "foobar" becomes "Foobar"
        /// </summary>
        public static string ToPascalCase(this String source, CultureInfo cultureInfo)
        {
            if (String.IsNullOrEmpty(source))
            {
                return source;
            }
            else if (source.Length == 1)
            {
                return source.ToUpper(cultureInfo);
            }
            else
            {
                return source[0].ToString().ToUpper(cultureInfo) + source.Substring(1);
            }
        }
#endif

        /// <summary>
        /// Parses a camel cased or pascal cased string and returns an array of the words within the string.
        /// </summary>
        /// <example>
        /// The string "PascalCasing" will return an array with two elements, "Pascal" and "Casing".
        /// </example>
        /// <param name="source">The string that is camel cased that needs to be split</param>
        /// <returns>An array of each word part; empty for null, a single empty element for "".</returns>
        public static string[] SplitCamelOrPascalCase(this string source)
        {
            if (source == null) return new string[] { }; // Return empty array.
            if (source.Length == 0) return new string[] { "" };

            var words = new List<String>();
            int wordStartIndex = 0;
            char[] letters = source.ToCharArray();

            // Skip the first letter — a new word only begins at a later uppercase char.
            for (int i = 1; i < letters.Length; i++)
            {
                if (char.IsUpper(letters[i]))
                {
                    // Grab everything before the current index.
                    words.Add(new String(letters, wordStartIndex, i - wordStartIndex));
                    wordStartIndex = i;
                }
            }

            // Don't forget the trailing word.
            words.Add(new String(letters, wordStartIndex, letters.Length - wordStartIndex));

            return words.ToArray();
        }
    }
}
{ "content_hash": "721bf6403a11eadd331d27f671d4f590", "timestamp": "", "source": "github", "line_count": 238, "max_line_length": 130, "avg_line_length": 35.03781512605042, "alnum_prop": 0.5278810408921933, "repo_name": "zirplsoftware/ZAppEngine", "id": "d6a6120d20b6a8d109c7c49e9cc068d08d9afc84", "size": "8341", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Zirpl.Common/StringExtensions.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C#", "bytes": "2908246" }, { "name": "Shell", "bytes": "1826" } ], "symlink_target": "" }
// Package datafusion groups the Data Fusion API types.
// NOTE(review): this file appears to be part of a generated API group
// (only the package clause is present) — confirm before editing by hand.
package datafusion
{ "content_hash": "d726107ab666176ac032477885308359", "timestamp": "", "source": "github", "line_count": 1, "max_line_length": 18, "avg_line_length": 19, "alnum_prop": 0.8947368421052632, "repo_name": "GoogleCloudPlatform/k8s-config-connector", "id": "4e1f434147ad4bb81ac9adc3efc8c700e8fa465f", "size": "1339", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pkg/clients/generated/apis/datafusion/group.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "13767" }, { "name": "Go", "bytes": "5700747" }, { "name": "HTML", "bytes": "1246" }, { "name": "Makefile", "bytes": "9799" }, { "name": "Python", "bytes": "31671" }, { "name": "Shell", "bytes": "25436" } ], "symlink_target": "" }
package org.springframework.security.oauth2.client.endpoint;

import org.springframework.core.convert.converter.Converter;
import org.springframework.http.RequestEntity;
import org.springframework.http.ResponseEntity;
import org.springframework.http.converter.FormHttpMessageConverter;
import org.springframework.http.converter.HttpMessageConverter;
import org.springframework.security.oauth2.client.http.OAuth2ErrorResponseErrorHandler;
import org.springframework.security.oauth2.core.AuthorizationGrantType;
import org.springframework.security.oauth2.core.OAuth2AuthorizationException;
import org.springframework.security.oauth2.core.OAuth2Error;
import org.springframework.security.oauth2.core.endpoint.OAuth2AccessTokenResponse;
import org.springframework.security.oauth2.core.http.converter.OAuth2AccessTokenResponseHttpMessageConverter;
import org.springframework.util.Assert;
import org.springframework.util.CollectionUtils;
import org.springframework.web.client.ResponseErrorHandler;
import org.springframework.web.client.RestClientException;
import org.springframework.web.client.RestOperations;
import org.springframework.web.client.RestTemplate;

import java.util.Arrays;

/**
 * The default implementation of an {@link OAuth2AccessTokenResponseClient}
 * for the {@link AuthorizationGrantType#REFRESH_TOKEN refresh_token} grant.
 * This implementation uses a {@link RestOperations} when requesting
 * an access token credential at the Authorization Server's Token Endpoint.
 *
 * @author Joe Grandja
 * @since 5.2
 * @see OAuth2AccessTokenResponseClient
 * @see OAuth2RefreshTokenGrantRequest
 * @see OAuth2AccessTokenResponse
 * @see <a target="_blank" href="https://tools.ietf.org/html/rfc6749#section-6">Section 6 Refreshing an Access Token</a>
 */
public final class DefaultRefreshTokenTokenResponseClient implements OAuth2AccessTokenResponseClient<OAuth2RefreshTokenGrantRequest> {
	// Error code used when the HTTP exchange itself fails (network error,
	// malformed response, etc.), as opposed to an OAuth2 error from the server.
	private static final String INVALID_TOKEN_RESPONSE_ERROR_CODE = "invalid_token_response";

	// Converts the grant request into the form-encoded Token Endpoint request.
	private Converter<OAuth2RefreshTokenGrantRequest, RequestEntity<?>> requestEntityConverter =
			new OAuth2RefreshTokenGrantRequestEntityConverter();

	private RestOperations restOperations;

	public DefaultRefreshTokenTokenResponseClient() {
		// Form converter writes the request body; token-response converter reads
		// the JSON Access Token Response; the error handler maps 4xx/5xx bodies
		// to OAuth2AuthorizationException.
		RestTemplate restTemplate = new RestTemplate(Arrays.asList(
				new FormHttpMessageConverter(), new OAuth2AccessTokenResponseHttpMessageConverter()));
		restTemplate.setErrorHandler(new OAuth2ErrorResponseErrorHandler());
		this.restOperations = restTemplate;
	}

	@Override
	public OAuth2AccessTokenResponse getTokenResponse(OAuth2RefreshTokenGrantRequest refreshTokenGrantRequest) {
		Assert.notNull(refreshTokenGrantRequest, "refreshTokenGrantRequest cannot be null");

		RequestEntity<?> request = this.requestEntityConverter.convert(refreshTokenGrantRequest);

		ResponseEntity<OAuth2AccessTokenResponse> response;
		try {
			response = this.restOperations.exchange(request, OAuth2AccessTokenResponse.class);
		} catch (RestClientException ex) {
			// Wrap low-level client errors in the OAuth2 exception type callers expect.
			OAuth2Error oauth2Error = new OAuth2Error(INVALID_TOKEN_RESPONSE_ERROR_CODE,
					"An error occurred while attempting to retrieve the OAuth 2.0 Access Token Response: " + ex.getMessage(), null);
			throw new OAuth2AuthorizationException(oauth2Error, ex);
		}

		// NOTE(review): getBody() is assumed non-null here; an empty response body
		// would raise a NullPointerException below — confirm whether the configured
		// message converter guarantees a body on 2xx responses.
		OAuth2AccessTokenResponse tokenResponse = response.getBody();

		if (CollectionUtils.isEmpty(tokenResponse.getAccessToken().getScopes()) ||
				tokenResponse.getRefreshToken() == null) {
			OAuth2AccessTokenResponse.Builder tokenResponseBuilder = OAuth2AccessTokenResponse.withResponse(tokenResponse);

			if (CollectionUtils.isEmpty(tokenResponse.getAccessToken().getScopes())) {
				// As per spec, in Section 5.1 Successful Access Token Response
				// https://tools.ietf.org/html/rfc6749#section-5.1
				// If AccessTokenResponse.scope is empty, then default to the scope
				// originally requested by the client in the Token Request
				tokenResponseBuilder.scopes(refreshTokenGrantRequest.getAccessToken().getScopes());
			}

			if (tokenResponse.getRefreshToken() == null) {
				// Reuse existing refresh token
				tokenResponseBuilder.refreshToken(refreshTokenGrantRequest.getRefreshToken().getTokenValue());
			}
			tokenResponse = tokenResponseBuilder.build();
		}

		return tokenResponse;
	}

	/**
	 * Sets the {@link Converter} used for converting the {@link OAuth2RefreshTokenGrantRequest}
	 * to a {@link RequestEntity} representation of the OAuth 2.0 Access Token Request.
	 *
	 * @param requestEntityConverter the {@link Converter} used for converting to a {@link RequestEntity} representation of the Access Token Request
	 */
	public void setRequestEntityConverter(Converter<OAuth2RefreshTokenGrantRequest, RequestEntity<?>> requestEntityConverter) {
		Assert.notNull(requestEntityConverter, "requestEntityConverter cannot be null");
		this.requestEntityConverter = requestEntityConverter;
	}

	/**
	 * Sets the {@link RestOperations} used when requesting the OAuth 2.0 Access Token Response.
	 *
	 * <p>
	 * <b>NOTE:</b> At a minimum, the supplied {@code restOperations} must be configured with the following:
	 * <ol>
	 *  <li>{@link HttpMessageConverter}'s - {@link FormHttpMessageConverter} and {@link OAuth2AccessTokenResponseHttpMessageConverter}</li>
	 *  <li>{@link ResponseErrorHandler} - {@link OAuth2ErrorResponseErrorHandler}</li>
	 * </ol>
	 *
	 * @param restOperations the {@link RestOperations} used when requesting the Access Token Response
	 */
	public void setRestOperations(RestOperations restOperations) {
		Assert.notNull(restOperations, "restOperations cannot be null");
		this.restOperations = restOperations;
	}
}
{ "content_hash": "709d301295d5cde5d68fcad2895a04f7", "timestamp": "", "source": "github", "line_count": 119, "max_line_length": 145, "avg_line_length": 47.142857142857146, "alnum_prop": 0.8037433155080214, "repo_name": "eddumelendez/spring-security", "id": "0efd37d8ebd43c7536a5aaa91d2d988a4237c415", "size": "6231", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/endpoint/DefaultRefreshTokenTokenResponseClient.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "AspectJ", "bytes": "3352" }, { "name": "Groovy", "bytes": "64322" }, { "name": "HTML", "bytes": "115" }, { "name": "Java", "bytes": "11317723" }, { "name": "JavaScript", "bytes": "10" }, { "name": "PLSQL", "bytes": "3180" }, { "name": "Python", "bytes": "129" }, { "name": "Shell", "bytes": "811" }, { "name": "XSLT", "bytes": "2344" } ], "symlink_target": "" }
package io.siddhi.core.executor.function; import io.siddhi.annotation.Example; import io.siddhi.annotation.Extension; import io.siddhi.annotation.Parameter; import io.siddhi.annotation.ReturnAttribute; import io.siddhi.annotation.util.DataType; import io.siddhi.core.config.SiddhiQueryContext; import io.siddhi.core.executor.ConstantExpressionExecutor; import io.siddhi.core.executor.ExpressionExecutor; import io.siddhi.core.util.config.ConfigReader; import io.siddhi.core.util.snapshot.state.State; import io.siddhi.core.util.snapshot.state.StateFactory; import io.siddhi.query.api.definition.Attribute; import io.siddhi.query.api.exception.SiddhiAppValidationException; /** * Executor class for convert function. Function execution logic is implemented in execute here. */ @Extension( name = "convert", namespace = "", description = "Converts the first input parameter according to the convertedTo parameter.", parameters = { @Parameter(name = "to.be.converted", description = "This specifies the value to be converted.", type = {DataType.INT, DataType.LONG, DataType.DOUBLE, DataType.FLOAT, DataType.STRING, DataType.BOOL}), @Parameter(name = "converted.to", description = "A string constant parameter to which type the attribute need to be converted " + " using one of the following strings values: 'int', 'long', 'float', 'double', " + "'string', 'bool'.", type = DataType.STRING) }, returnAttributes = @ReturnAttribute( description = "Based on the given convertedTo parameter.", type = {DataType.INT, DataType.LONG, DataType.DOUBLE, DataType.FLOAT, DataType.STRING, DataType.BOOL}), examples = { @Example( syntax = "from fooStream\n" + "select convert(temp, 'double') as temp\n" + "insert into barStream;", description = "This will convert fooStream temp value into 'double'."), @Example( syntax = "from fooStream\n" + "select convert(temp, 'int') as temp\n" + "insert into barStream;", description = "This will convert fooStream temp value into 'int' (value = \"convert(45.9, " + "'int') 
returns 46\")." ) } ) public class ConvertFunctionExecutor extends FunctionExecutor { private Attribute.Type returnType; private Attribute.Type inputType; @Override public StateFactory init(ExpressionExecutor[] attributeExpressionExecutors, ConfigReader configReader, SiddhiQueryContext siddhiQueryContext) { if (attributeExpressionExecutors.length != 2) { throw new SiddhiAppValidationException("convert() must have at 2 parameters, attribute and to be " + "converted type"); } inputType = attributeExpressionExecutors[0].getReturnType(); if (inputType == Attribute.Type.OBJECT) { throw new SiddhiAppValidationException("1st parameter of convert() cannot be 'object' as " + "it's not supported, it has to be either of (STRING, " + "INT, LONG, FLOAT, DOUBLE, BOOL), but found " + attributeExpressionExecutors[0].getReturnType()); } if (attributeExpressionExecutors[1].getReturnType() != Attribute.Type.STRING) { throw new SiddhiAppValidationException("2nd parameter of convert() must be 'string' have constant " + "value either of (STRING, INT, LONG, FLOAT, DOUBLE, " + "BOOL), but found " + attributeExpressionExecutors[0].getReturnType()); } if (!(attributeExpressionExecutors[1] instanceof ConstantExpressionExecutor)) { throw new SiddhiAppValidationException("2nd parameter of convert() must have constant value either " + "of (STRING, INT, LONG, FLOAT, DOUBLE, BOOL), but found " + "a variable expression"); } String type = (String) attributeExpressionExecutors[1].execute(null); if (Attribute.Type.STRING.toString().equalsIgnoreCase(type)) { returnType = Attribute.Type.STRING; } else if (Attribute.Type.BOOL.toString().equalsIgnoreCase(type)) { returnType = Attribute.Type.BOOL; } else if (Attribute.Type.DOUBLE.toString().equalsIgnoreCase(type)) { returnType = Attribute.Type.DOUBLE; } else if (Attribute.Type.FLOAT.toString().equalsIgnoreCase(type)) { returnType = Attribute.Type.FLOAT; } else if (Attribute.Type.INT.toString().equalsIgnoreCase(type)) { returnType = Attribute.Type.INT; } 
else if (Attribute.Type.LONG.toString().equalsIgnoreCase(type)) { returnType = Attribute.Type.LONG; } else { throw new SiddhiAppValidationException("2nd parameter of convert() must have value either of " + "(STRING, INT, LONG, FLOAT, DOUBLE, BOOL), but found '" + type + "'"); } return null; } @Override public Attribute.Type getReturnType() { return returnType; } protected Object execute(Object[] obj, State state) { Object data = obj[0]; if (data != null) { try { switch (returnType) { case STRING: return data.toString(); case INT: switch (inputType) { case STRING: return Integer.parseInt((String) data); case INT: return data; case LONG: return ((Long) data).intValue(); case FLOAT: return ((Float) data).intValue(); case DOUBLE: return ((Double) data).intValue(); case BOOL: return ((Boolean) data) ? 1 : 0; case OBJECT: return null; } break; case LONG: switch (inputType) { case STRING: return Long.parseLong((String) data); case INT: return ((Integer) data).longValue(); case LONG: return data; case FLOAT: return ((Float) data).longValue(); case DOUBLE: return ((Double) data).longValue(); case BOOL: return ((Boolean) data) ? 1L : 0L; case OBJECT: return null; } break; case FLOAT: switch (inputType) { case STRING: return Float.parseFloat((String) data); case INT: return ((Integer) data).floatValue(); case LONG: return ((Long) data).floatValue(); case FLOAT: return data; case DOUBLE: return ((Double) data).floatValue(); case BOOL: return ((Boolean) data) ? 1F : 0F; case OBJECT: return null; } break; case DOUBLE: switch (inputType) { case STRING: return Double.parseDouble((String) data); case INT: return ((Integer) data).doubleValue(); case LONG: return ((Long) data).doubleValue(); case FLOAT: return ((Float) data).doubleValue(); case DOUBLE: return data; case BOOL: return ((Boolean) data) ? 
1.0 : 0.0; case OBJECT: return null; } break; case BOOL: switch (inputType) { case STRING: return Boolean.parseBoolean((String) data); case INT: return ((Integer) data) == 1; case LONG: return ((Long) data) == 1L; case FLOAT: return ((Float) data) == 1F; case DOUBLE: return ((Double) data) == 1.0; case BOOL: return data; case OBJECT: return null; } break; case OBJECT: break; } } catch (NumberFormatException e) { return null; } } return null; } @Override protected Object execute(Object data, State state) { //will not occur return null; } }
{ "content_hash": "93283dc54efc22b04728950b2ec8d3bd", "timestamp": "", "source": "github", "line_count": 223, "max_line_length": 119, "avg_line_length": 46.46188340807175, "alnum_prop": 0.45893253546954926, "repo_name": "ramindu90/siddhi", "id": "a5198d7c139dab595a93dc5a8da3490d40d53e53", "size": "11034", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "modules/siddhi-core/src/main/java/io/siddhi/core/executor/function/ConvertFunctionExecutor.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "20260" }, { "name": "FreeMarker", "bytes": "11990" }, { "name": "Java", "bytes": "7668644" }, { "name": "Shell", "bytes": "455" } ], "symlink_target": "" }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1"> <title>metacoq: 13 s 🏆</title> <link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" /> <link href="../../../../../bootstrap.min.css" rel="stylesheet"> <link href="../../../../../bootstrap-custom.css" rel="stylesheet"> <link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet"> <script src="../../../../../moment.min.js"></script> <!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries --> <!-- WARNING: Respond.js doesn't work if you view the page via file:// --> <!--[if lt IE 9]> <script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script> <script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script> <![endif]--> </head> <body> <div class="container"> <div class="navbar navbar-default" role="navigation"> <div class="container-fluid"> <div class="navbar-header"> <a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a> </div> <div id="navbar" class="collapse navbar-collapse"> <ul class="nav navbar-nav"> <li><a href="../..">clean / released</a></li> <li class="active"><a href="">8.9.1 / metacoq - 1.0~alpha1+8.9</a></li> </ul> </div> </div> </div> <div class="article"> <div class="row"> <div class="col-md-12"> <a href="../..">« Up</a> <h1> metacoq <small> 1.0~alpha1+8.9 <span class="label label-success">13 s 🏆</span> </small> </h1> <p>📅 <em><script>document.write(moment("2022-10-17 13:17:37 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2022-10-17 13:17:37 UTC)</em><p> <h2>Context</h2> <pre># Packages matching: installed # Name # Installed # Synopsis base-bigarray base base-threads base base-unix base camlp5 7.14 Preprocessor-pretty-printer of OCaml conf-findutils 1 Virtual package relying on findutils conf-perl 2 Virtual package relying on perl coq 8.9.1 Formal proof 
management system num 1.4 The legacy Num library for arbitrary-precision integer and rational arithmetic ocaml 4.08.1 The OCaml compiler (virtual package) ocaml-base-compiler 4.08.1 Official release 4.08.1 ocaml-config 1 OCaml Switch Configuration ocamlfind 1.9.5 A library manager for OCaml # opam file: opam-version: &quot;2.0&quot; maintainer: &quot;[email protected]&quot; homepage: &quot;https://metacoq.github.io/metacoq&quot; dev-repo: &quot;git+https://github.com/MetaCoq/metacoq.git#coq-8.8&quot; bug-reports: &quot;https://github.com/MetaCoq/metacoq/issues&quot; authors: [&quot;Abhishek Anand &lt;[email protected]&gt;&quot; &quot;Simon Boulier &lt;[email protected]&gt;&quot; &quot;Cyril Cohen &lt;[email protected]&gt;&quot; &quot;Yannick Forster &lt;[email protected]&gt;&quot; &quot;Fabian Kunze &lt;[email protected]&gt;&quot; &quot;Gregory Malecha &lt;[email protected]&gt;&quot; &quot;Matthieu Sozeau &lt;[email protected]&gt;&quot; &quot;Nicolas Tabareau &lt;[email protected]&gt;&quot; &quot;Théo Winterhalter &lt;[email protected]&gt;&quot; ] license: &quot;MIT&quot; depends: [ &quot;ocaml&quot; {&gt; &quot;4.02.3&quot;} &quot;coq&quot; {&gt;= &quot;8.9&quot; &amp; &lt; &quot;8.10~&quot;} &quot;coq-metacoq-template&quot; {= version} &quot;coq-metacoq-checker&quot; {= version} &quot;coq-metacoq-pcuic&quot; {= version} &quot;coq-metacoq-safechecker&quot; {= version} &quot;coq-metacoq-erasure&quot; {= version} &quot;coq-metacoq-translations&quot; {= version} ] synopsis: &quot;A meta-programming framework for Coq&quot; description: &quot;&quot;&quot; MetaCoq is a meta-programming framework for Coq. The meta-package includes the template-coq library, unverified checker for Coq, PCUIC development including a verified translation from Coq to PCUIC, safe checker and erasure for PCUIC and example translations. See individual packages for more detailed descriptions. 
&quot;&quot;&quot; url { src: &quot;https://github.com/MetaCoq/metacoq/archive/1.0-alpha+8.9.tar.gz&quot; checksum: &quot;sha256=899ef4ee73b1684a0f1d2e37ab9ab0f9b24424f6d8a10a10efd474c0ed93488e&quot; }</pre> <h2>Lint</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Dry install 🏜️</h2> <p>Dry install with the current Coq version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam install -y --show-action coq-metacoq.1.0~alpha1+8.9 coq.8.9.1</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Install dependencies</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam list; echo; ulimit -Sv 4000000; timeout 4h opam install -y --deps-only coq-metacoq.1.0~alpha1+8.9 coq.8.9.1</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>35 m 3 s</dd> </dl> <h2>Install 🚀</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam list; echo; ulimit -Sv 16000000; timeout 4h opam install -y -v coq-metacoq.1.0~alpha1+8.9 coq.8.9.1</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>13 s</dd> </dl> <h2>Installation size</h2> <p>No files were installed.</p> <h2>Uninstall 🧹</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam remove -y coq-metacoq.1.0~alpha1+8.9</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Missing removes</dt> <dd> none </dd> <dt>Wrong removes</dt> <dd> none </dd> </dl> </div> </div> </div> <hr/> <div class="footer"> <p class="text-center"> Sources are on <a href="https://github.com/coq-bench">GitHub</a> © Guillaume Claret 🐣 </p> </div> </div> <script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script> <script src="../../../../../bootstrap.min.js"></script> </body> </html>
{ "content_hash": "4d98a7b472b8d40100e50cd3eb81c6e6", "timestamp": "", "source": "github", "line_count": 170, "max_line_length": 159, "avg_line_length": 43.81764705882353, "alnum_prop": 0.5555108068197073, "repo_name": "coq-bench/coq-bench.github.io", "id": "426175d9aea64d8fad78425387dfba5c26550cbb", "size": "7475", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clean/Linux-x86_64-4.08.1-2.0.5/released/8.9.1/metacoq/1.0~alpha1+8.9.html", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
package com.opengamma.core.id; import static org.testng.Assert.assertEquals; import java.util.Arrays; import java.util.List; import org.testng.annotations.Test; import com.opengamma.id.ExternalId; import com.opengamma.id.ExternalIdBundle; import com.opengamma.util.test.TestGroup; /** * Tests the {@link ExternalIdOrderConfig} class. */ @Test(groups = TestGroup.UNIT) @SuppressWarnings("deprecation") public class ExternalIdOrderConfigTest { /** * */ public void testGetPreferredEmpty() { assertEquals(ExternalIdOrderConfig.DEFAULT_CONFIG.getPreferred(ExternalIdBundle.EMPTY), null); } /** * */ public void testGetPreferredSingle() { assertEquals(ExternalIdOrderConfig.DEFAULT_CONFIG.getPreferred(ExternalId.of("Foo", "Bar").toBundle()), ExternalId.of("Foo", "Bar")); } /** * */ public void testGetPreferredNotListed() { assertEquals(ExternalIdOrderConfig.DEFAULT_CONFIG.getPreferred(ExternalIdBundle.of(ExternalId.of("Foo", "Bar"), ExternalId.of("Bar", "Foo"))), ExternalId.of("Bar", "Foo")); } /** * */ public void testGetPreferredDefault() { assertEquals(ExternalIdOrderConfig.DEFAULT_CONFIG.getPreferred(ExternalIdBundle.of(ExternalId.of(ExternalSchemes.BLOOMBERG_TCM, "tcm"), ExternalId.of(ExternalSchemes.BLOOMBERG_TICKER, "ticker"), ExternalId.of("Foo", "Bar"))), ExternalId.of(ExternalSchemes.BLOOMBERG_TCM, "tcm")); } /** * */ public void testSort() { final ExternalId a = ExternalId.of(ExternalSchemes.BLOOMBERG_TCM, "bbg_tcm"); final ExternalId b = ExternalId.of(ExternalSchemes.BLOOMBERG_TICKER, "bbg_ticker"); final ExternalId c = ExternalId.of(ExternalSchemes.RIC, "ric"); final ExternalId d = ExternalId.of(ExternalSchemes.BLOOMBERG_TICKER_WEAK, "bbg_ticker_weak"); final ExternalId e = ExternalId.of(ExternalSchemes.ACTIVFEED_TICKER, "activ_ticker"); final ExternalId f = ExternalId.of(ExternalSchemes.SURF, "surf"); final ExternalId g = ExternalId.of(ExternalSchemes.ISIN, "isin"); final ExternalId h = ExternalId.of(ExternalSchemes.CUSIP, "cusip"); final ExternalId i = 
ExternalId.of(ExternalSchemes.SEDOL1, "sedol1"); final ExternalId j = ExternalId.of(ExternalSchemes.OG_SYNTHETIC_TICKER, "opengamma"); final ExternalId k = ExternalId.of(ExternalSchemes.BLOOMBERG_BUID, "bbg_buid"); final ExternalId l = ExternalId.of(ExternalSchemes.BLOOMBERG_BUID_WEAK, "bbg_buid_weak"); final ExternalId m = ExternalId.of("Foo", "Bar"); final ExternalId n = ExternalId.of("Foo", "Cow"); final ExternalIdBundle bundle = ExternalIdBundle.of(d, l, a, b, c, g, m, n, h, i, e, f, k, j); final List<ExternalId> sorted = ExternalIdOrderConfig.DEFAULT_CONFIG.sort(bundle); assertEquals(sorted, Arrays.asList(a, b, c, d, e, f, g, h, i, j, k, l, m, n)); } }
{ "content_hash": "9fdc8c4e0ee8922b770dd94d1e532e9b", "timestamp": "", "source": "github", "line_count": 75, "max_line_length": 151, "avg_line_length": 37.70666666666666, "alnum_prop": 0.7146393210749646, "repo_name": "McLeodMoores/starling", "id": "863152d31610e99317b35c39f5a87cac703b6568", "size": "2965", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "projects/core/src/test/java/com/opengamma/core/id/ExternalIdOrderConfigTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "2505" }, { "name": "CSS", "bytes": "213501" }, { "name": "FreeMarker", "bytes": "310184" }, { "name": "GAP", "bytes": "1490" }, { "name": "Groovy", "bytes": "11518" }, { "name": "HTML", "bytes": "318295" }, { "name": "Java", "bytes": "79541905" }, { "name": "JavaScript", "bytes": "1511230" }, { "name": "PLSQL", "bytes": "398" }, { "name": "PLpgSQL", "bytes": "26901" }, { "name": "Shell", "bytes": "11481" }, { "name": "TSQL", "bytes": "604117" } ], "symlink_target": "" }
package com.wwq.genesisfreelander.view.fragment.home; import com.wwq.genesisfreelander.view.base.BaseOrderFragment; import cn.bingoogolapple.refreshlayout.BGARefreshLayout; /** * 进行中 * Created by wwq on 2017/6/7. */ public class OngoingFragment extends BaseOrderFragment { @Override public void onBGARefreshLayoutBeginRefreshing(BGARefreshLayout refreshLayout) { } @Override public boolean onBGARefreshLayoutBeginLoadingMore(BGARefreshLayout refreshLayout) { return false; } @Override public int getPageNumber() { return 0; } }
{ "content_hash": "b7ac8a63402b598272816e32be8cf99e", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 87, "avg_line_length": 21.85185185185185, "alnum_prop": 0.7389830508474576, "repo_name": "weiwenqiang/GenesisFreelander", "id": "0e5d02836631a84cbf1602775cb3c2adad5dbfe2", "size": "596", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/main/java/com/wwq/genesisfreelander/view/fragment/home/OngoingFragment.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "1614489" } ], "symlink_target": "" }
@interface ChimeIndex : NSObject @property (nonatomic, readonly) NSArray *symbols; // Designated initializer is init. @end
{ "content_hash": "3a213799885a64140cdd35fd8243a582", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 49, "avg_line_length": 18, "alnum_prop": 0.7619047619047619, "repo_name": "johndpope/Chime", "id": "a6b508cd55a4d38fcdfbcd42569212c288ad1fdd", "size": "363", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Chime Frameworks/Shared/ChimeIndex.h", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "199545" }, { "name": "Groff", "bytes": "3139" }, { "name": "Objective-C", "bytes": "61007" }, { "name": "Shell", "bytes": "807" } ], "symlink_target": "" }
import Symbol from 'core-js-pure/full/symbol'; QUnit.test('Symbol.dispose', assert => { assert.true('dispose' in Symbol, 'Symbol.dispose available'); assert.true(Object(Symbol.dispose) instanceof Symbol, 'Symbol.dispose is symbol'); });
{ "content_hash": "ab59787a954b4c1abe52435177154bc7", "timestamp": "", "source": "github", "line_count": 6, "max_line_length": 84, "avg_line_length": 40.333333333333336, "alnum_prop": 0.731404958677686, "repo_name": "zloirock/core-js", "id": "12cb13730c0522fe541dcc765f23be792737cf8b", "size": "242", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/unit-pure/esnext.symbol.dispose.js", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "1672" }, { "name": "JavaScript", "bytes": "3005012" } ], "symlink_target": "" }
require 'omniauth-oauth2' module OmniAuth module Strategies class Oauth51 < OmniAuth::Strategies::OAuth2 option :name, :oauth51 option :client_options, { site: 'https://www.oauth51.com', authorize_url: '/oauth/authorize' } uid { raw_info['id'] } def authorize_params super.tap do |params| if request.params[:scope] params[:scope] = request.params[:scope] end end end info do { email: raw_info['email'], image: raw_info['avatar_url'] } end def raw_info @raw_info ||= access_token.get('/api/v1/users/me.json').parsed end end end end
{ "content_hash": "5c163508da0ce9c2368d32b772dc8304", "timestamp": "", "source": "github", "line_count": 35, "max_line_length": 70, "avg_line_length": 20.485714285714284, "alnum_prop": 0.5397489539748954, "repo_name": "coders51/omniauth-oauth51", "id": "c113c60a014da7f3d27d7a09cea6b6d3f44edb24", "size": "717", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/omniauth/strategies/oauth51.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Ruby", "bytes": "2751" } ], "symlink_target": "" }
var Hoek = require('hoek'); exports.register = function (server, options, next) { options = Hoek.applyToDefaults({ basePath: '' }, options); server.route({ method: 'DELETE', path: options.basePath + '/logout', config: { auth: { mode: 'try', strategy: 'session' }, plugins: { 'hapi-auth-cookie': { redirectTo: false } } }, handler: function (request, reply) { var Session = request.server.plugins['hapi-mongo-models'].Session; var credentials = request.auth.credentials || { session: {} }; var session = credentials.session || {}; Session.findByIdAndDelete(session._id, function (err, session) { if (err) { return reply(err); } if (!session) { return reply({ message: 'Session not found.' }).code(404); } request.auth.session.clear(); reply({ message: 'Success.' }); }); } }); next(); }; exports.register.attributes = { name: 'logout' };
{ "content_hash": "2a2bbea2941d3648383d61eb38c75c3f", "timestamp": "", "source": "github", "line_count": 52, "max_line_length": 72, "avg_line_length": 20.173076923076923, "alnum_prop": 0.5386081982840801, "repo_name": "williamle8300/aqua", "id": "dd8d5f97cce6e81e0e3a6b0a36ee38b556023c4b", "size": "1049", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "server/api/logout.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "2601" }, { "name": "JavaScript", "bytes": "908637" } ], "symlink_target": "" }
ACCEPTED #### According to Index Fungorum #### Published in null #### Original name Placodium circinatum f. ocellata Bagl. & Carestia ### Remarks null
{ "content_hash": "134c04419bb6dd7b0a6600e2a9cb7795", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 49, "avg_line_length": 11.846153846153847, "alnum_prop": 0.7142857142857143, "repo_name": "mdoering/backbone", "id": "b34526a0a4b16522ea65cc710bdd770a069119e0", "size": "234", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Fungi/Ascomycota/Lecanoromycetes/Lecanorales/Lecanoraceae/Circinaria/Circinaria radiosa/Circinaria radiosa ocellata/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
package com.frostwire.jlibtorrent.alerts; import com.frostwire.jlibtorrent.swig.dht_bootstrap_alert; /** * This alert is posted when the initial DHT bootstrap is done. * * @author gubatron * @author aldenml */ public final class DhtBootstrapAlert extends AbstractAlert<dht_bootstrap_alert> { public DhtBootstrapAlert(dht_bootstrap_alert alert) { super(alert); } }
{ "content_hash": "aeceea3d0ef0a3dd32e9fba52e501472", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 81, "avg_line_length": 24.25, "alnum_prop": 0.7396907216494846, "repo_name": "tchoulihan/frostwire-jlibtorrent", "id": "2b227dbbb390faf8bc06541d63d6ee9a6ddf2bac", "size": "388", "binary": false, "copies": "8", "ref": "refs/heads/master", "path": "src/com/frostwire/jlibtorrent/alerts/DhtBootstrapAlert.java", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "4225" }, { "name": "C++", "bytes": "2732950" }, { "name": "Java", "bytes": "1863822" }, { "name": "Shell", "bytes": "6434" } ], "symlink_target": "" }
static const char UNUSED *bitcoin_strings[] = { QT_TRANSLATE_NOOP("bitcoin-core", "" "%s, you must set a rpcpassword in the configuration file:\n" " %s\n" "It is recommended you use the following random password:\n" "rpcuser=novacoinrpc\n" "rpcpassword=%s\n" "(you do not need to remember this password)\n" "If the file does not exist, create it with owner-readable-only file " "permissions.\n"), QT_TRANSLATE_NOOP("bitcoin-core", "" "Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:" "@STRENGTH)"), QT_TRANSLATE_NOOP("bitcoin-core", "" "An error occurred while setting up the RPC port %u for listening on IPv4: %s"), QT_TRANSLATE_NOOP("bitcoin-core", "" "An error occurred while setting up the RPC port %u for listening on IPv6, " "falling back to IPv4: %s"), QT_TRANSLATE_NOOP("bitcoin-core", "" "Cannot obtain a lock on data directory %s. NovaCoin is probably already " "running."), QT_TRANSLATE_NOOP("bitcoin-core", "" "Detach block and address databases. Increases shutdown time (default: 0)"), QT_TRANSLATE_NOOP("bitcoin-core", "" "Error initializing database environment %s! To recover, BACKUP THAT " "DIRECTORY, then remove everything from it except for wallet.dat."), QT_TRANSLATE_NOOP("bitcoin-core", "" "Error: The transaction was rejected. 
This might happen if some of the coins " "in your wallet were already spent, such as if you used a copy of wallet.dat " "and coins were spent in the copy but not marked as spent here."), QT_TRANSLATE_NOOP("bitcoin-core", "" "Error: This transaction requires a transaction fee of at least %s because of " "its amount, complexity, or use of recently received funds "), QT_TRANSLATE_NOOP("bitcoin-core", "" "Error: Wallet unlocked for block minting only, unable to create transaction."), QT_TRANSLATE_NOOP("bitcoin-core", "" "Execute command when the best block changes (%s in cmd is replaced by block " "hash)"), QT_TRANSLATE_NOOP("bitcoin-core", "" "Listen for JSON-RPC connections on <port> (default: 8344 or testnet: 18344)"), QT_TRANSLATE_NOOP("bitcoin-core", "" "Number of seconds to keep misbehaving peers from reconnecting (default: " "86400)"), QT_TRANSLATE_NOOP("bitcoin-core", "" "Set maximum size of high-priority/low-fee transactions in bytes (default: " "27000)"), QT_TRANSLATE_NOOP("bitcoin-core", "" "Unable to bind to %s on this computer. NovaCoin is probably already running."), QT_TRANSLATE_NOOP("bitcoin-core", "" "Warning: -paytxfee is set very high! This is the transaction fee you will " "pay if you send a transaction."), QT_TRANSLATE_NOOP("bitcoin-core", "" "Warning: Please check that your computer's date and time are correct! If " "your clock is wrong NovaCoin will not work properly."), QT_TRANSLATE_NOOP("bitcoin-core", "" "Warning: error reading wallet.dat! All keys read correctly, but transaction " "data or address book entries might be missing or incorrect."), QT_TRANSLATE_NOOP("bitcoin-core", "" "Warning: wallet.dat corrupt, data salvaged! 
Original wallet.dat saved as " "wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect " "you should restore from a backup."), QT_TRANSLATE_NOOP("bitcoin-core", "" "You must set rpcpassword=<password> in the configuration file:\n" "%s\n" "If the file does not exist, create it with owner-readable-only file " "permissions."), QT_TRANSLATE_NOOP("bitcoin-core", "Accept command line and JSON-RPC commands"), QT_TRANSLATE_NOOP("bitcoin-core", "Accept connections from outside (default: 1 if no -proxy or -connect)"), QT_TRANSLATE_NOOP("bitcoin-core", "Add a node to connect to and attempt to keep the connection open"), QT_TRANSLATE_NOOP("bitcoin-core", "Allow DNS lookups for -addnode, -seednode and -connect"), QT_TRANSLATE_NOOP("bitcoin-core", "Allow JSON-RPC connections from specified IP address"), QT_TRANSLATE_NOOP("bitcoin-core", "Attempt to recover private keys from a corrupt wallet.dat"), QT_TRANSLATE_NOOP("bitcoin-core", "Bind to given address. Use [host]:port notation for IPv6"), QT_TRANSLATE_NOOP("bitcoin-core", "Block creation options:"), QT_TRANSLATE_NOOP("bitcoin-core", "Cannot downgrade wallet"), QT_TRANSLATE_NOOP("bitcoin-core", "Cannot initialize keypool"), QT_TRANSLATE_NOOP("bitcoin-core", "Cannot resolve -bind address: '%s'"), QT_TRANSLATE_NOOP("bitcoin-core", "Cannot resolve -externalip address: '%s'"), QT_TRANSLATE_NOOP("bitcoin-core", "Cannot write default address"), QT_TRANSLATE_NOOP("bitcoin-core", "Connect only to the specified node(s)"), QT_TRANSLATE_NOOP("bitcoin-core", "Connect through socks proxy"), QT_TRANSLATE_NOOP("bitcoin-core", "Connect to a node to retrieve peer addresses, and disconnect"), QT_TRANSLATE_NOOP("bitcoin-core", "Discover own IP address (default: 1 when listening and no -externalip)"), QT_TRANSLATE_NOOP("bitcoin-core", "Done loading"), QT_TRANSLATE_NOOP("bitcoin-core", "Error loading blkindex.dat"), QT_TRANSLATE_NOOP("bitcoin-core", "Error loading wallet.dat"), QT_TRANSLATE_NOOP("bitcoin-core", "Error loading 
wallet.dat: Wallet corrupted"), QT_TRANSLATE_NOOP("bitcoin-core", "Error loading wallet.dat: Wallet requires newer version of NovaCoin"), QT_TRANSLATE_NOOP("bitcoin-core", "Error"), QT_TRANSLATE_NOOP("bitcoin-core", "Error: Transaction creation failed "), QT_TRANSLATE_NOOP("bitcoin-core", "Error: Wallet locked, unable to create transaction "), QT_TRANSLATE_NOOP("bitcoin-core", "Error: could not start node"), QT_TRANSLATE_NOOP("bitcoin-core", "Failed to listen on any port. Use -listen=0 if you want this."), QT_TRANSLATE_NOOP("bitcoin-core", "Fee per KB to add to transactions you send"), QT_TRANSLATE_NOOP("bitcoin-core", "Find peers using DNS lookup (default: 0)"), QT_TRANSLATE_NOOP("bitcoin-core", "Find peers using internet relay chat (default: 1)"), QT_TRANSLATE_NOOP("bitcoin-core", "Get help for a command"), QT_TRANSLATE_NOOP("bitcoin-core", "How many blocks to check at startup (default: 2500, 0 = all)"), QT_TRANSLATE_NOOP("bitcoin-core", "How thorough the block verification is (0-6, default: 1)"), QT_TRANSLATE_NOOP("bitcoin-core", "Importing blockchain data file."), QT_TRANSLATE_NOOP("bitcoin-core", "Importing bootstrap blockchain data file."), QT_TRANSLATE_NOOP("bitcoin-core", "Imports blocks from external blk000?.dat file"), QT_TRANSLATE_NOOP("bitcoin-core", "Insufficient funds"), QT_TRANSLATE_NOOP("bitcoin-core", "Invalid -proxy address: '%s'"), QT_TRANSLATE_NOOP("bitcoin-core", "Invalid -tor address: '%s'"), QT_TRANSLATE_NOOP("bitcoin-core", "Invalid amount for -paytxfee=<amount>: '%s'"), QT_TRANSLATE_NOOP("bitcoin-core", "Invalid amount for -reservebalance=<amount>"), QT_TRANSLATE_NOOP("bitcoin-core", "Invalid amount"), QT_TRANSLATE_NOOP("bitcoin-core", "List commands"), QT_TRANSLATE_NOOP("bitcoin-core", "Listen for connections on <port> (default: 7777 or testnet: 17777)"), QT_TRANSLATE_NOOP("bitcoin-core", "Loading addresses..."), QT_TRANSLATE_NOOP("bitcoin-core", "Loading block index..."), QT_TRANSLATE_NOOP("bitcoin-core", "Loading wallet..."), 
QT_TRANSLATE_NOOP("bitcoin-core", "Maintain at most <n> connections to peers (default: 125)"), QT_TRANSLATE_NOOP("bitcoin-core", "Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)"), QT_TRANSLATE_NOOP("bitcoin-core", "Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)"), QT_TRANSLATE_NOOP("bitcoin-core", "NovaCoin version"), QT_TRANSLATE_NOOP("bitcoin-core", "NovaCoin"), QT_TRANSLATE_NOOP("bitcoin-core", "Only connect to nodes in network <net> (IPv4, IPv6 or Tor)"), QT_TRANSLATE_NOOP("bitcoin-core", "Options:"), QT_TRANSLATE_NOOP("bitcoin-core", "Output extra debugging information. Implies all other -debug* options"), QT_TRANSLATE_NOOP("bitcoin-core", "Output extra network debugging information"), QT_TRANSLATE_NOOP("bitcoin-core", "Password for JSON-RPC connections"), QT_TRANSLATE_NOOP("bitcoin-core", "Prepend debug output with timestamp"), QT_TRANSLATE_NOOP("bitcoin-core", "Rescan the block chain for missing wallet transactions"), QT_TRANSLATE_NOOP("bitcoin-core", "Rescanning..."), QT_TRANSLATE_NOOP("bitcoin-core", "Run in the background as a daemon and accept commands"), QT_TRANSLATE_NOOP("bitcoin-core", "SSL options: (see the Bitcoin Wiki for SSL setup instructions)"), QT_TRANSLATE_NOOP("bitcoin-core", "Select the version of socks proxy to use (4-5, default: 5)"), QT_TRANSLATE_NOOP("bitcoin-core", "Send command to -server or novacoind"), QT_TRANSLATE_NOOP("bitcoin-core", "Send commands to node running on <ip> (default: 127.0.0.1)"), QT_TRANSLATE_NOOP("bitcoin-core", "Send trace/debug info to console instead of debug.log file"), QT_TRANSLATE_NOOP("bitcoin-core", "Send trace/debug info to debugger"), QT_TRANSLATE_NOOP("bitcoin-core", "Sending..."), QT_TRANSLATE_NOOP("bitcoin-core", "Server certificate file (default: server.cert)"), QT_TRANSLATE_NOOP("bitcoin-core", "Server private key (default: server.pem)"), QT_TRANSLATE_NOOP("bitcoin-core", "Set database cache size in megabytes (default: 25)"), 
QT_TRANSLATE_NOOP("bitcoin-core", "Set database disk log size in megabytes (default: 100)"), QT_TRANSLATE_NOOP("bitcoin-core", "Set key pool size to <n> (default: 100)"), QT_TRANSLATE_NOOP("bitcoin-core", "Set maximum block size in bytes (default: 250000)"), QT_TRANSLATE_NOOP("bitcoin-core", "Set minimum block size in bytes (default: 0)"), QT_TRANSLATE_NOOP("bitcoin-core", "Shrink debug.log file on client startup (default: 1 when no -debug)"), QT_TRANSLATE_NOOP("bitcoin-core", "Specify configuration file (default: novacoin.conf)"), QT_TRANSLATE_NOOP("bitcoin-core", "Specify connection timeout in milliseconds (default: 5000)"), QT_TRANSLATE_NOOP("bitcoin-core", "Specify data directory"), QT_TRANSLATE_NOOP("bitcoin-core", "Specify pid file (default: novacoind.pid)"), QT_TRANSLATE_NOOP("bitcoin-core", "Specify your own public address"), QT_TRANSLATE_NOOP("bitcoin-core", "This help message"), QT_TRANSLATE_NOOP("bitcoin-core", "Threshold for disconnecting misbehaving peers (default: 100)"), QT_TRANSLATE_NOOP("bitcoin-core", "To use the %s option"), QT_TRANSLATE_NOOP("bitcoin-core", "Unable to bind to %s on this computer (bind returned error %d, %s)"), QT_TRANSLATE_NOOP("bitcoin-core", "Unable to sign checkpoint, wrong checkpointkey?\n"), QT_TRANSLATE_NOOP("bitcoin-core", "Unknown -socks proxy version requested: %i"), QT_TRANSLATE_NOOP("bitcoin-core", "Unknown network specified in -onlynet: '%s'"), QT_TRANSLATE_NOOP("bitcoin-core", "Upgrade wallet to latest format"), QT_TRANSLATE_NOOP("bitcoin-core", "Usage:"), QT_TRANSLATE_NOOP("bitcoin-core", "Use OpenSSL (https) for JSON-RPC connections"), QT_TRANSLATE_NOOP("bitcoin-core", "Use UPnP to map the listening port (default: 0)"), QT_TRANSLATE_NOOP("bitcoin-core", "Use UPnP to map the listening port (default: 1 when listening)"), QT_TRANSLATE_NOOP("bitcoin-core", "Use proxy to reach tor hidden services (default: same as -proxy)"), QT_TRANSLATE_NOOP("bitcoin-core", "Use the test network"), QT_TRANSLATE_NOOP("bitcoin-core", 
"Username for JSON-RPC connections"), QT_TRANSLATE_NOOP("bitcoin-core", "Verifying database integrity..."), QT_TRANSLATE_NOOP("bitcoin-core", "Wallet needed to be rewritten: restart NovaCoin to complete"), QT_TRANSLATE_NOOP("bitcoin-core", "Warning: Disk space is low!"), QT_TRANSLATE_NOOP("bitcoin-core", "Warning: This version is obsolete, upgrade required!"), QT_TRANSLATE_NOOP("bitcoin-core", "wallet.dat corrupt, salvage failed"), QT_TRANSLATE_NOOP("bitcoin-core", "Specify wallet file (within data directory)"), QT_TRANSLATE_NOOP("bitcoin-core", "Use in-memory logging for block index database (default: 1)"), QT_TRANSLATE_NOOP("bitcoin-core", "Find peers using DNS lookup (default: 1)"), QT_TRANSLATE_NOOP("bitcoin-core", "Sync checkpoints policy (default: strict)"), QT_TRANSLATE_NOOP("bitcoin-core", "Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)"), QT_TRANSLATE_NOOP("bitcoin-core", "Require a confirmations for change (default: 0)"), QT_TRANSLATE_NOOP("bitcoin-core", "Enforce transaction scripts to use canonical PUSH operators (default: 1)"), QT_TRANSLATE_NOOP("bitcoin-core", "Set the number of script verification threads (1-16, 0=auto, default: 0)"), QT_TRANSLATE_NOOP("bitcoin-core", "When creating transactions, ignore inputs with value less than this (default: %s)"), };
{ "content_hash": "3d02cb22937362c408eeb5520ede77d1", "timestamp": "", "source": "github", "line_count": 177, "max_line_length": 119, "avg_line_length": 69.10169491525424, "alnum_prop": 0.7398413866405036, "repo_name": "elambert2014/novacoin", "id": "9b44e9ace67ef7b29ba079a553f8a752c20cdd8c", "size": "12383", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/qt/bitcoinstrings.cpp", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "39706" }, { "name": "Batchfile", "bytes": "6688" }, { "name": "C", "bytes": "49670" }, { "name": "C++", "bytes": "2642641" }, { "name": "CSS", "bytes": "1127" }, { "name": "Groff", "bytes": "12684" }, { "name": "HTML", "bytes": "50621" }, { "name": "Makefile", "bytes": "13582" }, { "name": "NSIS", "bytes": "6088" }, { "name": "Objective-C", "bytes": "1108" }, { "name": "Objective-C++", "bytes": "7225" }, { "name": "Perl", "bytes": "1049" }, { "name": "Python", "bytes": "45493" }, { "name": "QMake", "bytes": "16334" }, { "name": "Shell", "bytes": "17402" } ], "symlink_target": "" }