(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
typeof define === 'function' && define.amd ? define(['exports'], factory) :
(factory((global.RSVP = global.RSVP || {})));
}(this, (function (exports) { 'use strict';
function indexOf(callbacks, callback) {
for (var i = 0, l = callbacks.length; i < l; i++) {
if (callbacks[i] === callback) {
return i;
}
}
return -1;
}
function callbacksFor(object) {
var callbacks = object._promiseCallbacks;
if (!callbacks) {
callbacks = object._promiseCallbacks = {};
}
return callbacks;
}
/**
@class RSVP.EventTarget
*/
var EventTarget = {
/**
`RSVP.EventTarget.mixin` extends an object with EventTarget methods. For
example:
```javascript
let object = {};
RSVP.EventTarget.mixin(object);
object.on('finished', function(event) {
// handle event
});
object.trigger('finished', { detail: value });
```
`EventTarget.mixin` also works with prototypes:
```javascript
let Person = function() {};
RSVP.EventTarget.mixin(Person.prototype);
let yehuda = new Person();
let tom = new Person();
yehuda.on('poke', function(event) {
console.log('Yehuda says OW');
});
tom.on('poke', function(event) {
console.log('Tom says OW');
});
yehuda.trigger('poke');
tom.trigger('poke');
```
@method mixin
@for RSVP.EventTarget
@private
@param {Object} object object to extend with EventTarget methods
*/
mixin: function (object) {
object['on'] = this['on'];
object['off'] = this['off'];
object['trigger'] = this['trigger'];
object._promiseCallbacks = undefined;
return object;
},
/**
Registers a callback to be executed when `eventName` is triggered
```javascript
object.on('event', function(eventInfo){
// handle the event
});
object.trigger('event');
```
@method on
@for RSVP.EventTarget
@private
@param {String} eventName name of the event to listen for
@param {Function} callback function to be called when the event is triggered.
*/
on: function (eventName, callback) {
if (typeof callback !== 'function') {
throw new TypeError('Callback must be a function');
}
var allCallbacks = callbacksFor(this),
callbacks = void 0;
callbacks = allCallbacks[eventName];
if (!callbacks) {
callbacks = allCallbacks[eventName] = [];
}
if (indexOf(callbacks, callback) === -1) {
callbacks.push(callback);
}
},
/**
You can use `off` to stop firing a particular callback for an event:
```javascript
function doStuff() { /* do stuff! */ }
object.on('stuff', doStuff);
object.trigger('stuff'); // doStuff will be called
// Unregister ONLY the doStuff callback
object.off('stuff', doStuff);
object.trigger('stuff'); // doStuff will NOT be called
```
If you don't pass a `callback` argument to `off`, ALL callbacks for the
event will be removed and none will be executed when the event fires. For example:
```javascript
let callback1 = function(){};
let callback2 = function(){};
object.on('stuff', callback1);
object.on('stuff', callback2);
object.trigger('stuff'); // callback1 and callback2 will be executed.
object.off('stuff');
object.trigger('stuff'); // callback1 and callback2 will not be executed!
```
@method off
@for RSVP.EventTarget
@private
@param {String} eventName event to stop listening to
@param {Function} callback optional argument. If given, only the function
given will be removed from the event's callback queue. If no `callback`
argument is given, all callbacks will be removed from the event's callback
queue.
*/
off: function (eventName, callback) {
var allCallbacks = callbacksFor(this),
callbacks = void 0,
index = void 0;
if (!callback) {
allCallbacks[eventName] = [];
return;
}
callbacks = allCallbacks[eventName];
index = indexOf(callbacks, callback);
if (index !== -1) {
callbacks.splice(index, 1);
}
},
/**
Use `trigger` to fire custom events. For example:
```javascript
object.on('foo', function(){
console.log('foo event happened!');
});
object.trigger('foo');
// 'foo event happened!' logged to the console
```
You can also pass a value as a second argument to `trigger` that will be
passed as an argument to all event listeners for the event:
```javascript
object.on('foo', function(value){
console.log(value.name);
});
object.trigger('foo', { name: 'bar' });
// 'bar' logged to the console
```
@method trigger
@for RSVP.EventTarget
@private
@param {String} eventName name of the event to be triggered
@param {*} options optional value to be passed to any event handlers for
the given `eventName`
*/
trigger: function (eventName, options, label) {
var allCallbacks = callbacksFor(this),
callbacks = void 0,
callback = void 0;
if (callbacks = allCallbacks[eventName]) {
// Don't cache the callbacks.length since it may grow
for (var i = 0; i < callbacks.length; i++) {
callback = callbacks[i];
callback(options, label);
}
}
}
};
var config = {
instrument: false
};
EventTarget['mixin'](config);
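/**
  `configure` doubles as a getter and a setter for the `config` object above:
  with two arguments it writes a value, with one it reads it back. A small
  usage sketch (this assumes `configure` is re-exported on the public `RSVP`
  namespace, which is not shown in this file):
  ```javascript
  RSVP.configure('instrument', true); // write a config flag
  RSVP.configure('instrument');       // => true, read it back
  ```
*/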
function configure(name, value) {
if (arguments.length === 2) {
config[name] = value;
} else {
return config[name];
}
}
function objectOrFunction(x) {
var type = typeof x;
return x !== null && (type === 'object' || type === 'function');
}
function isFunction(x) {
return typeof x === 'function';
}
function isObject(x) {
return x !== null && typeof x === 'object';
}
function isMaybeThenable(x) {
return x !== null && typeof x === 'object';
}
var _isArray = void 0;
if (Array.isArray) {
_isArray = Array.isArray;
} else {
_isArray = function (x) {
return Object.prototype.toString.call(x) === '[object Array]';
};
}
var isArray = _isArray;
// Date.now is not available in browsers < IE9
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/now#Compatibility
var now = Date.now || function () {
return new Date().getTime();
};
var queue = [];
function scheduleFlush() {
setTimeout(function () {
for (var i = 0; i < queue.length; i++) {
var entry = queue[i];
var payload = entry.payload;
payload.guid = payload.key + payload.id;
payload.childGuid = payload.key + payload.childId;
if (payload.error) {
payload.stack = payload.error.stack;
}
config['trigger'](entry.name, entry.payload);
}
queue.length = 0;
}, 50);
}
function instrument(eventName, promise, child) {
if (1 === queue.push({
name: eventName,
payload: {
key: promise._guidKey,
id: promise._id,
eventName: eventName,
detail: promise._result,
childId: child && child._id,
label: promise._label,
timeStamp: now(),
error: config["instrument-with-stack"] ? new Error(promise._label) : null
} })) {
scheduleFlush();
}
}
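/**
  When `config.instrument` is `true`, `instrument` batches promise lifecycle
  events ('created', 'chained', 'fulfilled', 'rejected') onto a queue that
  `scheduleFlush` drains every 50ms, triggering each payload on the `config`
  EventTarget. A sketch of consuming these events (this assumes the library
  re-exports `configure` and the config object's `on` as `RSVP.configure` /
  `RSVP.on`, which is not shown in this file):
  ```javascript
  RSVP.configure('instrument', true);
  RSVP.on('fulfilled', function(event) {
    // each payload carries guid, label, detail, timeStamp, ...
    console.log('promise', event.guid, 'fulfilled at', event.timeStamp);
  });
  ```
*/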
/**
`RSVP.Promise.resolve` returns a promise that will become resolved with the
passed `value`. It is shorthand for the following:
```javascript
let promise = new RSVP.Promise(function(resolve, reject){
resolve(1);
});
promise.then(function(value){
// value === 1
});
```
Instead of writing the above, your code now simply becomes the following:
```javascript
let promise = RSVP.Promise.resolve(1);
promise.then(function(value){
// value === 1
});
```
@method resolve
@static
@param {*} object value that the returned promise will be resolved with
@param {String} label optional string for identifying the returned promise.
Useful for tooling.
@return {Promise} a promise that will become fulfilled with the given
`value`
*/
function resolve$1(object, label) {
/*jshint validthis:true */
var Constructor = this;
if (object && typeof object === 'object' && object.constructor === Constructor) {
return object;
}
var promise = new Constructor(noop, label);
resolve(promise, object);
return promise;
}
function withOwnPromise() {
return new TypeError("A promise's callback cannot return that same promise.");
}
function noop() {}
var PENDING = void 0;
var FULFILLED = 1;
var REJECTED = 2;
var GET_THEN_ERROR = new ErrorObject();
function getThen(promise) {
try {
return promise.then;
} catch (error) {
GET_THEN_ERROR.error = error;
return GET_THEN_ERROR;
}
}
function tryThen(then$$1, value, fulfillmentHandler, rejectionHandler) {
try {
then$$1.call(value, fulfillmentHandler, rejectionHandler);
} catch (e) {
return e;
}
}
function handleForeignThenable(promise, thenable, then$$1) {
config.async(function (promise) {
var sealed = false;
var error = tryThen(then$$1, thenable, function (value) {
if (sealed) {
return;
}
sealed = true;
if (thenable !== value) {
resolve(promise, value, undefined);
} else {
fulfill(promise, value);
}
}, function (reason) {
if (sealed) {
return;
}
sealed = true;
reject(promise, reason);
}, 'Settle: ' + (promise._label || ' unknown promise'));
if (!sealed && error) {
sealed = true;
reject(promise, error);
}
}, promise);
}
function handleOwnThenable(promise, thenable) {
if (thenable._state === FULFILLED) {
fulfill(promise, thenable._result);
} else if (thenable._state === REJECTED) {
thenable._onError = null;
reject(promise, thenable._result);
} else {
subscribe(thenable, undefined, function (value) {
if (thenable !== value) {
resolve(promise, value, undefined);
} else {
fulfill(promise, value);
}
}, function (reason) {
return reject(promise, reason);
});
}
}
function handleMaybeThenable(promise, maybeThenable, then$$1) {
var isOwnThenable = maybeThenable.constructor === promise.constructor && then$$1 === then && promise.constructor.resolve === resolve$1;
if (isOwnThenable) {
handleOwnThenable(promise, maybeThenable);
} else if (then$$1 === GET_THEN_ERROR) {
reject(promise, GET_THEN_ERROR.error);
GET_THEN_ERROR.error = null;
} else if (isFunction(then$$1)) {
handleForeignThenable(promise, maybeThenable, then$$1);
} else {
fulfill(promise, maybeThenable);
}
}
function resolve(promise, value) {
if (promise === value) {
fulfill(promise, value);
} else if (objectOrFunction(value)) {
handleMaybeThenable(promise, value, getThen(value));
} else {
fulfill(promise, value);
}
}
function publishRejection(promise) {
if (promise._onError) {
promise._onError(promise._result);
}
publish(promise);
}
function fulfill(promise, value) {
if (promise._state !== PENDING) {
return;
}
promise._result = value;
promise._state = FULFILLED;
if (promise._subscribers.length === 0) {
if (config.instrument) {
instrument('fulfilled', promise);
}
} else {
config.async(publish, promise);
}
}
function reject(promise, reason) {
if (promise._state !== PENDING) {
return;
}
promise._state = REJECTED;
promise._result = reason;
config.async(publishRejection, promise);
}
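// Each subscription occupies three consecutive slots in `parent._subscribers`:
// [child, onFulfillment, onRejection, child, onFulfillment, onRejection, ...].
// `publish` walks the array in strides of three and selects the handler with
// `subscribers[i + settled]`, where settled is FULFILLED (1) or REJECTED (2).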
function subscribe(parent, child, onFulfillment, onRejection) {
var subscribers = parent._subscribers;
var length = subscribers.length;
parent._onError = null;
subscribers[length] = child;
subscribers[length + FULFILLED] = onFulfillment;
subscribers[length + REJECTED] = onRejection;
if (length === 0 && parent._state) {
config.async(publish, parent);
}
}
function publish(promise) {
var subscribers = promise._subscribers;
var settled = promise._state;
if (config.instrument) {
instrument(settled === FULFILLED ? 'fulfilled' : 'rejected', promise);
}
if (subscribers.length === 0) {
return;
}
var child = void 0,
callback = void 0,
result = promise._result;
for (var i = 0; i < subscribers.length; i += 3) {
child = subscribers[i];
callback = subscribers[i + settled];
if (child) {
invokeCallback(settled, child, callback, result);
} else {
callback(result);
}
}
promise._subscribers.length = 0;
}
function ErrorObject() {
this.error = null;
}
var TRY_CATCH_ERROR = new ErrorObject();
function tryCatch(callback, result) {
try {
return callback(result);
} catch (e) {
TRY_CATCH_ERROR.error = e;
return TRY_CATCH_ERROR;
}
}
function invokeCallback(state, promise, callback, result) {
var hasCallback = isFunction(callback);
var value = void 0,
error = void 0;
if (hasCallback) {
value = tryCatch(callback, result);
if (value === TRY_CATCH_ERROR) {
error = value.error;
value.error = null; // release
} else if (value === promise) {
reject(promise, withOwnPromise());
return;
}
} else {
value = result;
}
if (promise._state !== PENDING) {
// noop
} else if (hasCallback && error === undefined) {
resolve(promise, value);
} else if (error !== undefined) {
reject(promise, error);
} else if (state === FULFILLED) {
fulfill(promise, value);
} else if (state === REJECTED) {
reject(promise, value);
}
}
function initializePromise(promise, resolver) {
var resolved = false;
try {
resolver(function (value) {
if (resolved) {
return;
}
resolved = true;
resolve(promise, value);
}, function (reason) {
if (resolved) {
return;
}
resolved = true;
reject(promise, reason);
});
} catch (e) {
reject(promise, e);
}
}
function then(onFulfillment, onRejection, label) {
var parent = this;
var state = parent._state;
if (state === FULFILLED && !onFulfillment || state === REJECTED && !onRejection) {
config.instrument && instrument('chained', parent, parent);
return parent;
}
parent._onError = null;
var child = new parent.constructor(noop, label);
var result = parent._result;
config.instrument && instrument('chained', parent, child);
if (state === PENDING) {
subscribe(parent, child, onFulfillment, onRejection);
} else {
var callback = state === FULFILLED ? onFulfillment : onRejection;
config.async(function () {
return invokeCallback(state, child, callback, result);
});
}
return child;
}
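// Enumerator is the shared machinery behind `all`, `allSettled`, `hash` and
// `hashSettled` (the latter two via the PromiseHash subclass further down).
// It settles every entry of the input, collects each outcome into `_result`
// via `_makeResult`, and fulfills `this.promise` once `_remaining` hits zero.
// When `_abortOnReject` is true, the first rejection rejects the whole promise.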
var Enumerator = function () {
function Enumerator(Constructor, input, abortOnReject, label) {
this._instanceConstructor = Constructor;
this.promise = new Constructor(noop, label);
this._abortOnReject = abortOnReject;
this._init.apply(this, arguments);
}
Enumerator.prototype._init = function _init(Constructor, input) {
var len = input.length || 0;
this.length = len;
this._remaining = len;
this._result = new Array(len);
this._enumerate(input);
if (this._remaining === 0) {
fulfill(this.promise, this._result);
}
};
Enumerator.prototype._enumerate = function _enumerate(input) {
var length = this.length;
var promise = this.promise;
for (var i = 0; promise._state === PENDING && i < length; i++) {
this._eachEntry(input[i], i);
}
};
Enumerator.prototype._settleMaybeThenable = function _settleMaybeThenable(entry, i) {
var c = this._instanceConstructor;
var resolve$$1 = c.resolve;
if (resolve$$1 === resolve$1) {
var then$$1 = getThen(entry);
if (then$$1 === then && entry._state !== PENDING) {
entry._onError = null;
this._settledAt(entry._state, i, entry._result);
} else if (typeof then$$1 !== 'function') {
this._remaining--;
this._result[i] = this._makeResult(FULFILLED, i, entry);
} else if (c === Promise) {
var promise = new c(noop);
handleMaybeThenable(promise, entry, then$$1);
this._willSettleAt(promise, i);
} else {
this._willSettleAt(new c(function (resolve$$1) {
return resolve$$1(entry);
}), i);
}
} else {
this._willSettleAt(resolve$$1(entry), i);
}
};
Enumerator.prototype._eachEntry = function _eachEntry(entry, i) {
if (isMaybeThenable(entry)) {
this._settleMaybeThenable(entry, i);
} else {
this._remaining--;
this._result[i] = this._makeResult(FULFILLED, i, entry);
}
};
Enumerator.prototype._settledAt = function _settledAt(state, i, value) {
var promise = this.promise;
if (promise._state === PENDING) {
if (this._abortOnReject && state === REJECTED) {
reject(promise, value);
} else {
this._remaining--;
this._result[i] = this._makeResult(state, i, value);
if (this._remaining === 0) {
fulfill(promise, this._result);
}
}
}
};
Enumerator.prototype._makeResult = function _makeResult(state, i, value) {
return value;
};
Enumerator.prototype._willSettleAt = function _willSettleAt(promise, i) {
var enumerator = this;
subscribe(promise, undefined, function (value) {
return enumerator._settledAt(FULFILLED, i, value);
}, function (reason) {
return enumerator._settledAt(REJECTED, i, reason);
});
};
return Enumerator;
}();
function makeSettledResult(state, position, value) {
if (state === FULFILLED) {
return {
state: 'fulfilled',
value: value
};
} else {
return {
state: 'rejected',
reason: value
};
}
}
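// makeSettledResult replaces the default `_makeResult` (which returns the raw
// value) on the AllSettled and HashSettled prototypes below, so their results
// are wrapped as `{ state: 'fulfilled', value }` / `{ state: 'rejected', reason }`.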
/**
`RSVP.Promise.all` accepts an array of promises, and returns a new promise which
is fulfilled with an array of fulfillment values for the passed promises, or
rejected with the reason of the first passed promise to be rejected. It casts all
elements of the passed iterable to promises as it runs this algorithm.
Example:
```javascript
let promise1 = RSVP.resolve(1);
let promise2 = RSVP.resolve(2);
let promise3 = RSVP.resolve(3);
let promises = [ promise1, promise2, promise3 ];
RSVP.Promise.all(promises).then(function(array){
// The array here would be [ 1, 2, 3 ];
});
```
If any of the `promises` given to `RSVP.all` are rejected, the first promise
that is rejected will be given as an argument to the returned promise's
rejection handler. For example:
```javascript
let promise1 = RSVP.resolve(1);
let promise2 = RSVP.reject(new Error("2"));
let promise3 = RSVP.reject(new Error("3"));
let promises = [ promise1, promise2, promise3 ];
RSVP.Promise.all(promises).then(function(array){
// Code here never runs because there are rejected promises!
}, function(error) {
// error.message === "2"
});
```
@method all
@static
@param {Array} entries array of promises
@param {String} label optional string for labeling the promise.
Useful for tooling.
@return {Promise} promise that is fulfilled when all `promises` have been
fulfilled, or rejected if any of them become rejected.
@static
*/
function all(entries, label) {
if (!isArray(entries)) {
return this.reject(new TypeError("Promise.all must be called with an array"), label);
}
return new Enumerator(this, entries, true /* abort on reject */, label).promise;
}
/**
`RSVP.Promise.race` returns a new promise which is settled in the same way as the
first passed promise to settle.
Example:
```javascript
let promise1 = new RSVP.Promise(function(resolve, reject){
setTimeout(function(){
resolve('promise 1');
}, 200);
});
let promise2 = new RSVP.Promise(function(resolve, reject){
setTimeout(function(){
resolve('promise 2');
}, 100);
});
RSVP.Promise.race([promise1, promise2]).then(function(result){
// result === 'promise 2' because it was resolved before promise1
// was resolved.
});
```
`RSVP.Promise.race` is deterministic in that only the state of the first
promise to settle matters. For example, even if other promises in the array
eventually fulfill, the returned promise will become rejected if the first
promise to settle was rejected:
```javascript
let promise1 = new RSVP.Promise(function(resolve, reject){
setTimeout(function(){
resolve('promise 1');
}, 200);
});
let promise2 = new RSVP.Promise(function(resolve, reject){
setTimeout(function(){
reject(new Error('promise 2'));
}, 100);
});
RSVP.Promise.race([promise1, promise2]).then(function(result){
// Code here never runs
}, function(reason){
// reason.message === 'promise 2' because promise 2 became rejected before
// promise 1 became fulfilled
});
```
An example real-world use case is implementing timeouts:
```javascript
RSVP.Promise.race([ajax('foo.json'), timeout(5000)])
```
@method race
@static
@param {Array} entries array of promises to observe
@param {String} label optional string for describing the promise returned.
Useful for tooling.
@return {Promise} a promise which settles in the same way as the first passed
promise to settle.
*/
function race(entries, label) {
/*jshint validthis:true */
var Constructor = this;
var promise = new Constructor(noop, label);
if (!isArray(entries)) {
reject(promise, new TypeError('Promise.race must be called with an array'));
return promise;
}
for (var i = 0; promise._state === PENDING && i < entries.length; i++) {
subscribe(Constructor.resolve(entries[i]), undefined, function (value) {
return resolve(promise, value);
}, function (reason) {
return reject(promise, reason);
});
}
return promise;
}
/**
`RSVP.Promise.reject` returns a promise rejected with the passed `reason`.
It is shorthand for the following:
```javascript
let promise = new RSVP.Promise(function(resolve, reject){
reject(new Error('WHOOPS'));
});
promise.then(function(value){
// Code here doesn't run because the promise is rejected!
}, function(reason){
// reason.message === 'WHOOPS'
});
```
Instead of writing the above, your code now simply becomes the following:
```javascript
let promise = RSVP.Promise.reject(new Error('WHOOPS'));
promise.then(function(value){
// Code here doesn't run because the promise is rejected!
}, function(reason){
// reason.message === 'WHOOPS'
});
```
@method reject
@static
@param {*} reason value that the returned promise will be rejected with.
@param {String} label optional string for identifying the returned promise.
Useful for tooling.
@return {Promise} a promise rejected with the given `reason`.
*/
function reject$1(reason, label) {
/*jshint validthis:true */
var Constructor = this;
var promise = new Constructor(noop, label);
reject(promise, reason);
return promise;
}
var guidKey = 'rsvp_' + now() + '-';
var counter = 0;
function needsResolver() {
throw new TypeError('You must pass a resolver function as the first argument to the promise constructor');
}
function needsNew() {
throw new TypeError("Failed to construct 'Promise': Please use the 'new' operator, this object constructor cannot be called as a function.");
}
/**
Promise objects represent the eventual result of an asynchronous operation. The
primary way of interacting with a promise is through its `then` method, which
registers callbacks to receive either a promise’s eventual value or the reason
why the promise cannot be fulfilled.
Terminology
-----------
- `promise` is an object or function with a `then` method whose behavior conforms to this specification.
- `thenable` is an object or function that defines a `then` method.
- `value` is any legal JavaScript value (including undefined, a thenable, or a promise).
- `exception` is a value that is thrown using the throw statement.
- `reason` is a value that indicates why a promise was rejected.
- `settled` is the final resting state of a promise, fulfilled or rejected.
A promise can be in one of three states: pending, fulfilled, or rejected.
Promises that are fulfilled have a fulfillment value and are in the fulfilled
state. Promises that are rejected have a rejection reason and are in the
rejected state. A fulfillment value is never a thenable.
Promises can also be said to *resolve* a value. If this value is also a
promise, then the original promise's settled state will match the value's
settled state. So a promise that *resolves* a promise that rejects will
itself reject, and a promise that *resolves* a promise that fulfills will
itself fulfill.
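For example, resolving with a rejected promise causes the outer promise to
reject as well (a minimal sketch of the behavior just described):
```javascript
let inner = RSVP.Promise.reject(new Error('boom'));
let outer = new RSVP.Promise(function(resolve) {
resolve(inner); // resolves with a promise, so `outer` adopts its fate
});
outer.catch(function(reason) {
// reason.message === 'boom'
});
```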
Basic Usage:
------------
```js
let promise = new Promise(function(resolve, reject) {
// on success
resolve(value);
// on failure
reject(reason);
});
promise.then(function(value) {
// on fulfillment
}, function(reason) {
// on rejection
});
```
Advanced Usage:
---------------
Promises shine when abstracting away asynchronous interactions such as
`XMLHttpRequest`s.
```js
function getJSON(url) {
return new Promise(function(resolve, reject){
let xhr = new XMLHttpRequest();
xhr.open('GET', url);
xhr.onreadystatechange = handler;
xhr.responseType = 'json';
xhr.setRequestHeader('Accept', 'application/json');
xhr.send();
function handler() {
if (this.readyState === this.DONE) {
if (this.status === 200) {
resolve(this.response);
} else {
reject(new Error('getJSON: `' + url + '` failed with status: [' + this.status + ']'));
}
}
};
});
}
getJSON('/posts.json').then(function(json) {
// on fulfillment
}, function(reason) {
// on rejection
});
```
Unlike callbacks, promises are great composable primitives.
```js
Promise.all([
getJSON('/posts'),
getJSON('/comments')
]).then(function(values){
values[0] // => postsJSON
values[1] // => commentsJSON
return values;
});
```
@class RSVP.Promise
@param {function} resolver
@param {String} label optional string for labeling the promise.
Useful for tooling.
@constructor
*/
var Promise = function () {
function Promise(resolver, label) {
this._id = counter++;
this._label = label;
this._state = undefined;
this._result = undefined;
this._subscribers = [];
config.instrument && instrument('created', this);
if (noop !== resolver) {
typeof resolver !== 'function' && needsResolver();
this instanceof Promise ? initializePromise(this, resolver) : needsNew();
}
}
Promise.prototype._onError = function _onError(reason) {
var _this = this;
config.after(function () {
if (_this._onError) {
config.trigger('error', reason, _this._label);
}
});
};
/**
`catch` is simply sugar for `then(undefined, onRejection)` which makes it the same
as the catch block of a try/catch statement.
```js
function findAuthor(){
throw new Error('couldn\'t find that author');
}
// synchronous
try {
findAuthor();
} catch(reason) {
// something went wrong
}
// async with promises
findAuthor().catch(function(reason){
// something went wrong
});
```
@method catch
@param {Function} onRejection
@param {String} label optional string for labeling the promise.
Useful for tooling.
@return {Promise}
*/
Promise.prototype.catch = function _catch(onRejection, label) {
return this.then(undefined, onRejection, label);
};
/**
`finally` will be invoked regardless of the promise's fate, just as a native
try/catch/finally behaves.
Synchronous example:
```js
function findAuthor() {
if (Math.random() > 0.5) {
throw new Error();
}
return new Author();
}
try {
return findAuthor(); // succeed or fail
} catch(error) {
return findOtherAuthor();
} finally {
// always runs
// doesn't affect the return value
}
```
Asynchronous example:
```js
findAuthor().catch(function(reason){
return findOtherAuthor();
}).finally(function(){
// author was either found, or not
});
```
@method finally
@param {Function} callback
@param {String} label optional string for labeling the promise.
Useful for tooling.
@return {Promise}
*/
Promise.prototype.finally = function _finally(callback, label) {
var promise = this;
var constructor = promise.constructor;
return promise.then(function (value) {
return constructor.resolve(callback()).then(function () {
return value;
});
}, function (reason) {
return constructor.resolve(callback()).then(function () {
throw reason;
});
}, label);
};
return Promise;
}();
Promise.cast = resolve$1; // deprecated
Promise.all = all;
Promise.race = race;
Promise.resolve = resolve$1;
Promise.reject = reject$1;
Promise.prototype._guidKey = guidKey;
/**
The primary way of interacting with a promise is through its `then` method,
which registers callbacks to receive either a promise's eventual value or the
reason why the promise cannot be fulfilled.
```js
findUser().then(function(user){
// user is available
}, function(reason){
// user is unavailable, and you are given the reason why
});
```
Chaining
--------
The return value of `then` is itself a promise. This second, 'downstream'
promise is resolved with the return value of the first promise's fulfillment
or rejection handler, or rejected if the handler throws an exception.
```js
findUser().then(function (user) {
return user.name;
}, function (reason) {
return 'default name';
}).then(function (userName) {
// If `findUser` fulfilled, `userName` will be the user's name, otherwise it
// will be `'default name'`
});
findUser().then(function (user) {
throw new Error('Found user, but still unhappy');
}, function (reason) {
throw new Error('`findUser` rejected and we\'re unhappy');
}).then(function (value) {
// never reached
}, function (reason) {
// If `findUser` fulfilled, `reason.message` will be 'Found user, but still unhappy'.
// If `findUser` rejected, `reason.message` will be '`findUser` rejected and we\'re unhappy'.
});
```
If the downstream promise does not specify a rejection handler, rejection reasons will be propagated further downstream.
```js
findUser().then(function (user) {
throw new PedagogicalException('Upstream error');
}).then(function (value) {
// never reached
}).then(function (value) {
// never reached
}, function (reason) {
// The `PedagogicalException` is propagated all the way down to here
});
```
Assimilation
------------
Sometimes the value you want to propagate to a downstream promise can only be
retrieved asynchronously. This can be achieved by returning a promise in the
fulfillment or rejection handler. The downstream promise will then be pending
until the returned promise is settled. This is called *assimilation*.
```js
findUser().then(function (user) {
return findCommentsByAuthor(user);
}).then(function (comments) {
// The user's comments are now available
});
```
If the assimilated promise rejects, then the downstream promise will also reject.
```js
findUser().then(function (user) {
return findCommentsByAuthor(user);
}).then(function (comments) {
// If `findCommentsByAuthor` fulfills, we'll have the value here
}, function (reason) {
// If `findCommentsByAuthor` rejects, we'll have the reason here
});
```
Simple Example
--------------
Synchronous Example
```javascript
let result;
try {
result = findResult();
// success
} catch(reason) {
// failure
}
```
Errback Example
```js
findResult(function(result, err){
if (err) {
// failure
} else {
// success
}
});
```
Promise Example
```javascript
findResult().then(function(result){
// success
}, function(reason){
// failure
});
```
Advanced Example
--------------
Synchronous Example
```javascript
let author, books;
try {
author = findAuthor();
books = findBooksByAuthor(author);
// success
} catch(reason) {
// failure
}
```
Errback Example
```js
function foundBooks(books) {
}
function failure(reason) {
}
findAuthor(function(author, err){
if (err) {
failure(err);
// failure
} else {
try {
findBooksByAuthor(author, function(books, err) {
if (err) {
failure(err);
} else {
try {
foundBooks(books);
} catch(reason) {
failure(reason);
}
}
});
} catch(error) {
failure(error);
}
// success
}
});
```
Promise Example
```javascript
findAuthor().
then(findBooksByAuthor).
then(function(books){
// found books
}).catch(function(reason){
// something went wrong
});
```
@method then
@param {Function} onFulfillment
@param {Function} onRejection
@param {String} label optional string for labeling the promise.
Useful for tooling.
@return {Promise}
*/
Promise.prototype.then = then;
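// The Result objects below reify thrown exceptions for the denodeify helpers:
// getThen$1 and tryApply store a caught error on a sentinel Result and return
// that sentinel instead of re-throwing, so callers can compare against the
// sentinel rather than wrapping every call site in try/catch.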
function Result() {
this.value = undefined;
}
var ERROR = new Result();
var GET_THEN_ERROR$1 = new Result();
function getThen$1(obj) {
try {
return obj.then;
} catch (error) {
GET_THEN_ERROR$1.value = error;
return GET_THEN_ERROR$1;
}
}
function tryApply(f, s, a) {
try {
f.apply(s, a);
} catch (error) {
ERROR.value = error;
return ERROR;
}
}
function makeObject(_, argumentNames) {
var obj = {};
var length = _.length;
var args = new Array(length);
for (var x = 0; x < length; x++) {
args[x] = _[x];
}
for (var i = 0; i < argumentNames.length; i++) {
var name = argumentNames[i];
obj[name] = args[i + 1];
}
return obj;
}
function arrayResult(_) {
var length = _.length;
var args = new Array(length - 1);
for (var i = 1; i < length; i++) {
args[i - 1] = _[i];
}
return args;
}
function wrapThenable(then, promise) {
return {
then: function (onFulFillment, onRejection) {
return then.call(promise, onFulFillment, onRejection);
}
};
}
/**
`RSVP.denodeify` takes a 'node-style' function and returns a function that
will return an `RSVP.Promise`. You can use `denodeify` in Node.js or the
browser when you'd prefer to use promises instead of callbacks. For example,
`denodeify` transforms the following:
```javascript
let fs = require('fs');
fs.readFile('myfile.txt', function(err, data){
if (err) return handleError(err);
handleData(data);
});
```
into:
```javascript
let fs = require('fs');
let readFile = RSVP.denodeify(fs.readFile);
readFile('myfile.txt').then(handleData, handleError);
```
If the node function has multiple success parameters, then `denodeify`
just returns the first one:
```javascript
let request = RSVP.denodeify(require('request'));
request('http://example.com').then(function(res) {
// ...
});
```
However, if you need all success parameters, setting `denodeify`'s
second parameter to `true` causes it to return all success parameters
as an array:
```javascript
let request = RSVP.denodeify(require('request'), true);
request('http://example.com').then(function(result) {
// result[0] -> res
// result[1] -> body
});
```
Or if you pass it an array with names it returns the parameters as a hash:
```javascript
let request = RSVP.denodeify(require('request'), ['res', 'body']);
request('http://example.com').then(function(result) {
// result.res
// result.body
});
```
Sometimes you need to retain the `this`:
```javascript
let app = require('express')();
let render = RSVP.denodeify(app.render.bind(app));
```
The denodified function inherits from the original function. It works in all
environments, except IE 10 and below. Consequently all properties of the original
function are available to you. However, any properties you change on the
denodeified function won't be changed on the original function. Example:
```javascript
let request = RSVP.denodeify(require('request')),
cookieJar = request.jar(); // <- Inheritance is used here
request('http://example.com', {jar: cookieJar}).then(function(res) {
// cookieJar.cookies now holds the cookies returned by example.com
});
```
Using `denodeify` makes it easier to compose asynchronous operations instead
of using callbacks. For example, instead of:
```javascript
let fs = require('fs');
fs.readFile('myfile.txt', function(err, data){
if (err) { ... } // Handle error
fs.writeFile('myfile2.txt', data, function(err){
if (err) { ... } // Handle error
console.log('done')
});
});
```
you can chain the operations together using `then` from the returned promise:
```javascript
let fs = require('fs');
let readFile = RSVP.denodeify(fs.readFile);
let writeFile = RSVP.denodeify(fs.writeFile);
readFile('myfile.txt').then(function(data){
return writeFile('myfile2.txt', data);
}).then(function(){
console.log('done')
}).catch(function(error){
// Handle error
});
```
@method denodeify
@static
@for RSVP
@param {Function} nodeFunc a 'node-style' function that takes a callback as
its last argument. The callback expects an error to be passed as its first
argument (if an error occurred, otherwise null), and the value from the
operation as its second argument ('function(err, value){ }').
@param {Boolean|Array} [options] An optional parameter that if set
to `true` causes the promise to fulfill with the callback's success arguments
as an array. This is useful if the node function has multiple success
parameters. If you set this parameter to an array with names, the promise will
fulfill with a hash with these names as keys and the success parameters as
values.
@return {Function} a function that wraps `nodeFunc` to return an
`RSVP.Promise`
@static
*/
function denodeify(nodeFunc, options) {
var fn = function () {
var self = this;
var l = arguments.length;
var args = new Array(l + 1);
var promiseInput = false;
for (var i = 0; i < l; ++i) {
var arg = arguments[i];
if (!promiseInput) {
// TODO: clean this up
promiseInput = needsPromiseInput(arg);
if (promiseInput === GET_THEN_ERROR$1) {
var p = new Promise(noop);
reject(p, GET_THEN_ERROR$1.value);
return p;
} else if (promiseInput && promiseInput !== true) {
arg = wrapThenable(promiseInput, arg);
}
}
args[i] = arg;
}
var promise = new Promise(noop);
args[l] = function (err, val) {
if (err) {
reject(promise, err);
} else if (options === undefined) {
resolve(promise, val);
} else if (options === true) {
resolve(promise, arrayResult(arguments));
} else if (isArray(options)) {
resolve(promise, makeObject(arguments, options));
} else {
resolve(promise, val);
}
};
if (promiseInput) {
return handlePromiseInput(promise, args, nodeFunc, self);
} else {
return handleValueInput(promise, args, nodeFunc, self);
}
};
fn.__proto__ = nodeFunc;
return fn;
}
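// Arguments passed to the denodeified function may themselves be promises or
// thenables: needsPromiseInput detects them, wrapThenable normalizes foreign
// thenables, and handlePromiseInput waits for all arguments via Promise.all
// before applying nodeFunc. Plain values go straight through handleValueInput.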
function handleValueInput(promise, args, nodeFunc, self) {
var result = tryApply(nodeFunc, self, args);
if (result === ERROR) {
reject(promise, result.value);
}
return promise;
}
function handlePromiseInput(promise, args, nodeFunc, self) {
return Promise.all(args).then(function (args) {
var result = tryApply(nodeFunc, self, args);
if (result === ERROR) {
reject(promise, result.value);
}
return promise;
});
}
function needsPromiseInput(arg) {
if (arg && typeof arg === 'object') {
if (arg.constructor === Promise) {
return true;
} else {
return getThen$1(arg);
}
} else {
return false;
}
}
/**
This is a convenient alias for `RSVP.Promise.all`.
@method all
@static
@for RSVP
@param {Array} array Array of promises.
@param {String} label An optional label. This is useful
for tooling.
*/
function all$1(array, label) {
return Promise.all(array, label);
}
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
var AllSettled = function (_Enumerator) {
_inherits(AllSettled, _Enumerator);
function AllSettled(Constructor, entries, label) {
return _possibleConstructorReturn(this, _Enumerator.call(this, Constructor, entries, false /* don't abort on reject */, label));
}
return AllSettled;
}(Enumerator);
AllSettled.prototype._makeResult = makeSettledResult;
/**
`RSVP.allSettled` is similar to `RSVP.all`, but instead of implementing
a fail-fast method, it waits until all the promises have returned and
shows you all the results. This is useful if you want to handle multiple
promises' failure states together as a set.
Returns a promise that is fulfilled when all the given promises have been
settled. The return promise is fulfilled with an array of the states of
the promises passed into the `promises` array argument.
Each state object will either indicate fulfillment or rejection, and
provide the corresponding value or reason. The states will take one of
the following formats:
```javascript
{ state: 'fulfilled', value: value }
or
{ state: 'rejected', reason: reason }
```
Example:
```javascript
let promise1 = RSVP.Promise.resolve(1);
let promise2 = RSVP.Promise.reject(new Error('2'));
let promise3 = RSVP.Promise.reject(new Error('3'));
let promises = [ promise1, promise2, promise3 ];
RSVP.allSettled(promises).then(function(array){
// array == [
// { state: 'fulfilled', value: 1 },
// { state: 'rejected', reason: Error },
// { state: 'rejected', reason: Error }
// ]
// Note that for the second item, reason.message will be '2', and for the
// third item, reason.message will be '3'.
}, function(error) {
// Not run. (This block would only be called if allSettled had failed,
// for instance if passed an incorrect argument type.)
});
```
@method allSettled
@static
@for RSVP
@param {Array} entries
@param {String} label - optional string that describes the promise.
Useful for tooling.
@return {Promise} promise that is fulfilled with an array of the settled
states of the constituent promises.
*/
function allSettled(entries, label) {
if (!isArray(entries)) {
return Promise.reject(new TypeError("Promise.allSettled must be called with an array"), label);
}
return new AllSettled(Promise, entries, label).promise;
}
/**
This is a convenient alias for `RSVP.Promise.race`.
@method race
@static
@for RSVP
@param {Array} array Array of promises.
@param {String} label An optional label. This is useful
for tooling.
*/
function race$1(array, label) {
return Promise.race(array, label);
}
function _possibleConstructorReturn$1(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits$1(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
var hasOwnProperty = Object.prototype.hasOwnProperty;
var PromiseHash = function (_Enumerator) {
_inherits$1(PromiseHash, _Enumerator);
function PromiseHash(Constructor, object) {
var abortOnReject = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : true;
var label = arguments[3];
return _possibleConstructorReturn$1(this, _Enumerator.call(this, Constructor, object, abortOnReject, label));
}
PromiseHash.prototype._init = function _init(Constructor, object) {
this._result = {};
this._enumerate(object);
if (this._remaining === 0) {
fulfill(this.promise, this._result);
}
};
PromiseHash.prototype._enumerate = function _enumerate(input) {
var promise = this.promise;
var results = [];
for (var key in input) {
if (hasOwnProperty.call(input, key)) {
results.push({
position: key,
entry: input[key]
});
}
}
var length = results.length;
this._remaining = length;
var result = void 0;
for (var i = 0; promise._state === PENDING && i < length; i++) {
result = results[i];
this._eachEntry(result.entry, result.position);
}
};
return PromiseHash;
}(Enumerator);
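// PromiseHash reuses Enumerator's settling logic but enumerates an object's
// own enumerable keys, using each property name as the position so `_result`
// is built up as a hash keyed by those names rather than as an array.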
/**
`RSVP.hash` is similar to `RSVP.all`, but takes an object instead of an array
for its `promises` argument.
Returns a promise that is fulfilled when all the given promises have been
fulfilled, or rejected if any of them become rejected. The returned promise
is fulfilled with a hash that has the same key names as the `promises` object
argument. If any of the values in the object are not promises, they will
simply be copied over to the fulfilled object.
Example:
```javascript
let promises = {
myPromise: RSVP.resolve(1),
yourPromise: RSVP.resolve(2),
theirPromise: RSVP.resolve(3),
notAPromise: 4
};
RSVP.hash(promises).then(function(hash){
// hash here is an object that looks like:
// {
// myPromise: 1,
// yourPromise: 2,
// theirPromise: 3,
// notAPromise: 4
// }
});
```
If any of the `promises` given to `RSVP.hash` are rejected, the first promise
that is rejected will be given as the reason to the rejection handler.
Example:
```javascript
let promises = {
myPromise: RSVP.resolve(1),
rejectedPromise: RSVP.reject(new Error('rejectedPromise')),
anotherRejectedPromise: RSVP.reject(new Error('anotherRejectedPromise')),
};
RSVP.hash(promises).then(function(hash){
// Code here never runs because there are rejected promises!
}, function(reason) {
// reason.message === 'rejectedPromise'
});
```
An important note: `RSVP.hash` is intended for plain JavaScript objects that
are just a set of keys and values. `RSVP.hash` will NOT preserve prototype
chains.
Example:
```javascript
function MyConstructor(){
this.example = RSVP.resolve('Example');
}
MyConstructor.prototype = {
protoProperty: RSVP.resolve('Proto Property')
};
let myObject = new MyConstructor();
RSVP.hash(myObject).then(function(hash){
// protoProperty will not be present, instead you will just have an
// object that looks like:
// {
// example: 'Example'
// }
//
// hash.hasOwnProperty('protoProperty'); // false
// 'undefined' === typeof hash.protoProperty
});
```
@method hash
@static
@for RSVP
@param {Object} object
@param {String} label optional string that describes the promise.
Useful for tooling.
@return {Promise} promise that is fulfilled when all properties of `promises`
have been fulfilled, or rejected if any of them become rejected.
*/
function hash(object, label) {
if (!isObject(object)) {
return Promise.reject(new TypeError("Promise.hash must be called with an object"), label);
}
return new PromiseHash(Promise, object, label).promise;
}
function _possibleConstructorReturn$2(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits$2(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
var HashSettled = function (_PromiseHash) {
_inherits$2(HashSettled, _PromiseHash);
function HashSettled(Constructor, object, label) {
return _possibleConstructorReturn$2(this, _PromiseHash.call(this, Constructor, object, false, label));
}
return HashSettled;
}(PromiseHash);
HashSettled.prototype._makeResult = makeSettledResult;
/**
`RSVP.hashSettled` is similar to `RSVP.allSettled`, but takes an object
instead of an array for its `promises` argument.
Unlike `RSVP.all` or `RSVP.hash`, which implement a fail-fast method,
but like `RSVP.allSettled`, `hashSettled` waits until all the
constituent promises have returned and then shows you all the results
with their states and values/reasons. This is useful if you want to
handle multiple promises' failure states together as a set.
Returns a promise that is fulfilled when all the given promises have been
settled, or rejected if the passed parameters are invalid.
The returned promise is fulfilled with a hash that has the same key names as
the `promises` object argument. If any of the values in the object are not
promises, they will be copied over to the fulfilled object and marked with state
'fulfilled'.
Example:
```javascript
let promises = {
myPromise: RSVP.Promise.resolve(1),
yourPromise: RSVP.Promise.resolve(2),
theirPromise: RSVP.Promise.resolve(3),
notAPromise: 4
};
RSVP.hashSettled(promises).then(function(hash){
// hash here is an object that looks like:
// {
// myPromise: { state: 'fulfilled', value: 1 },
// yourPromise: { state: 'fulfilled', value: 2 },
// theirPromise: { state: 'fulfilled', value: 3 },
// notAPromise: { state: 'fulfilled', value: 4 }
// }
});
```
If any of the `promises` given to `RSVP.hashSettled` are rejected, the state will
be set to 'rejected' and the reason for rejection provided.
Example:
```javascript
let promises = {
myPromise: RSVP.Promise.resolve(1),
rejectedPromise: RSVP.Promise.reject(new Error('rejection')),
anotherRejectedPromise: RSVP.Promise.reject(new Error('more rejection')),
};
RSVP.hashSettled(promises).then(function(hash){
// hash here is an object that looks like:
// {
// myPromise: { state: 'fulfilled', value: 1 },
// rejectedPromise: { state: 'rejected', reason: Error },
// anotherRejectedPromise: { state: 'rejected', reason: Error },
// }
// Note that for rejectedPromise, reason.message == 'rejection',
// and for anotherRejectedPromise, reason.message == 'more rejection'.
});
```
An important note: `RSVP.hashSettled` is intended for plain JavaScript objects that
are just a set of keys and values. `RSVP.hashSettled` will NOT preserve prototype
chains.
Example:
```javascript
function MyConstructor(){
this.example = RSVP.Promise.resolve('Example');
}
MyConstructor.prototype = {
protoProperty: RSVP.Promise.resolve('Proto Property')
};
let myObject = new MyConstructor();
RSVP.hashSettled(myObject).then(function(hash){
// protoProperty will not be present, instead you will just have an
// object that looks like:
// {
// example: { state: 'fulfilled', value: 'Example' }
// }
//
// hash.hasOwnProperty('protoProperty'); // false
// 'undefined' === typeof hash.protoProperty
});
```
@method hashSettled
@for RSVP
@param {Object} object
@param {String} label optional string that describes the promise.
Useful for tooling.
@return {Promise} promise that is fulfilled when all properties of `promises`
have been settled.
@static
*/
function hashSettled(object, label) {
if (!isObject(object)) {
return Promise.reject(new TypeError("RSVP.hashSettled must be called with an object"), label);
}
return new HashSettled(Promise, object, label).promise;
}
/**
`RSVP.rethrow` will rethrow an error on the next turn of the JavaScript event
loop in order to aid debugging.
Promises A+ specifies that any exceptions that occur with a promise must be
caught by the promises implementation and bubbled to the last handler. For
this reason, it is recommended that you always specify a second rejection
handler function to `then`. However, `RSVP.rethrow` will throw the exception
outside of the promise, so it bubbles up to your console if in the browser,
or domain/cause uncaught exception in Node. `rethrow` will also throw the
error again so the error can be handled by the promise per the spec.
```javascript
function throws(){
throw new Error('Whoops!');
}
let promise = new RSVP.Promise(function(resolve, reject){
throws();
});
promise.catch(RSVP.rethrow).then(function(){
// Code here doesn't run because the promise became rejected due to an
// error!
}, function (err){
// handle the error here
});
```
The 'Whoops' error will be thrown on the next turn of the event loop
and you can watch for it in your console. You can also handle it using a
rejection handler given to `.then` or `.catch` on the returned promise.
@method rethrow
@static
@for RSVP
@param {Error} reason reason the promise became rejected.
@throws Error
@static
*/
function rethrow(reason) {
setTimeout(function () {
throw reason;
});
throw reason;
}
/**
`RSVP.defer` returns an object similar to jQuery's `$.Deferred`.
`RSVP.defer` should be used when porting over code reliant on `$.Deferred`'s
interface. New code should use the `RSVP.Promise` constructor instead.
The object returned from `RSVP.defer` is a plain object with three properties:
* promise - an `RSVP.Promise`.
* reject - a function that causes the `promise` property on this object to
become rejected
* resolve - a function that causes the `promise` property on this object to
become fulfilled.
Example:
```javascript
let deferred = RSVP.defer();
deferred.resolve("Success!");
deferred.promise.then(function(value){
// value here is "Success!"
});
```
@method defer
@static
@for RSVP
@param {String} label optional string for labeling the promise.
Useful for tooling.
@return {Object}
*/
function defer(label) {
var deferred = { resolve: undefined, reject: undefined };
deferred.promise = new Promise(function (resolve, reject) {
deferred.resolve = resolve;
deferred.reject = reject;
}, label);
return deferred;
}
/**
`RSVP.map` is similar to JavaScript's native `map` method, except that it
waits for all promises to become fulfilled before running the `mapFn` on
each item given to `promises`. `RSVP.map` returns a promise that will
become fulfilled with the result of running `mapFn` on the values the promises
become fulfilled with.
For example:
```javascript
let promise1 = RSVP.resolve(1);
let promise2 = RSVP.resolve(2);
let promise3 = RSVP.resolve(3);
let promises = [ promise1, promise2, promise3 ];
let mapFn = function(item){
return item + 1;
};
RSVP.map(promises, mapFn).then(function(result){
// result is [ 2, 3, 4 ]
});
```
If any of the `promises` given to `RSVP.map` are rejected, the first promise
that is rejected will be given as an argument to the returned promise's
rejection handler. For example:
```javascript
let promise1 = RSVP.resolve(1);
let promise2 = RSVP.reject(new Error('2'));
let promise3 = RSVP.reject(new Error('3'));
let promises = [ promise1, promise2, promise3 ];
let mapFn = function(item){
return item + 1;
};
RSVP.map(promises, mapFn).then(function(array){
// Code here never runs because there are rejected promises!
}, function(reason) {
// reason.message === '2'
});
```
`RSVP.map` will also wait if a promise is returned from `mapFn`. For example,
say you want to get all comments from a set of blog posts, but you need
the blog posts first because they contain a url to those comments.
```javascript
let mapFn = function(blogPost){
// getComments does some ajax and returns an RSVP.Promise that is fulfilled
// with some comments data
return getComments(blogPost.comments_url);
};
// getBlogPosts does some ajax and returns an RSVP.Promise that is fulfilled
// with some blog post data
RSVP.map(getBlogPosts(), mapFn).then(function(comments){
// comments is the result of asking the server for the comments
// of all blog posts returned from getBlogPosts()
});
```
@method map
@static
@for RSVP
@param {Array} promises
@param {Function} mapFn function to be called on each fulfilled promise.
@param {String} label optional string for labeling the promise.
Useful for tooling.
@return {Promise} promise that is fulfilled with the result of calling
`mapFn` on each fulfilled promise or value when they become fulfilled.
The promise will be rejected if any of the given `promises` become rejected.
@static
*/
function map(promises, mapFn, label) {
if (!isArray(promises)) {
return Promise.reject(new TypeError("RSVP.map must be called with an array"), label);
}
if (!isFunction(mapFn)) {
return Promise.reject(new TypeError("RSVP.map expects a function as a second argument"), label);
}
return Promise.all(promises, label).then(function (values) {
var length = values.length;
var results = new Array(length);
for (var i = 0; i < length; i++) {
results[i] = mapFn(values[i]);
}
return Promise.all(results, label);
});
}
/**
This is a convenient alias for `RSVP.Promise.resolve`.
@method resolve
@static
@for RSVP
@param {*} value value that the returned promise will be resolved with
@param {String} label optional string for identifying the returned promise.
Useful for tooling.
@return {Promise} a promise that will become fulfilled with the given
`value`
*/
function resolve$2(value, label) {
return Promise.resolve(value, label);
}
/**
This is a convenient alias for `RSVP.Promise.reject`.
@method reject
@static
@for RSVP
@param {*} reason value that the returned promise will be rejected with.
@param {String} label optional string for identifying the returned promise.
Useful for tooling.
@return {Promise} a promise rejected with the given `reason`.
*/
function reject$2(reason, label) {
return Promise.reject(reason, label);
}
/**
`RSVP.filter` is similar to JavaScript's native `filter` method, except that it
waits for all promises to become fulfilled before running the `filterFn` on
each item given to `promises`. `RSVP.filter` returns a promise that will
become fulfilled with the result of running `filterFn` on the values the
promises become fulfilled with.
For example:
```javascript
let promise1 = RSVP.resolve(1);
let promise2 = RSVP.resolve(2);
let promise3 = RSVP.resolve(3);
let promises = [promise1, promise2, promise3];
let filterFn = function(item){
return item > 1;
};
RSVP.filter(promises, filterFn).then(function(result){
// result is [ 2, 3 ]
});
```
If any of the `promises` given to `RSVP.filter` are rejected, the first promise
that is rejected will be given as an argument to the returned promise's
rejection handler. For example:
```javascript
let promise1 = RSVP.resolve(1);
let promise2 = RSVP.reject(new Error('2'));
let promise3 = RSVP.reject(new Error('3'));
let promises = [ promise1, promise2, promise3 ];
let filterFn = function(item){
return item > 1;
};
RSVP.filter(promises, filterFn).then(function(array){
// Code here never runs because there are rejected promises!
}, function(reason) {
// reason.message === '2'
});
```
`RSVP.filter` will also wait for any promises returned from `filterFn`.
For instance, you may want to fetch a list of users then return a subset
of those users based on some asynchronous operation:
```javascript
let alice = { name: 'alice' };
let bob = { name: 'bob' };
let users = [ alice, bob ];
let promises = users.map(function(user){
return RSVP.resolve(user);
});
let filterFn = function(user){
// Here, Alice has permissions to create a blog post, but Bob does not.
return getPrivilegesForUser(user).then(function(privs){
return privs.can_create_blog_post === true;
});
};
RSVP.filter(promises, filterFn).then(function(users){
// true, because the server told us only Alice can create a blog post.
users.length === 1;
// false, because Alice is the only user present in `users`
users[0] === bob;
});
```
@method filter
@static
@for RSVP
@param {Array} promises
@param {Function} filterFn - function to be called on each resolved value to
filter the final results.
@param {String} label optional string describing the promise. Useful for
tooling.
@return {Promise}
*/
function resolveAll(promises, label) {
return Promise.all(promises, label);
}
function resolveSingle(promise, label) {
return Promise.resolve(promise, label).then(function (promises) {
return resolveAll(promises, label);
});
}
function filter(promises, filterFn, label) {
if (!isArray(promises) && !(isObject(promises) && promises.then !== undefined)) {
return Promise.reject(new TypeError("RSVP.filter must be called with an array or promise"), label);
}
if (!isFunction(filterFn)) {
return Promise.reject(new TypeError("RSVP.filter expects function as a second argument"), label);
}
var promise = isArray(promises) ? resolveAll(promises, label) : resolveSingle(promises, label);
return promise.then(function (values) {
var length = values.length;
var filtered = new Array(length);
for (var i = 0; i < length; i++) {
filtered[i] = filterFn(values[i]);
}
return resolveAll(filtered, label).then(function (filtered) {
var results = new Array(length);
var newLength = 0;
for (var _i = 0; _i < length; _i++) {
if (filtered[_i]) {
results[newLength] = values[_i];
newLength++;
}
}
results.length = newLength;
return results;
});
});
}
var len = 0;
var vertxNext = void 0;
function asap(callback, arg) {
queue$1[len] = callback;
queue$1[len + 1] = arg;
len += 2;
if (len === 2) {
// If len is 2, that means that we need to schedule an async flush.
// If additional callbacks are queued before the queue is flushed, they
// will be processed by this flush that we are scheduling.
scheduleFlush$1();
}
}
var browserWindow = typeof window !== 'undefined' ? window : undefined;
var browserGlobal = browserWindow || {};
var BrowserMutationObserver = browserGlobal.MutationObserver || browserGlobal.WebKitMutationObserver;
var isNode = typeof self === 'undefined' && typeof process !== 'undefined' && {}.toString.call(process) === '[object process]';
// test for web worker but not in IE10
var isWorker = typeof Uint8ClampedArray !== 'undefined' && typeof importScripts !== 'undefined' && typeof MessageChannel !== 'undefined';
// node
function useNextTick() {
var nextTick = process.nextTick;
// node version 0.10.x displays a deprecation warning when nextTick is used recursively
// setImmediate should be used instead
var version = process.versions.node.match(/^(?:(\d+)\.)?(?:(\d+)\.)?(\*|\d+)$/);
if (Array.isArray(version) && version[1] === '0' && version[2] === '10') {
nextTick = setImmediate;
}
return function () {
return nextTick(flush);
};
}
// vertx
function useVertxTimer() {
if (typeof vertxNext !== 'undefined') {
return function () {
vertxNext(flush);
};
}
return useSetTimeout();
}
function useMutationObserver() {
var iterations = 0;
var observer = new BrowserMutationObserver(flush);
var node = document.createTextNode('');
observer.observe(node, { characterData: true });
return function () {
return node.data = iterations = ++iterations % 2;
};
}
// web worker
function useMessageChannel() {
var channel = new MessageChannel();
channel.port1.onmessage = flush;
return function () {
return channel.port2.postMessage(0);
};
}
function useSetTimeout() {
return function () {
return setTimeout(flush, 1);
};
}
var queue$1 = new Array(1000);
function flush() {
for (var i = 0; i < len; i += 2) {
var callback = queue$1[i];
var arg = queue$1[i + 1];
callback(arg);
queue$1[i] = undefined;
queue$1[i + 1] = undefined;
}
len = 0;
}
function attemptVertex() {
try {
var r = require;
var vertx = r('vertx');
vertxNext = vertx.runOnLoop || vertx.runOnContext;
return useVertxTimer();
} catch (e) {
return useSetTimeout();
}
}
var scheduleFlush$1 = void 0;
// Decide which async method to use to trigger processing of queued callbacks:
if (isNode) {
scheduleFlush$1 = useNextTick();
} else if (BrowserMutationObserver) {
scheduleFlush$1 = useMutationObserver();
} else if (isWorker) {
scheduleFlush$1 = useMessageChannel();
} else if (browserWindow === undefined && typeof require === 'function') {
scheduleFlush$1 = attemptVertex();
} else {
scheduleFlush$1 = useSetTimeout();
}
var platform = void 0;
/* global self */
if (typeof self === 'object') {
platform = self;
/* global global */
} else if (typeof global === 'object') {
platform = global;
} else {
throw new Error('no global: `self` or `global` found');
}
var _asap$cast$Promise$Ev;
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
// defaults
config.async = asap;
config.after = function (cb) {
return setTimeout(cb, 0);
};
var cast = resolve$2;
var async = function (callback, arg) {
return config.async(callback, arg);
};
function on() {
config['on'].apply(config, arguments);
}
function off() {
config['off'].apply(config, arguments);
}
// Set up instrumentation through `window.__PROMISE_INSTRUMENTATION__`
if (typeof window !== 'undefined' && typeof window['__PROMISE_INSTRUMENTATION__'] === 'object') {
var callbacks = window['__PROMISE_INSTRUMENTATION__'];
configure('instrument', true);
for (var eventName in callbacks) {
if (callbacks.hasOwnProperty(eventName)) {
on(eventName, callbacks[eventName]);
}
}
}
// the default export here is for backwards compat:
// https://github.com/tildeio/rsvp.js/issues/434
var rsvp = (_asap$cast$Promise$Ev = {
asap: asap,
cast: cast,
Promise: Promise,
EventTarget: EventTarget,
all: all$1,
allSettled: allSettled,
race: race$1,
hash: hash,
hashSettled: hashSettled,
rethrow: rethrow,
defer: defer,
denodeify: denodeify,
configure: configure,
on: on,
off: off,
resolve: resolve$2,
reject: reject$2,
map: map
}, _defineProperty(_asap$cast$Promise$Ev, 'async', async), _defineProperty(_asap$cast$Promise$Ev, 'filter', filter), _asap$cast$Promise$Ev);
exports['default'] = rsvp;
exports.asap = asap;
exports.cast = cast;
exports.Promise = Promise;
exports.EventTarget = EventTarget;
exports.all = all$1;
exports.allSettled = allSettled;
exports.race = race$1;
exports.hash = hash;
exports.hashSettled = hashSettled;
exports.rethrow = rethrow;
exports.defer = defer;
exports.denodeify = denodeify;
exports.configure = configure;
exports.on = on;
exports.off = off;
exports.resolve = resolve$2;
exports.reject = reject$2;
exports.map = map;
exports.async = async;
exports.filter = filter;
Object.defineProperty(exports, '__esModule', { value: true });
})));
//
var EPUBJS = EPUBJS || {};
EPUBJS.core = {};
var ELEMENT_NODE = 1;
var TEXT_NODE = 3;
var COMMENT_NODE = 8;
var DOCUMENT_NODE = 9;
//-- Get an element by its id
EPUBJS.core.getEl = function(elem) {
return document.getElementById(elem);
};
//-- Get all elements with a given class name
EPUBJS.core.getEls = function(classes) {
return document.getElementsByClassName(classes);
};
EPUBJS.core.request = function(url, type, withCredentials) {
var supportsURL = window.URL;
var BLOB_RESPONSE = supportsURL ? "blob" : "arraybuffer";
var deferred = new RSVP.defer();
var xhr = new XMLHttpRequest();
var uri;
//-- Check from PDF.js:
// https://github.com/mozilla/pdf.js/blob/master/web/compatibility.js
var xhrPrototype = XMLHttpRequest.prototype;
var handler = function() {
var r;
if (this.readyState != this.DONE) return;
if ((this.status === 200 || this.status === 0) && this.response) { // Android & Firefox reporting 0 for local & blob urls
if (type == 'xml'){
// If this.responseXML wasn't set, try to parse using a DOMParser from text
if(!this.responseXML) {
r = new DOMParser().parseFromString(this.response, "application/xml");
} else {
r = this.responseXML;
}
} else if (type == 'xhtml') {
if (!this.responseXML){
r = new DOMParser().parseFromString(this.response, "application/xhtml+xml");
} else {
r = this.responseXML;
}
} else if (type == 'html') {
if (!this.responseXML){
r = new DOMParser().parseFromString(this.response, "text/html");
} else {
r = this.responseXML;
}
} else if (type == 'json') {
r = JSON.parse(this.response);
} else if (type == 'blob') {
if (supportsURL) {
r = this.response;
} else {
//-- Safari doesn't support responseType blob, so create a blob from arraybuffer
r = new Blob([this.response]);
}
} else {
r = this.response;
}
deferred.resolve(r);
} else {
deferred.reject({
message : this.response,
stack : new Error().stack
});
}
};
if (!('overrideMimeType' in xhrPrototype)) {
// IE10 might have response, but not overrideMimeType
Object.defineProperty(xhrPrototype, 'overrideMimeType', {
value: function xmlHttpRequestOverrideMimeType(mimeType) {}
});
}
xhr.onreadystatechange = handler;
xhr.open("GET", url, true);
if(withCredentials) {
xhr.withCredentials = true;
}
// If type isn't set, determine it from the file extension
if(!type) {
uri = EPUBJS.core.uri(url);
type = uri.extension;
type = {
'htm': 'html'
}[type] || type;
}
if(type == 'blob'){
xhr.responseType = BLOB_RESPONSE;
}
if(type == "json") {
xhr.setRequestHeader("Accept", "application/json");
}
if(type == 'xml') {
xhr.responseType = "document";
xhr.overrideMimeType('text/xml'); // for OPF parsing
}
if(type == 'xhtml') {
xhr.responseType = "document";
}
if(type == 'html') {
xhr.responseType = "document";
}
if(type == "binary") {
xhr.responseType = "arraybuffer";
}
xhr.send();
return deferred.promise;
};
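// Example (illustrative; the path below is hypothetical): request a file and let the
// type be inferred from its extension, or pass it explicitly.
// EPUBJS.core.request("OEBPS/package.opf", "xml").then(function(doc){
// // doc is a parsed XML Document
// }, function(err){
// console.error(err.message);
// });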
EPUBJS.core.toArray = function(obj) {
var arr = [];
for (var member in obj) {
var newitm;
if ( obj.hasOwnProperty(member) ) {
newitm = obj[member];
newitm.ident = member;
arr.push(newitm);
}
}
return arr;
};
//-- Parse the different parts of a url, returning an object
EPUBJS.core.uri = function(url){
var uri = {
protocol : '',
host : '',
path : '',
origin : '',
directory : '',
base : '',
filename : '',
extension : '',
fragment : '',
href : url
},
blob = url.indexOf('blob:'),
doubleSlash = url.indexOf('://'),
search = url.indexOf('?'),
fragment = url.indexOf("#"),
withoutProtocol,
dot,
firstSlash;
if(blob === 0) {
uri.protocol = "blob";
uri.base = fragment != -1 ? url.slice(0, fragment) : url; // base blob url with any fragment removed
return uri;
}
if(fragment != -1) {
uri.fragment = url.slice(fragment + 1);
url = url.slice(0, fragment);
}
if(search != -1) {
uri.search = url.slice(search + 1);
url = url.slice(0, search);
href = uri.href;
}
if(doubleSlash != -1) {
uri.protocol = url.slice(0, doubleSlash);
withoutProtocol = url.slice(doubleSlash+3);
firstSlash = withoutProtocol.indexOf('/');
if(firstSlash === -1) {
uri.host = withoutProtocol;
uri.path = "";
} else {
uri.host = withoutProtocol.slice(0, firstSlash);
uri.path = withoutProtocol.slice(firstSlash);
}
uri.origin = uri.protocol + "://" + uri.host;
uri.directory = EPUBJS.core.folder(uri.path);
uri.base = uri.origin + uri.directory;
// return origin;
} else {
uri.path = url;
uri.directory = EPUBJS.core.folder(url);
uri.base = uri.directory;
}
//-- Filename
uri.filename = url.replace(uri.base, '');
dot = uri.filename.lastIndexOf('.');
if(dot != -1) {
uri.extension = uri.filename.slice(dot+1);
}
return uri;
};
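// Example (illustrative; the URL is hypothetical):
// var uri = EPUBJS.core.uri("http://example.com/books/moby/chapter1.xhtml#pgepubid00001");
// // uri.host === "example.com", uri.directory === "/books/moby/",
// // uri.filename === "chapter1.xhtml", uri.extension === "xhtml",
// // uri.fragment === "pgepubid00001"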
//-- Parse out the folder: returns everything up to and including the last slash
EPUBJS.core.folder = function(url){
var lastSlash = url.lastIndexOf('/');
if(lastSlash == -1) return '';
return url.slice(0, lastSlash + 1);
};
//-- https://github.com/ebidel/filer.js/blob/master/src/filer.js#L128
EPUBJS.core.dataURLToBlob = function(dataURL) {
var BASE64_MARKER = ';base64,',
parts, contentType, raw, rawLength, uInt8Array;
if (dataURL.indexOf(BASE64_MARKER) == -1) {
parts = dataURL.split(',');
contentType = parts[0].split(':')[1];
raw = parts[1];
return new Blob([raw], {type: contentType});
}
parts = dataURL.split(BASE64_MARKER);
contentType = parts[0].split(':')[1];
raw = window.atob(parts[1]);
rawLength = raw.length;
uInt8Array = new Uint8Array(rawLength);
for (var i = 0; i < rawLength; ++i) {
uInt8Array[i] = raw.charCodeAt(i);
}
return new Blob([uInt8Array], {type: contentType});
};
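// Example (illustrative): convert a base64 data URL into a Blob
// var blob = EPUBJS.core.dataURLToBlob("data:text/plain;base64,aGVsbG8=");
// // blob.type === "text/plain" and the blob contains the bytes of "hello"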
//-- Load scripts async: http://stackoverflow.com/questions/7718935/load-scripts-asynchronously
EPUBJS.core.addScript = function(src, callback, target) {
var s, r;
r = false;
s = document.createElement('script');
s.type = 'text/javascript';
s.async = false;
s.src = src;
s.onload = s.onreadystatechange = function() {
if ( !r && (!this.readyState || this.readyState == 'complete') ) {
r = true;
if(callback) callback();
}
};
target = target || document.body;
target.appendChild(s);
};
EPUBJS.core.addScripts = function(srcArr, callback, target) {
var total = srcArr.length,
curr = 0,
cb = function(){
curr++;
if(total == curr){
if(callback) callback();
}else{
EPUBJS.core.addScript(srcArr[curr], cb, target);
}
};
EPUBJS.core.addScript(srcArr[curr], cb, target);
};
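// Example (illustrative; the script paths are hypothetical): load scripts in order,
// then run a callback once the last one has finished loading.
// EPUBJS.core.addScripts(["js/libs/a.js", "js/libs/b.js"], function(){
// // both scripts are now available
// });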
EPUBJS.core.addCss = function(src, callback, target) {
var s, r;
r = false;
s = document.createElement('link');
s.type = 'text/css';
s.rel = "stylesheet";
s.href = src;
s.onload = s.onreadystatechange = function() {
if ( !r && (!this.readyState || this.readyState == 'complete') ) {
r = true;
if(callback) callback();
}
};
target = target || document.body;
target.appendChild(s);
};
EPUBJS.core.prefixed = function(unprefixed) {
var vendors = ["Webkit", "Moz", "O", "ms" ],
prefixes = ['-Webkit-', '-moz-', '-o-', '-ms-'],
upper = unprefixed[0].toUpperCase() + unprefixed.slice(1),
length = vendors.length;
if (typeof(document.documentElement.style[unprefixed]) != 'undefined') {
return unprefixed;
}
for ( var i=0; i < length; i++ ) {
if (typeof(document.documentElement.style[vendors[i] + upper]) != 'undefined') {
return vendors[i] + upper;
}
}
return unprefixed;
};
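// Example (illustrative): look up the supported property name for a CSS feature
// var transformProp = EPUBJS.core.prefixed("transform");
// // returns "transform" where it is supported unprefixed, or a vendor-prefixed
// // variant such as "WebkitTransform" where that is what the style object exposes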
EPUBJS.core.resolveUrl = function(base, path) {
var url,
segments = [],
uri = EPUBJS.core.uri(path),
folders = base.split("/"),
paths;
if(uri.host) {
return path;
}
folders.pop();
paths = path.split("/");
paths.forEach(function(p){
if(p === ".."){
folders.pop();
}else{
segments.push(p);
}
});
url = folders.concat(segments);
return url.join("/");
};
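// Example (illustrative; the paths are hypothetical): resolve a relative path against a base
// EPUBJS.core.resolveUrl("http://example.com/books/moby/package.opf", "../images/cover.jpg");
// // -> "http://example.com/books/images/cover.jpg"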
// http://stackoverflow.com/questions/105034/how-to-create-a-guid-uuid-in-javascript
EPUBJS.core.uuid = function() {
var d = new Date().getTime();
var uuid = 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
var r = (d + Math.random()*16)%16 | 0;
d = Math.floor(d/16);
return (c=='x' ? r : (r&0x7|0x8)).toString(16);
});
return uuid;
};
// Fast binary-search insert for a sorted array -- based on:
// http://stackoverflow.com/questions/1344500/efficient-way-to-insert-a-number-into-a-sorted-array-of-numbers
EPUBJS.core.insert = function(item, array, compareFunction) {
var location = EPUBJS.core.locationOf(item, array, compareFunction);
array.splice(location, 0, item);
return location;
};
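// Example (illustrative): insert a value into an already sorted array
// var sorted = [1, 3, 5];
// EPUBJS.core.insert(4, sorted); // returns 2; sorted is now [1, 3, 4, 5]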
EPUBJS.core.locationOf = function(item, array, compareFunction, _start, _end) {
var start = _start || 0;
var end = _end || array.length;
var pivot = parseInt(start + (end - start) / 2);
var compared;
if(!compareFunction){
compareFunction = function(a, b) {
if(a > b) return 1;
if(a < b) return -1;
if(a == b) return 0;
};
}
if(end-start <= 0) {
return pivot;
}
compared = compareFunction(array[pivot], item);
if(end-start === 1) {
return compared > 0 ? pivot : pivot + 1;
}
if(compared === 0) {
return pivot;
}
if(compared === -1) {
return EPUBJS.core.locationOf(item, array, compareFunction, pivot, end);
} else{
return EPUBJS.core.locationOf(item, array, compareFunction, start, pivot);
}
};
EPUBJS.core.indexOfSorted = function(item, array, compareFunction, _start, _end) {
var start = _start || 0;
var end = _end || array.length;
var pivot = parseInt(start + (end - start) / 2);
var compared;
if(!compareFunction){
compareFunction = function(a, b) {
if(a > b) return 1;
if(a < b) return -1;
if(a == b) return 0;
};
}
if(end-start <= 0) {
return -1; // Not found
}
compared = compareFunction(array[pivot], item);
if(end-start === 1) {
return compared === 0 ? pivot : -1;
}
if(compared === 0) {
return pivot; // Found
}
if(compared === -1) {
return EPUBJS.core.indexOfSorted(item, array, compareFunction, pivot, end);
} else{
return EPUBJS.core.indexOfSorted(item, array, compareFunction, start, pivot);
}
};
EPUBJS.core.queue = function(_scope){
var _q = [];
var scope = _scope;
// Add an item to the queue
var enqueue = function(funcName, args, context) {
_q.push({
"funcName" : funcName,
"args" : args,
"context" : context
});
return _q;
};
// Run one item
var dequeue = function(){
var inwait;
if(_q.length) {
inwait = _q.shift();
// Defer to any current tasks
// setTimeout(function(){
scope[inwait.funcName].apply(inwait.context || scope, inwait.args);
// }, 0);
}
};
// Run All
var flush = function(){
while(_q.length) {
dequeue();
}
};
// Clear all items in wait
var clear = function(){
_q = [];
};
var length = function(){
return _q.length;
};
return {
"enqueue" : enqueue,
"dequeue" : dequeue,
"flush" : flush,
"clear" : clear,
"length" : length
};
};
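// Example (illustrative; `book` and `display` are hypothetical): queue method calls
// on an object and run them later.
// var q = EPUBJS.core.queue(book);
// q.enqueue("display", ["chapter1.xhtml"]);
// q.flush(); // calls book.display("chapter1.xhtml")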
// From: https://code.google.com/p/fbug/source/browse/branches/firebug1.10/content/firebug/lib/xpath.js
/**
* Gets an XPath for an element which describes its hierarchical location.
*/
EPUBJS.core.getElementXPath = function(element) {
if (element && element.id) {
return '//*[@id="' + element.id + '"]';
} else {
return EPUBJS.core.getElementTreeXPath(element);
}
};
EPUBJS.core.getElementTreeXPath = function(element) {
var paths = [];
var isXhtml = (element.ownerDocument.documentElement.getAttribute('xmlns') === "http://www.w3.org/1999/xhtml");
var index, nodeName, tagName, pathIndex;
if(element.nodeType === Node.TEXT_NODE){
// index = Array.prototype.indexOf.call(element.parentNode.childNodes, element) + 1;
index = EPUBJS.core.indexOfTextNode(element) + 1;
paths.push("text()["+index+"]");
element = element.parentNode;
}
// Use nodeName (instead of localName) so namespace prefix is included (if any).
for (; element && element.nodeType == 1; element = element.parentNode)
{
index = 0;
for (var sibling = element.previousSibling; sibling; sibling = sibling.previousSibling)
{
// Ignore document type declaration.
if (sibling.nodeType == Node.DOCUMENT_TYPE_NODE) {
continue;
}
if (sibling.nodeName == element.nodeName) {
++index;
}
}
nodeName = element.nodeName.toLowerCase();
tagName = (isXhtml ? "xhtml:" + nodeName : nodeName);
pathIndex = (index ? "[" + (index+1) + "]" : "");
paths.splice(0, 0, tagName + pathIndex);
}
return paths.length ? "./" + paths.join("/") : null;
};
EPUBJS.core.nsResolver = function(prefix) {
var ns = {
'xhtml' : 'http://www.w3.org/1999/xhtml',
'epub': 'http://www.idpf.org/2007/ops'
};
return ns[prefix] || null;
};
//https://stackoverflow.com/questions/13482352/xquery-looking-for-text-with-single-quote/13483496#13483496
EPUBJS.core.cleanStringForXpath = function(str) {
var parts = str.match(/[^'"]+|['"]/g);
parts = parts.map(function(part){
if (part === "'") {
return '\"\'\"'; // output "'"
}
if (part === '"') {
return "\'\"\'"; // output '"'
}
return "\'" + part + "\'";
});
return "concat(\'\'," + parts.join(",") + ")";
};
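// Example (illustrative): build an XPath string literal for text containing quotes
// EPUBJS.core.cleanStringForXpath('He said "hi"');
// // -> concat('','He said ','"','hi','"')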
EPUBJS.core.indexOfTextNode = function(textNode){
var parent = textNode.parentNode;
var children = parent.childNodes;
var sib;
var index = -1;
for (var i = 0; i < children.length; i++) {
sib = children[i];
if(sib.nodeType === Node.TEXT_NODE){
index++;
}
if(sib == textNode) break;
}
return index;
};
// Underscore
EPUBJS.core.defaults = function(obj) {
for (var i = 1, length = arguments.length; i < length; i++) {
var source = arguments[i];
for (var prop in source) {
if (obj[prop] === void 0) obj[prop] = source[prop];
}
}
return obj;
};
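// Example (illustrative): fill in missing options without overwriting existing ones
// EPUBJS.core.defaults({ restore: true }, { restore: false, history: true });
// // -> { restore: true, history: true }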
EPUBJS.core.extend = function(target) {
var sources = [].slice.call(arguments, 1);
sources.forEach(function (source) {
if(!source) return;
Object.getOwnPropertyNames(source).forEach(function(propName) {
Object.defineProperty(target, propName, Object.getOwnPropertyDescriptor(source, propName));
});
});
return target;
};
EPUBJS.core.clone = function(obj) {
return EPUBJS.core.isArray(obj) ? obj.slice() : EPUBJS.core.extend({}, obj);
};
EPUBJS.core.isElement = function(obj) {
return !!(obj && obj.nodeType == 1);
};
EPUBJS.core.isNumber = function(n) {
return !isNaN(parseFloat(n)) && isFinite(n);
};
EPUBJS.core.isString = function(str) {
return (typeof str === 'string' || str instanceof String);
};
EPUBJS.core.isArray = Array.isArray || function(obj) {
return Object.prototype.toString.call(obj) === '[object Array]';
};
// Lodash
EPUBJS.core.values = function(object) {
var index = -1;
var props, length, result;
if(!object) return [];
props = Object.keys(object);
length = props.length;
result = Array(length);
while (++index < length) {
result[index] = object[props[index]];
}
return result;
};
EPUBJS.core.indexOfNode = function(node, typeId) {
var parent = node.parentNode;
var children = parent.childNodes;
var sib;
var index = -1;
for (var i = 0; i < children.length; i++) {
sib = children[i];
if (sib.nodeType === typeId) {
index++;
}
if (sib == node) break;
}
return index;
}
EPUBJS.core.indexOfTextNode = function(textNode) {
return EPUBJS.core.indexOfNode(textNode, TEXT_NODE);
}
EPUBJS.core.indexOfElementNode = function(elementNode) {
return EPUBJS.core.indexOfNode(elementNode, ELEMENT_NODE);
}
var EPUBJS = EPUBJS || {};
EPUBJS.reader = {};
EPUBJS.reader.plugins = {}; //-- Attach extra Controllers as plugins (like search?)
(function(root, $) {
var previousReader = root.ePubReader || {};
var ePubReader = root.ePubReader = function(path, options) {
return new EPUBJS.Reader(path, options);
};
//exports to multiple environments
if (typeof define === 'function' && define.amd) {
//AMD
define(function(){ return ePubReader; });
} else if (typeof module != "undefined" && module.exports) {
//Node
module.exports = ePubReader;
}
})(window, jQuery);
EPUBJS.Reader = function(bookPath, _options) {
var reader = this;
var book;
var plugin;
var $viewer = $("#viewer");
var search = window.location.search;
var parameters;
this.settings = EPUBJS.core.defaults(_options || {}, {
bookPath : bookPath,
restore : false,
reload : false,
bookmarks : undefined,
annotations : undefined,
contained : undefined,
bookKey : undefined,
styles : undefined,
sidebarReflow: false,
generatePagination: false,
history: true
});
// Override options with search parameters
if(search) {
parameters = search.slice(1).split("&");
parameters.forEach(function(p){
var split = p.split("=");
var name = split[0];
var value = split[1] || '';
reader.settings[name] = decodeURIComponent(value);
});
}
this.setBookKey(this.settings.bookPath); //-- This could be username + path or any unique string
if(this.settings.restore && this.isSaved()) {
this.applySavedSettings();
}
this.settings.styles = this.settings.styles || {
fontSize : "100%"
};
this.book = book = new ePub(this.settings.bookPath, this.settings);
this.offline = false;
this.sidebarOpen = false;
if(!this.settings.bookmarks) {
this.settings.bookmarks = [];
}
if(!this.settings.annotations) {
this.settings.annotations = [];
}
if(this.settings.generatePagination) {
book.generatePagination($viewer.width(), $viewer.height());
}
this.rendition = book.renderTo("viewer", {
ignoreClass: "annotator-hl",
width: "100%",
height: "100%"
});
if(this.settings.previousLocationCfi) {
this.displayed = this.rendition.display(this.settings.previousLocationCfi);
} else {
this.displayed = this.rendition.display();
}
book.ready.then(function () {
reader.ReaderController = EPUBJS.reader.ReaderController.call(reader, book);
reader.SettingsController = EPUBJS.reader.SettingsController.call(reader, book);
reader.ControlsController = EPUBJS.reader.ControlsController.call(reader, book);
reader.SidebarController = EPUBJS.reader.SidebarController.call(reader, book);
reader.BookmarksController = EPUBJS.reader.BookmarksController.call(reader, book);
reader.NotesController = EPUBJS.reader.NotesController.call(reader, book);
window.addEventListener("hashchange", this.hashChanged.bind(this), false);
document.addEventListener('keydown', this.adjustFontSize.bind(this), false);
this.rendition.on("keydown", this.adjustFontSize.bind(this));
this.rendition.on("keydown", reader.ReaderController.arrowKeys.bind(this));
this.rendition.on("selected", this.selectedRange.bind(this));
}.bind(this)).then(function() {
reader.ReaderController.hideLoader();
}.bind(this));
// Call Plugins
for(plugin in EPUBJS.reader.plugins) {
if(EPUBJS.reader.plugins.hasOwnProperty(plugin)) {
reader[plugin] = EPUBJS.reader.plugins[plugin].call(reader, book);
}
}
book.loaded.metadata.then(function(meta) {
reader.MetaController = EPUBJS.reader.MetaController.call(reader, meta);
});
book.loaded.navigation.then(function(navigation) {
reader.TocController = EPUBJS.reader.TocController.call(reader, navigation);
});
window.addEventListener("beforeunload", this.unload.bind(this), false);
return this;
};
EPUBJS.Reader.prototype.adjustFontSize = function(e) {
var fontSize;
var interval = 2;
var PLUS = 187;
var MINUS = 189;
var ZERO = 48;
var MOD = (e.ctrlKey || e.metaKey );
if(!this.settings.styles) return;
if(!this.settings.styles.fontSize) {
this.settings.styles.fontSize = "100%";
}
fontSize = parseInt(this.settings.styles.fontSize.slice(0, -1));
if(MOD && e.keyCode == PLUS) {
e.preventDefault();
this.book.setStyle("fontSize", (fontSize + interval) + "%");
}
if(MOD && e.keyCode == MINUS){
e.preventDefault();
this.book.setStyle("fontSize", (fontSize - interval) + "%");
}
if(MOD && e.keyCode == ZERO){
e.preventDefault();
this.book.setStyle("fontSize", "100%");
}
};
EPUBJS.Reader.prototype.addBookmark = function(cfi) {
var present = this.isBookmarked(cfi);
if(present > -1 ) return;
this.settings.bookmarks.push(cfi);
this.trigger("reader:bookmarked", cfi);
};
EPUBJS.Reader.prototype.removeBookmark = function(cfi) {
var bookmark = this.isBookmarked(cfi);
if( bookmark === -1 ) return;
this.settings.bookmarks.splice(bookmark, 1);
this.trigger("reader:unbookmarked", bookmark);
};
EPUBJS.Reader.prototype.isBookmarked = function(cfi) {
var bookmarks = this.settings.bookmarks;
return bookmarks.indexOf(cfi);
};
/*
EPUBJS.Reader.prototype.searchBookmarked = function(cfi) {
var bookmarks = this.settings.bookmarks,
len = bookmarks.length,
i;
for(i = 0; i < len; i++) {
if (bookmarks[i]['cfi'] === cfi) return i;
}
return -1;
};
*/
EPUBJS.Reader.prototype.clearBookmarks = function() {
this.settings.bookmarks = [];
};
//-- Notes
EPUBJS.Reader.prototype.addNote = function(note) {
this.settings.annotations.push(note);
};
EPUBJS.Reader.prototype.removeNote = function(note) {
var index = this.settings.annotations.indexOf(note);
if( index === -1 ) return;
this.settings.annotations.splice(index, 1);
};
EPUBJS.Reader.prototype.clearNotes = function() {
this.settings.annotations = [];
};
//-- Settings
EPUBJS.Reader.prototype.setBookKey = function(identifier){
if(!this.settings.bookKey) {
this.settings.bookKey = "epubjsreader:" + EPUBJS.VERSION + ":" + window.location.host + ":" + identifier;
}
return this.settings.bookKey;
};
//-- Checks if the book setting can be retrieved from localStorage
EPUBJS.Reader.prototype.isSaved = function(bookPath) {
var storedSettings;
if(!localStorage) {
return false;
}
storedSettings = localStorage.getItem(this.settings.bookKey);
if(storedSettings === null) {
return false;
} else {
return true;
}
};
EPUBJS.Reader.prototype.removeSavedSettings = function() {
if(!localStorage) {
return false;
}
localStorage.removeItem(this.settings.bookKey);
};
EPUBJS.Reader.prototype.applySavedSettings = function() {
var stored;
if(!localStorage) {
return false;
}
try {
stored = JSON.parse(localStorage.getItem(this.settings.bookKey));
} catch (e) { // parsing error of localStorage
return false;
}
if(stored) {
// Merge styles
if(stored.styles) {
this.settings.styles = EPUBJS.core.defaults(this.settings.styles || {}, stored.styles);
}
// Merge the rest
this.settings = EPUBJS.core.defaults(this.settings, stored);
return true;
} else {
return false;
}
};
EPUBJS.Reader.prototype.saveSettings = function(){
if(this.book) {
this.settings.previousLocationCfi = this.rendition.currentLocation().start.cfi;
}
if(!localStorage) {
return false;
}
localStorage.setItem(this.settings.bookKey, JSON.stringify(this.settings));
};
EPUBJS.Reader.prototype.unload = function(){
if(this.settings.restore && localStorage) {
this.saveSettings();
}
};
EPUBJS.Reader.prototype.hashChanged = function(){
var hash = window.location.hash.slice(1);
this.rendition.display(hash);
};
EPUBJS.Reader.prototype.selectedRange = function(cfiRange){
var cfiFragment = "#"+cfiRange;
// Update the History Location
if(this.settings.history &&
window.location.hash != cfiFragment) {
// Add CFI fragment to the history
history.pushState({}, '', cfiFragment);
this.currentLocationCfi = cfiRange;
}
};
//-- Enable binding events to reader
RSVP.EventTarget.mixin(EPUBJS.Reader.prototype);
EPUBJS.reader.BookmarksController = function() {
var reader = this;
var book = this.book;
var rendition = this.rendition;
var $bookmarks = $("#bookmarksView"),
$list = $bookmarks.find("#bookmarks");
var docfrag = document.createDocumentFragment();
var show = function() {
$bookmarks.show();
};
var hide = function() {
$bookmarks.hide();
};
var counter = 0;
var createBookmarkItem = function(cfi) {
var listitem = document.createElement("li"),
link = document.createElement("a");
listitem.id = "bookmark-"+counter;
listitem.classList.add('list_item');
var spineItem = book.spine.get(cfi);
var tocItem;
if (spineItem.index in book.navigation.toc) {
tocItem = book.navigation.toc[spineItem.index];
link.textContent = tocItem.label;
} else {
link.textContent = cfi;
}
link.href = cfi;
link.classList.add('bookmark_link');
link.addEventListener("click", function(event){
var cfi = this.getAttribute('href');
rendition.display(cfi);
event.preventDefault();
}, false);
listitem.appendChild(link);
counter++;
return listitem;
};
this.settings.bookmarks.forEach(function(cfi) {
var bookmark = createBookmarkItem(cfi);
docfrag.appendChild(bookmark);
});
$list.append(docfrag);
this.on("reader:bookmarked", function(cfi) {
var item = createBookmarkItem(cfi);
$list.append(item);
});
this.on("reader:unbookmarked", function(index) {
var $item = $("#bookmark-"+index);
$item.remove();
});
return {
"show" : show,
"hide" : hide
};
};
EPUBJS.reader.ControlsController = function(book) {
var reader = this;
var rendition = this.rendition;
var $store = $("#store"),
$fullscreen = $("#fullscreen"),
$fullscreenicon = $("#fullscreenicon"),
$cancelfullscreenicon = $("#cancelfullscreenicon"),
$slider = $("#slider"),
$main = $("#main"),
$sidebar = $("#sidebar"),
$settings = $("#setting"),
$bookmark = $("#bookmark");
/*
var goOnline = function() {
reader.offline = false;
// $store.attr("src", $icon.data("save"));
};
var goOffline = function() {
reader.offline = true;
// $store.attr("src", $icon.data("saved"));
};
var fullscreen = false;
book.on("book:online", goOnline);
book.on("book:offline", goOffline);
*/
$slider.on("click", function () {
if(reader.sidebarOpen) {
reader.SidebarController.hide();
$slider.addClass("icon-menu");
$slider.removeClass("icon-right");
} else {
reader.SidebarController.show();
$slider.addClass("icon-right");
$slider.removeClass("icon-menu");
}
});
if(typeof screenfull !== 'undefined') {
$fullscreen.on("click", function() {
screenfull.toggle($('#container')[0]);
});
if(screenfull.raw) {
document.addEventListener(screenfull.raw.fullscreenchange, function() {
fullscreen = screenfull.isFullscreen;
if(fullscreen) {
$fullscreen
.addClass("icon-resize-small")
.removeClass("icon-resize-full");
} else {
$fullscreen
.addClass("icon-resize-full")
.removeClass("icon-resize-small");
}
});
}
}
$settings.on("click", function() {
reader.SettingsController.show();
});
$bookmark.on("click", function() {
var cfi = reader.rendition.currentLocation().start.cfi;
var bookmarked = reader.isBookmarked(cfi);
if(bookmarked === -1) { //-- Add bookmark
reader.addBookmark(cfi);
$bookmark
.addClass("icon-bookmark")
.removeClass("icon-bookmark-empty");
} else { //-- Remove Bookmark
reader.removeBookmark(cfi);
$bookmark
.removeClass("icon-bookmark")
.addClass("icon-bookmark-empty");
}
});
rendition.on('relocated', function(location){
var cfi = location.start.cfi;
var cfiFragment = "#" + cfi;
//-- Check if bookmarked
var bookmarked = reader.isBookmarked(cfi);
if(bookmarked === -1) { //-- Not bookmarked
$bookmark
.removeClass("icon-bookmark")
.addClass("icon-bookmark-empty");
} else { //-- Bookmarked
$bookmark
.addClass("icon-bookmark")
.removeClass("icon-bookmark-empty");
}
reader.currentLocationCfi = cfi;
// Update the History Location
if(reader.settings.history &&
window.location.hash != cfiFragment) {
// Add CFI fragment to the history
history.pushState({}, '', cfiFragment);
}
});
return {
};
};
EPUBJS.reader.MetaController = function(meta) {
var title = meta.title,
author = meta.creator;
var $title = $("#book-title"),
$author = $("#chapter-title"),
$dash = $("#title-seperator");
document.title = title+" – "+author;
$title.html(title);
$author.html(author);
$dash.show();
};
EPUBJS.reader.NotesController = function() {
var book = this.book;
var rendition = this.rendition;
var reader = this;
var $notesView = $("#notesView");
var $notes = $("#notes");
var $text = $("#note-text");
var $anchor = $("#note-anchor");
var annotations = reader.settings.annotations;
var renderer = book.renderer;
var popups = [];
var epubcfi = new ePub.CFI();
var show = function() {
$notesView.show();
};
var hide = function() {
$notesView.hide();
}
var insertAtPoint = function(e) {
var range;
var textNode;
var offset;
var doc = book.renderer.doc;
var cfi;
var annotation;
// standard
if (doc.caretPositionFromPoint) {
range = doc.caretPositionFromPoint(e.clientX, e.clientY);
textNode = range.offsetNode;
offset = range.offset;
// WebKit
} else if (doc.caretRangeFromPoint) {
range = doc.caretRangeFromPoint(e.clientX, e.clientY);
textNode = range.startContainer;
offset = range.startOffset;
}
if (textNode.nodeType !== 3) {
for (var i=0; i < textNode.childNodes.length; i++) {
if (textNode.childNodes[i].nodeType == 3) {
textNode = textNode.childNodes[i];
break;
}
}
}
// Find the end of the sentence
offset = textNode.textContent.indexOf(".", offset);
if(offset === -1){
offset = textNode.length; // Last item
} else {
offset += 1; // After the period
}
cfi = epubcfi.generateCfiFromTextNode(textNode, offset, book.renderer.currentChapter.cfiBase);
annotation = {
annotatedAt: new Date(),
anchor: cfi,
body: $text.val()
}
// add to list
reader.addNote(annotation);
// attach
addAnnotation(annotation);
placeMarker(annotation);
// clear
$text.val('');
$anchor.text("Attach");
$text.prop("disabled", false);
rendition.off("click", insertAtPoint);
};
var addAnnotation = function(annotation){
var note = document.createElement("li");
var link = document.createElement("a");
note.innerHTML = annotation.body;
// note.setAttribute("ref", annotation.anchor);
link.innerHTML = " context »";
link.href = "#"+annotation.anchor;
link.onclick = function(){
rendition.display(annotation.anchor);
return false;
};
note.appendChild(link);
$notes.append(note);
};
var placeMarker = function(annotation){
var doc = book.renderer.doc;
var marker = document.createElement("span");
var mark = document.createElement("a");
marker.classList.add("footnotesuperscript", "reader_generated");
marker.style.verticalAlign = "super";
marker.style.fontSize = ".75em";
// marker.style.position = "relative";
marker.style.lineHeight = "1em";
// mark.style.display = "inline-block";
mark.style.padding = "2px";
mark.style.backgroundColor = "#fffa96";
mark.style.borderRadius = "5px";
mark.style.cursor = "pointer";
marker.id = "note-"+EPUBJS.core.uuid();
mark.innerHTML = annotations.indexOf(annotation) + 1 + "[Reader]";
marker.appendChild(mark);
epubcfi.addMarker(annotation.anchor, doc, marker);
markerEvents(marker, annotation.body);
}
var markerEvents = function(item, txt){
var id = item.id;
var showPop = function(){
var poppos,
iheight = renderer.height,
iwidth = renderer.width,
tip,
pop,
maxHeight = 225,
itemRect,
left,
top,
pos;
//-- create a popup with endnote inside of it
if(!popups[id]) {
popups[id] = document.createElement("div");
popups[id].setAttribute("class", "popup");
pop_content = document.createElement("div");
popups[id].appendChild(pop_content);
pop_content.innerHTML = txt;
pop_content.setAttribute("class", "pop_content");
renderer.render.document.body.appendChild(popups[id]);
//-- TODO: will these leak memory? - Fred
popups[id].addEventListener("mouseover", onPop, false);
popups[id].addEventListener("mouseout", offPop, false);
//-- Add hide on page change
rendition.on("locationChanged", hidePop, this);
rendition.on("locationChanged", offPop, this);
// chapter.book.on("renderer:chapterDestroy", hidePop, this);
}
pop = popups[id];
//-- get location of item
itemRect = item.getBoundingClientRect();
left = itemRect.left;
top = itemRect.top;
//-- show the popup
pop.classList.add("show");
//-- locations of popup
popRect = pop.getBoundingClientRect();
//-- position the popup
pop.style.left = left - popRect.width / 2 + "px";
pop.style.top = top + "px";
//-- Adjust max height
if(maxHeight > iheight / 2.5) {
maxHeight = iheight / 2.5;
pop_content.style.maxHeight = maxHeight + "px";
}
//-- switch above / below
if(popRect.height + top >= iheight - 25) {
pop.style.top = top - popRect.height + "px";
pop.classList.add("above");
}else{
pop.classList.remove("above");
}
//-- switch left
if(left - popRect.width <= 0) {
pop.style.left = left + "px";
pop.classList.add("left");
}else{
pop.classList.remove("left");
}
//-- switch right
if(left + popRect.width / 2 >= iwidth) {
//-- TEMP MOVE: 300
pop.style.left = left - 300 + "px";
popRect = pop.getBoundingClientRect();
pop.style.left = left - popRect.width + "px";
//-- switch above / below again
if(popRect.height + top >= iheight - 25) {
pop.style.top = top - popRect.height + "px";
pop.classList.add("above");
}else{
pop.classList.remove("above");
}
pop.classList.add("right");
}else{
pop.classList.remove("right");
}
}
var onPop = function(){
popups[id].classList.add("on");
}
var offPop = function(){
popups[id].classList.remove("on");
}
var hidePop = function(){
setTimeout(function(){
popups[id].classList.remove("show");
}, 100);
}
var openSidebar = function(){
reader.ReaderController.slideOut();
show();
};
item.addEventListener("mouseover", showPop, false);
item.addEventListener("mouseout", hidePop, false);
item.addEventListener("click", openSidebar, false);
}
$anchor.on("click", function(e){
$anchor.text("Cancel");
$text.prop("disabled", "true");
// listen for selection
rendition.on("click", insertAtPoint);
});
annotations.forEach(function(note) {
addAnnotation(note);
});
/*
renderer.registerHook("beforeChapterDisplay", function(callback, renderer){
var chapter = renderer.currentChapter;
annotations.forEach(function(note) {
var cfi = epubcfi.parse(note.anchor);
if(cfi.spinePos === chapter.spinePos) {
try {
placeMarker(note);
} catch(e) {
console.log("anchoring failed", note.anchor);
}
}
});
callback();
}, true);
*/
return {
"show" : show,
"hide" : hide
};
};
EPUBJS.reader.ReaderController = function(book) {
var $main = $("#main"),
$divider = $("#divider"),
$loader = $("#loader"),
$next = $("#next"),
$prev = $("#prev");
var reader = this;
var book = this.book;
var rendition = this.rendition;
var slideIn = function() {
var currentPosition = rendition.currentLocation().start.cfi;
if (reader.settings.sidebarReflow){
$main.removeClass('single');
$main.one("transitionend", function(){
rendition.resize();
});
} else {
$main.removeClass("closed");
}
};
var slideOut = function() {
var location = rendition.currentLocation();
if (!location) {
return;
}
var currentPosition = location.start.cfi;
if (reader.settings.sidebarReflow){
$main.addClass('single');
$main.one("transitionend", function(){
rendition.resize();
});
} else {
$main.addClass("closed");
}
};
var showLoader = function() {
$loader.show();
hideDivider();
};
var hideLoader = function() {
$loader.hide();
//-- If the book is using spreads, show the divider
// if(book.settings.spreads) {
// showDivider();
// }
};
var showDivider = function() {
$divider.addClass("show");
};
var hideDivider = function() {
$divider.removeClass("show");
};
var keylock = false;
var arrowKeys = function(e) {
if(e.keyCode == 37) {
if(book.package.metadata.direction === "rtl") {
rendition.next();
} else {
rendition.prev();
}
$prev.addClass("active");
keylock = true;
setTimeout(function(){
keylock = false;
$prev.removeClass("active");
}, 100);
e.preventDefault();
}
if(e.keyCode == 39) {
if(book.package.metadata.direction === "rtl") {
rendition.prev();
} else {
rendition.next();
}
$next.addClass("active");
keylock = true;
setTimeout(function(){
keylock = false;
$next.removeClass("active");
}, 100);
e.preventDefault();
}
}
document.addEventListener('keydown', arrowKeys, false);
$next.on("click", function(e){
if(book.package.metadata.direction === "rtl") {
rendition.prev();
} else {
rendition.next();
}
e.preventDefault();
});
$prev.on("click", function(e){
if(book.package.metadata.direction === "rtl") {
rendition.next();
} else {
rendition.prev();
}
e.preventDefault();
});
rendition.on("layout", function(props){
if(props.spread === true) {
showDivider();
} else {
hideDivider();
}
});
rendition.on('relocated', function(location){
if (location.atStart) {
$prev.addClass("disabled");
}
if (location.atEnd) {
$next.addClass("disabled");
}
});
return {
"slideOut" : slideOut,
"slideIn" : slideIn,
"showLoader" : showLoader,
"hideLoader" : hideLoader,
"showDivider" : showDivider,
"hideDivider" : hideDivider,
"arrowKeys" : arrowKeys
};
};
EPUBJS.reader.SettingsController = function() {
var book = this.book;
var reader = this;
var $settings = $("#settings-modal"),
$overlay = $(".overlay");
var show = function() {
$settings.addClass("md-show");
};
var hide = function() {
$settings.removeClass("md-show");
};
var $sidebarReflowSetting = $('#sidebarReflow');
$sidebarReflowSetting.on('click', function() {
reader.settings.sidebarReflow = !reader.settings.sidebarReflow;
});
$settings.find(".closer").on("click", function() {
hide();
});
$overlay.on("click", function() {
hide();
});
return {
"show" : show,
"hide" : hide
};
};
EPUBJS.reader.SidebarController = function(book) {
var reader = this;
var $sidebar = $("#sidebar"),
$panels = $("#panels");
var activePanel = "Toc";
var changePanelTo = function(viewName) {
var controllerName = viewName + "Controller";
if(activePanel == viewName || typeof reader[controllerName] === 'undefined' ) return;
reader[activePanel+ "Controller"].hide();
reader[controllerName].show();
activePanel = viewName;
$panels.find('.active').removeClass("active");
$panels.find("#show-" + viewName ).addClass("active");
};
var getActivePanel = function() {
return activePanel;
};
var show = function() {
reader.sidebarOpen = true;
reader.ReaderController.slideOut();
$sidebar.addClass("open");
}
var hide = function() {
reader.sidebarOpen = false;
reader.ReaderController.slideIn();
$sidebar.removeClass("open");
}
$panels.find(".show_view").on("click", function(event) {
var view = $(this).data("view");
changePanelTo(view);
event.preventDefault();
});
return {
'show' : show,
'hide' : hide,
'getActivePanel' : getActivePanel,
'changePanelTo' : changePanelTo
};
};
EPUBJS.reader.TocController = function(toc) {
var book = this.book;
var rendition = this.rendition;
var $list = $("#tocView"),
docfrag = document.createDocumentFragment();
var currentChapter = false;
var generateTocItems = function(toc, level) {
var container = document.createElement("ul");
if(!level) level = 1;
toc.forEach(function(chapter) {
var listitem = document.createElement("li"),
link = document.createElement("a"),
toggle = document.createElement("a");
var subitems;
listitem.id = "toc-"+chapter.id;
listitem.classList.add('list_item');
link.textContent = chapter.label;
link.href = chapter.href;
link.classList.add('toc_link');
listitem.appendChild(link);
if(chapter.subitems && chapter.subitems.length > 0) {
level++;
subitems = generateTocItems(chapter.subitems, level);
toggle.classList.add('toc_toggle');
listitem.insertBefore(toggle, link);
listitem.appendChild(subitems);
}
container.appendChild(listitem);
});
return container;
};
var onShow = function() {
$list.show();
};
var onHide = function() {
$list.hide();
};
var chapterChange = function(e) {
var id = e.id,
$item = $list.find("#toc-"+id),
$current = $list.find(".currentChapter"),
$open = $list.find('.openChapter');
if($item.length){
if($item != $current && $item.has(currentChapter).length > 0) {
$current.removeClass("currentChapter");
}
$item.addClass("currentChapter");
// $open.removeClass("openChapter");
$item.parents('li').addClass("openChapter");
}
};
rendition.on('rendered', chapterChange);
var tocitems = generateTocItems(toc);
docfrag.appendChild(tocitems);
$list.append(docfrag);
$list.find(".toc_link").on("click", function(event){
var url = this.getAttribute('href');
event.preventDefault();
//-- Provide the Book with the url to show
// The Url must be found in the books manifest
rendition.display(url);
$list.find(".currentChapter")
.addClass("openChapter")
.removeClass("currentChapter");
$(this).parent('li').addClass("currentChapter");
});
$list.find(".toc_toggle").on("click", function(event){
var $el = $(this).parent('li'),
open = $el.hasClass("openChapter");
event.preventDefault();
if(open){
$el.removeClass("openChapter");
} else {
$el.addClass("openChapter");
}
});
return {
"show" : onShow,
"hide" : onHide
};
};
//# sourceMappingURL=reader.js.map
window.hypothesisConfig = function() {
var Annotator = window.Annotator;
var $main = $("#main");
function EpubAnnotationSidebar(elem, options) {
options = {
server: true,
origin: true,
showHighlights: true,
Toolbar: {container: '#annotation-controls'}
}
Annotator.Host.call(this, elem, options);
}
EpubAnnotationSidebar.prototype = Object.create(Annotator.Host.prototype);
EpubAnnotationSidebar.prototype.show = function() {
this.frame.css({
'margin-left': (-1 * this.frame.width()) + "px"
});
this.frame.removeClass('annotator-collapsed');
if (!$main.hasClass('single')) {
$main.addClass("single");
this.toolbar.find('[name=sidebar-toggle]').removeClass('h-icon-chevron-left').addClass('h-icon-chevron-right');
this.setVisibleHighlights(true);
}
};
EpubAnnotationSidebar.prototype.hide = function() {
this.frame.css({
'margin-left': ''
});
this.frame.addClass('annotator-collapsed');
if ($main.hasClass('single')) {
$main.removeClass("single");
this.toolbar.find('[name=sidebar-toggle]').removeClass('h-icon-chevron-right').addClass('h-icon-chevron-left');
this.setVisibleHighlights(false);
}
};
return {
constructor: EpubAnnotationSidebar,
}
};
// This is the Epub.js plugin. Annotations are updated on location change.
EPUBJS.reader.plugins.HypothesisController = function (Book) {
var reader = this;
var $main = $("#main");
var updateAnnotations = function () {
var annotator = Book.renderer.render.window.annotator;
if (annotator && annotator.constructor.$) {
var annotations = getVisibleAnnotations(annotator.constructor.$);
annotator.showAnnotations(annotations)
}
};
var getVisibleAnnotations = function ($) {
var width = Book.renderer.render.iframe.clientWidth;
return $('.annotator-hl').map(function() {
var $this = $(this),
left = this.getBoundingClientRect().left;
if (left >= 0 && left <= width) {
return $this.data('annotation');
}
}).get();
};
Book.on("renderer:locationChanged", updateAnnotations);
return {}
};
EPUBJS.reader.search = {};
// Search Server -- https://github.com/futurepress/epubjs-search
EPUBJS.reader.search.SERVER = "https://pacific-cliffs-3579.herokuapp.com";
EPUBJS.reader.search.request = function(q, callback) {
var fetch = $.ajax({
dataType: "json",
url: EPUBJS.reader.search.SERVER + "/search?q=" + encodeURIComponent(q)
});
fetch.fail(function(err) {
console.error(err);
});
fetch.done(function(results) {
callback(results);
});
};
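// Example (illustrative): query the search server and use the results
// EPUBJS.reader.search.request("whale", function(data) {
// // data.results is an array of { title, href, cfi, highlight } objects,
// // as consumed by the SearchController below
// });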
EPUBJS.reader.plugins.SearchController = function(Book) {
var reader = this;
var $searchBox = $("#searchBox"),
$searchResults = $("#searchResults"),
$searchView = $("#searchView"),
iframeDoc;
var searchShown = false;
var onShow = function() {
query();
searchShown = true;
$searchView.addClass("shown");
};
var onHide = function() {
searchShown = false;
$searchView.removeClass("shown");
};
var query = function() {
var q = $searchBox.val();
if(q == '') {
return;
}
$searchResults.empty();
$searchResults.append("<li><p>Searching...</p></li>");
EPUBJS.reader.search.request(q, function(data) {
var results = data.results;
$searchResults.empty();
if(iframeDoc) {
$(iframeDoc).find('body').unhighlight();
}
if(results.length == 0) {
$searchResults.append("<li><p>No Results Found</p></li>");
return;
}
iframeDoc = $("#viewer iframe")[0].contentDocument;
$(iframeDoc).find('body').highlight(q, { element: 'span' });
results.forEach(function(result) {
var $li = $("<li></li>");
var $item = $("<a href='"+result.href+"' data-cfi='"+result.cfi+"'><span>"+result.title+"</span><p>"+result.highlight+"</p></a>");
$item.on("click", function(e) {
var $this = $(this),
cfi = $this.data("cfi");
e.preventDefault();
Book.gotoCfi(cfi+"/1:0");
Book.on("renderer:chapterDisplayed", function() {
iframeDoc = $("#viewer iframe")[0].contentDocument;
$(iframeDoc).find('body').highlight(q, { element: 'span' });
})
});
$li.append($item);
$searchResults.append($li);
});
});
};
$searchBox.on("search", function(e) {
var q = $searchBox.val();
//-- SearchBox is empty or cleared
if(q == '') {
$searchResults.empty();
if(reader.SidebarController.getActivePanel() == "Search") {
reader.SidebarController.changePanelTo("Toc");
}
$(iframeDoc).find('body').unhighlight();
iframeDoc = false;
return;
}
reader.SidebarController.changePanelTo("Search");
e.preventDefault();
});
return {
"show" : onShow,
"hide" : onHide
};
};
!function(a){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=a();else if("function"==typeof define&&define.amd)define([],a);else{var b;b="undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:this,b.JSZip=a()}}(function(){return function a(b,c,d){function e(g,h){if(!c[g]){if(!b[g]){var i="function"==typeof require&&require;if(!h&&i)return i(g,!0);if(f)return f(g,!0);var j=new Error("Cannot find module '"+g+"'");throw j.code="MODULE_NOT_FOUND",j}var k=c[g]={exports:{}};b[g][0].call(k.exports,function(a){var c=b[g][1][a];return e(c?c:a)},k,k.exports,a,b,c,d)}return c[g].exports}for(var f="function"==typeof require&&require,g=0;g<d.length;g++)e(d[g]);return e}({1:[function(a,b,c){"use strict";var d=a("./utils"),e=a("./support"),f="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";c.encode=function(a){for(var b,c,e,g,h,i,j,k=[],l=0,m=a.length,n=m,o="string"!==d.getTypeOf(a);l<a.length;)n=m-l,o?(b=a[l++],c=l<m?a[l++]:0,e=l<m?a[l++]:0):(b=a.charCodeAt(l++),c=l<m?a.charCodeAt(l++):0,e=l<m?a.charCodeAt(l++):0),g=b>>2,h=(3&b)<<4|c>>4,i=n>1?(15&c)<<2|e>>6:64,j=n>2?63&e:64,k.push(f.charAt(g)+f.charAt(h)+f.charAt(i)+f.charAt(j));return k.join("")},c.decode=function(a){var b,c,d,g,h,i,j,k=0,l=0,m="data:";if(a.substr(0,m.length)===m)throw new Error("Invalid base64 input, it looks like a data url.");a=a.replace(/[^A-Za-z0-9\+\/\=]/g,"");var n=3*a.length/4;if(a.charAt(a.length-1)===f.charAt(64)&&n--,a.charAt(a.length-2)===f.charAt(64)&&n--,n%1!==0)throw new Error("Invalid base64 input, bad content length.");var o;for(o=e.uint8array?new Uint8Array(0|n):new Array(0|n);k<a.length;)g=f.indexOf(a.charAt(k++)),h=f.indexOf(a.charAt(k++)),i=f.indexOf(a.charAt(k++)),j=f.indexOf(a.charAt(k++)),b=g<<2|h>>4,c=(15&h)<<4|i>>2,d=(3&i)<<6|j,o[l++]=b,64!==i&&(o[l++]=c),64!==j&&(o[l++]=d);return o}},{"./support":30,"./utils":32}],2:[function(a,b,c){"use strict";function d(a,b,c,d,e){this.compressedSize=a,this.uncompressedSize=b,this.crc32=c,this.compression=d,this.compressedContent=e}var e=a("./external"),f=a("./stream/DataWorker"),g=a("./stream/DataLengthProbe"),h=a("./stream/Crc32Probe"),g=a("./stream/DataLengthProbe");d.prototype={getContentWorker:function(){var a=new f(e.Promise.resolve(this.compressedContent)).pipe(this.compression.uncompressWorker()).pipe(new g("data_length")),b=this;return a.on("end",function(){if(this.streamInfo.data_length!==b.uncompressedSize)throw new Error("Bug : uncompressed data size mismatch")}),a},getCompressedWorker:function(){return new f(e.Promise.resolve(this.compressedContent)).withStreamInfo("compressedSize",this.compressedSize).withStreamInfo("uncompressedSize",this.uncompressedSize).withStreamInfo("crc32",this.crc32).withStreamInfo("compression",this.compression)}},d.createWorkerFrom=function(a,b,c){return a.pipe(new h).pipe(new g("uncompressedSize")).pipe(b.compressWorker(c)).pipe(new g("compressedSize")).withStreamInfo("compression",b)},b.exports=d},{"./external":6,"./stream/Crc32Probe":25,"./stream/DataLengthProbe":26,"./stream/DataWorker":27}],3:[function(a,b,c){"use strict";var d=a("./stream/GenericWorker");c.STORE={magic:"\0\0",compressWorker:function(a){return new d("STORE compression")},uncompressWorker:function(){return new d("STORE decompression")}},c.DEFLATE=a("./flate")},{"./flate":7,"./stream/GenericWorker":28}],4:[function(a,b,c){"use strict";function d(){for(var a,b=[],c=0;c<256;c++){a=c;for(var d=0;d<8;d++)a=1&a?3988292384^a>>>1:a>>>1;b[c]=a}return b}function e(a,b,c,d){var 
e=h,f=d+c;a^=-1;for(var g=d;g<f;g++)a=a>>>8^e[255&(a^b[g])];return a^-1}function f(a,b,c,d){var e=h,f=d+c;a^=-1;for(var g=d;g<f;g++)a=a>>>8^e[255&(a^b.charCodeAt(g))];return a^-1}var g=a("./utils"),h=d();b.exports=function(a,b){if("undefined"==typeof a||!a.length)return 0;var c="string"!==g.getTypeOf(a);return c?e(0|b,a,a.length,0):f(0|b,a,a.length,0)}},{"./utils":32}],5:[function(a,b,c){"use strict";c.base64=!1,c.binary=!1,c.dir=!1,c.createFolders=!0,c.date=null,c.compression=null,c.compressionOptions=null,c.comment=null,c.unixPermissions=null,c.dosPermissions=null},{}],6:[function(a,b,c){"use strict";var d=null;d="undefined"!=typeof Promise?Promise:a("lie"),b.exports={Promise:d}},{lie:58}],7:[function(a,b,c){"use strict";function d(a,b){h.call(this,"FlateWorker/"+a),this._pako=null,this._pakoAction=a,this._pakoOptions=b,this.meta={}}var e="undefined"!=typeof Uint8Array&&"undefined"!=typeof Uint16Array&&"undefined"!=typeof Uint32Array,f=a("pako"),g=a("./utils"),h=a("./stream/GenericWorker"),i=e?"uint8array":"array";c.magic="\b\0",g.inherits(d,h),d.prototype.processChunk=function(a){this.meta=a.meta,null===this._pako&&this._createPako(),this._pako.push(g.transformTo(i,a.data),!1)},d.prototype.flush=function(){h.prototype.flush.call(this),null===this._pako&&this._createPako(),this._pako.push([],!0)},d.prototype.cleanUp=function(){h.prototype.cleanUp.call(this),this._pako=null},d.prototype._createPako=function(){this._pako=new f[this._pakoAction]({raw:!0,level:this._pakoOptions.level||-1});var a=this;this._pako.onData=function(b){a.push({data:b,meta:a.meta})}},c.compressWorker=function(a){return new d("Deflate",a)},c.uncompressWorker=function(){return new d("Inflate",{})}},{"./stream/GenericWorker":28,"./utils":32,pako:59}],8:[function(a,b,c){"use strict";function d(a,b,c,d){f.call(this,"ZipFileWorker"),this.bytesWritten=0,this.zipComment=b,this.zipPlatform=c,this.encodeFileName=d,this.streamFiles=a,this.accumulate=!1,this.contentBuffer=[],this.dirRecords=[],this.currentSourceOffset=0,this.entriesCount=0,this.currentFile=null,this._sources=[]}var e=a("../utils"),f=a("../stream/GenericWorker"),g=a("../utf8"),h=a("../crc32"),i=a("../signature"),j=function(a,b){var c,d="";for(c=0;c<b;c++)d+=String.fromCharCode(255&a),a>>>=8;return d},k=function(a,b){var c=a;return a||(c=b?16893:33204),(65535&c)<<16},l=function(a,b){return 63&(a||0)},m=function(a,b,c,d,f,m){var n,o,p=a.file,q=a.compression,r=m!==g.utf8encode,s=e.transformTo("string",m(p.name)),t=e.transformTo("string",g.utf8encode(p.name)),u=p.comment,v=e.transformTo("string",m(u)),w=e.transformTo("string",g.utf8encode(u)),x=t.length!==p.name.length,y=w.length!==u.length,z="",A="",B="",C=p.dir,D=p.date,E={crc32:0,compressedSize:0,uncompressedSize:0};b&&!c||(E.crc32=a.crc32,E.compressedSize=a.compressedSize,E.uncompressedSize=a.uncompressedSize);var F=0;b&&(F|=8),r||!x&&!y||(F|=2048);var G=0,H=0;C&&(G|=16),"UNIX"===f?(H=798,G|=k(p.unixPermissions,C)):(H=20,G|=l(p.dosPermissions,C)),n=D.getUTCHours(),n<<=6,n|=D.getUTCMinutes(),n<<=5,n|=D.getUTCSeconds()/2,o=D.getUTCFullYear()-1980,o<<=4,o|=D.getUTCMonth()+1,o<<=5,o|=D.getUTCDate(),x&&(A=j(1,1)+j(h(s),4)+t,z+="up"+j(A.length,2)+A),y&&(B=j(1,1)+j(h(v),4)+w,z+="uc"+j(B.length,2)+B);var I="";I+="\n\0",I+=j(F,2),I+=q.magic,I+=j(n,2),I+=j(o,2),I+=j(E.crc32,4),I+=j(E.compressedSize,4),I+=j(E.uncompressedSize,4),I+=j(s.length,2),I+=j(z.length,2);var 
J=i.LOCAL_FILE_HEADER+I+s+z,K=i.CENTRAL_FILE_HEADER+j(H,2)+I+j(v.length,2)+"\0\0\0\0"+j(G,4)+j(d,4)+s+z+v;return{fileRecord:J,dirRecord:K}},n=function(a,b,c,d,f){var g="",h=e.transformTo("string",f(d));return g=i.CENTRAL_DIRECTORY_END+"\0\0\0\0"+j(a,2)+j(a,2)+j(b,4)+j(c,4)+j(h.length,2)+h},o=function(a){var b="";return b=i.DATA_DESCRIPTOR+j(a.crc32,4)+j(a.compressedSize,4)+j(a.uncompressedSize,4)};e.inherits(d,f),d.prototype.push=function(a){var b=a.meta.percent||0,c=this.entriesCount,d=this._sources.length;this.accumulate?this.contentBuffer.push(a):(this.bytesWritten+=a.data.length,f.prototype.push.call(this,{data:a.data,meta:{currentFile:this.currentFile,percent:c?(b+100*(c-d-1))/c:100}}))},d.prototype.openedSource=function(a){this.currentSourceOffset=this.bytesWritten,this.currentFile=a.file.name;var b=this.streamFiles&&!a.file.dir;if(b){var c=m(a,b,!1,this.currentSourceOffset,this.zipPlatform,this.encodeFileName);this.push({data:c.fileRecord,meta:{percent:0}})}else this.accumulate=!0},d.prototype.closedSource=function(a){this.accumulate=!1;var b=this.streamFiles&&!a.file.dir,c=m(a,b,!0,this.currentSourceOffset,this.zipPlatform,this.encodeFileName);if(this.dirRecords.push(c.dirRecord),b)this.push({data:o(a),meta:{percent:100}});else for(this.push({data:c.fileRecord,meta:{percent:0}});this.contentBuffer.length;)this.push(this.contentBuffer.shift());this.currentFile=null},d.prototype.flush=function(){for(var a=this.bytesWritten,b=0;b<this.dirRecords.length;b++)this.push({data:this.dirRecords[b],meta:{percent:100}});var c=this.bytesWritten-a,d=n(this.dirRecords.length,c,a,this.zipComment,this.encodeFileName);this.push({data:d,meta:{percent:100}})},d.prototype.prepareNextSource=function(){this.previous=this._sources.shift(),this.openedSource(this.previous.streamInfo),this.isPaused?this.previous.pause():this.previous.resume()},d.prototype.registerPrevious=function(a){this._sources.push(a);var b=this;return a.on("data",function(a){b.processChunk(a)}),a.on("end",function(){b.closedSource(b.previous.streamInfo),b._sources.length?b.prepareNextSource():b.end()}),a.on("error",function(a){b.error(a)}),this},d.prototype.resume=function(){return!!f.prototype.resume.call(this)&&(!this.previous&&this._sources.length?(this.prepareNextSource(),!0):this.previous||this._sources.length||this.generatedError?void 0:(this.end(),!0))},d.prototype.error=function(a){var b=this._sources;if(!f.prototype.error.call(this,a))return!1;for(var c=0;c<b.length;c++)try{b[c].error(a)}catch(a){}return!0},d.prototype.lock=function(){f.prototype.lock.call(this);for(var a=this._sources,b=0;b<a.length;b++)a[b].lock()},b.exports=d},{"../crc32":4,"../signature":23,"../stream/GenericWorker":28,"../utf8":31,"../utils":32}],9:[function(a,b,c){"use strict";var d=a("../compressions"),e=a("./ZipFileWorker"),f=function(a,b){var c=a||b,e=d[c];if(!e)throw new Error(c+" is not a valid compression method !");return e};c.generateWorker=function(a,b,c){var d=new e(b.streamFiles,c,b.platform,b.encodeFileName),g=0;try{a.forEach(function(a,c){g++;var e=f(c.options.compression,b.compression),h=c.options.compressionOptions||b.compressionOptions||{},i=c.dir,j=c.date;c._compressWorker(e,h).withStreamInfo("file",{name:a,dir:i,date:j,comment:c.comment||"",unixPermissions:c.unixPermissions,dosPermissions:c.dosPermissions}).pipe(d)}),d.entriesCount=g}catch(h){d.error(h)}return d}},{"../compressions":3,"./ZipFileWorker":8}],10:[function(a,b,c){"use strict";function d(){if(!(this instanceof d))return new d;if(arguments.length)throw new Error("The 
constructor with parameters has been removed in JSZip 3.0, please check the upgrade guide.");this.files={},this.comment=null,this.root="",this.clone=function(){var a=new d;for(var b in this)"function"!=typeof this[b]&&(a[b]=this[b]);return a}}d.prototype=a("./object"),d.prototype.loadAsync=a("./load"),d.support=a("./support"),d.defaults=a("./defaults"),d.version="3.1.5",d.loadAsync=function(a,b){return(new d).loadAsync(a,b)},d.external=a("./external"),b.exports=d},{"./defaults":5,"./external":6,"./load":11,"./object":15,"./support":30}],11:[function(a,b,c){"use strict";function d(a){return new f.Promise(function(b,c){var d=a.decompressed.getContentWorker().pipe(new i);d.on("error",function(a){c(a)}).on("end",function(){d.streamInfo.crc32!==a.decompressed.crc32?c(new Error("Corrupted zip : CRC32 mismatch")):b()}).resume()})}var e=a("./utils"),f=a("./external"),g=a("./utf8"),e=a("./utils"),h=a("./zipEntries"),i=a("./stream/Crc32Probe"),j=a("./nodejsUtils");b.exports=function(a,b){var c=this;return b=e.extend(b||{},{base64:!1,checkCRC32:!1,optimizedBinaryString:!1,createFolders:!1,decodeFileName:g.utf8decode}),j.isNode&&j.isStream(a)?f.Promise.reject(new Error("JSZip can't accept a stream when loading a zip file.")):e.prepareContent("the loaded zip file",a,!0,b.optimizedBinaryString,b.base64).then(function(a){var c=new h(b);return c.load(a),c}).then(function(a){var c=[f.Promise.resolve(a)],e=a.files;if(b.checkCRC32)for(var g=0;g<e.length;g++)c.push(d(e[g]));return f.Promise.all(c)}).then(function(a){for(var d=a.shift(),e=d.files,f=0;f<e.length;f++){var g=e[f];c.file(g.fileNameStr,g.decompressed,{binary:!0,optimizedBinaryString:!0,date:g.date,dir:g.dir,comment:g.fileCommentStr.length?g.fileCommentStr:null,unixPermissions:g.unixPermissions,dosPermissions:g.dosPermissions,createFolders:b.createFolders})}return d.zipComment.length&&(c.comment=d.zipComment),c})}},{"./external":6,"./nodejsUtils":14,"./stream/Crc32Probe":25,"./utf8":31,"./utils":32,"./zipEntries":33}],12:[function(a,b,c){"use strict";function d(a,b){f.call(this,"Nodejs stream input adapter for "+a),this._upstreamEnded=!1,this._bindStream(b)}var e=a("../utils"),f=a("../stream/GenericWorker");e.inherits(d,f),d.prototype._bindStream=function(a){var b=this;this._stream=a,a.pause(),a.on("data",function(a){b.push({data:a,meta:{percent:0}})}).on("error",function(a){b.isPaused?this.generatedError=a:b.error(a)}).on("end",function(){b.isPaused?b._upstreamEnded=!0:b.end()})},d.prototype.pause=function(){return!!f.prototype.pause.call(this)&&(this._stream.pause(),!0)},d.prototype.resume=function(){return!!f.prototype.resume.call(this)&&(this._upstreamEnded?this.end():this._stream.resume(),!0)},b.exports=d},{"../stream/GenericWorker":28,"../utils":32}],13:[function(a,b,c){"use strict";function d(a,b,c){e.call(this,b),this._helper=a;var d=this;a.on("data",function(a,b){d.push(a)||d._helper.pause(),c&&c(b)}).on("error",function(a){d.emit("error",a)}).on("end",function(){d.push(null)})}var e=a("readable-stream").Readable,f=a("../utils");f.inherits(d,e),d.prototype._read=function(){this._helper.resume()},b.exports=d},{"../utils":32,"readable-stream":16}],14:[function(a,b,c){"use strict";b.exports={isNode:"undefined"!=typeof Buffer,newBufferFrom:function(a,b){return new Buffer(a,b)},allocBuffer:function(a){return Buffer.alloc?Buffer.alloc(a):new Buffer(a)},isBuffer:function(a){return Buffer.isBuffer(a)},isStream:function(a){return a&&"function"==typeof a.on&&"function"==typeof a.pause&&"function"==typeof a.resume}}},{}],15:[function(a,b,c){"use 
strict";function d(a){return"[object RegExp]"===Object.prototype.toString.call(a)}var e=a("./utf8"),f=a("./utils"),g=a("./stream/GenericWorker"),h=a("./stream/StreamHelper"),i=a("./defaults"),j=a("./compressedObject"),k=a("./zipObject"),l=a("./generate"),m=a("./nodejsUtils"),n=a("./nodejs/NodejsStreamInputAdapter"),o=function(a,b,c){var d,e=f.getTypeOf(b),h=f.extend(c||{},i);h.date=h.date||new Date,null!==h.compression&&(h.compression=h.compression.toUpperCase()),"string"==typeof h.unixPermissions&&(h.unixPermissions=parseInt(h.unixPermissions,8)),h.unixPermissions&&16384&h.unixPermissions&&(h.dir=!0),h.dosPermissions&&16&h.dosPermissions&&(h.dir=!0),h.dir&&(a=q(a)),h.createFolders&&(d=p(a))&&r.call(this,d,!0);var l="string"===e&&h.binary===!1&&h.base64===!1;c&&"undefined"!=typeof c.binary||(h.binary=!l);var o=b instanceof j&&0===b.uncompressedSize;(o||h.dir||!b||0===b.length)&&(h.base64=!1,h.binary=!0,b="",h.compression="STORE",e="string");var s=null;s=b instanceof j||b instanceof g?b:m.isNode&&m.isStream(b)?new n(a,b):f.prepareContent(a,b,h.binary,h.optimizedBinaryString,h.base64);var t=new k(a,s,h);this.files[a]=t},p=function(a){"/"===a.slice(-1)&&(a=a.substring(0,a.length-1));var b=a.lastIndexOf("/");return b>0?a.substring(0,b):""},q=function(a){return"/"!==a.slice(-1)&&(a+="/"),a},r=function(a,b){return b="undefined"!=typeof b?b:i.createFolders,a=q(a),this.files[a]||o.call(this,a,null,{dir:!0,createFolders:b}),this.files[a]},s={load:function(){throw new Error("This method has been removed in JSZip 3.0, please check the upgrade guide.")},forEach:function(a){var b,c,d;for(b in this.files)this.files.hasOwnProperty(b)&&(d=this.files[b],c=b.slice(this.root.length,b.length),c&&b.slice(0,this.root.length)===this.root&&a(c,d))},filter:function(a){var b=[];return this.forEach(function(c,d){a(c,d)&&b.push(d)}),b},file:function(a,b,c){if(1===arguments.length){if(d(a)){var e=a;return this.filter(function(a,b){return!b.dir&&e.test(a)})}var f=this.files[this.root+a];return f&&!f.dir?f:null}return a=this.root+a,o.call(this,a,b,c),this},folder:function(a){if(!a)return this;if(d(a))return this.filter(function(b,c){return c.dir&&a.test(b)});var b=this.root+a,c=r.call(this,b),e=this.clone();return e.root=c.name,e},remove:function(a){a=this.root+a;var b=this.files[a];if(b||("/"!==a.slice(-1)&&(a+="/"),b=this.files[a]),b&&!b.dir)delete this.files[a];else for(var c=this.filter(function(b,c){return c.name.slice(0,a.length)===a}),d=0;d<c.length;d++)delete this.files[c[d].name];return this},generate:function(a){throw new Error("This method has been removed in JSZip 3.0, please check the upgrade guide.")},generateInternalStream:function(a){var b,c={};try{if(c=f.extend(a||{},{streamFiles:!1,compression:"STORE",compressionOptions:null,type:"",platform:"DOS",comment:null,mimeType:"application/zip",encodeFileName:e.utf8encode}),c.type=c.type.toLowerCase(),c.compression=c.compression.toUpperCase(),"binarystring"===c.type&&(c.type="string"),!c.type)throw new Error("No output type specified.");f.checkSupport(c.type),"darwin"!==c.platform&&"freebsd"!==c.platform&&"linux"!==c.platform&&"sunos"!==c.platform||(c.platform="UNIX"),"win32"===c.platform&&(c.platform="DOS");var d=c.comment||this.comment||"";b=l.generateWorker(this,c,d)}catch(i){b=new g("error"),b.error(i)}return new h(b,c.type||"string",c.mimeType)},generateAsync:function(a,b){return this.generateInternalStream(a).accumulate(b)},generateNodeStream:function(a,b){return 
a=a||{},a.type||(a.type="nodebuffer"),this.generateInternalStream(a).toNodejsStream(b)}};b.exports=s},{"./compressedObject":2,"./defaults":5,"./generate":9,"./nodejs/NodejsStreamInputAdapter":12,"./nodejsUtils":14,"./stream/GenericWorker":28,"./stream/StreamHelper":29,"./utf8":31,"./utils":32,"./zipObject":35}],16:[function(a,b,c){b.exports=a("stream")},{stream:void 0}],17:[function(a,b,c){"use strict";function d(a){e.call(this,a);for(var b=0;b<this.data.length;b++)a[b]=255&a[b]}var e=a("./DataReader"),f=a("../utils");f.inherits(d,e),d.prototype.byteAt=function(a){return this.data[this.zero+a]},d.prototype.lastIndexOfSignature=function(a){for(var b=a.charCodeAt(0),c=a.charCodeAt(1),d=a.charCodeAt(2),e=a.charCodeAt(3),f=this.length-4;f>=0;--f)if(this.data[f]===b&&this.data[f+1]===c&&this.data[f+2]===d&&this.data[f+3]===e)return f-this.zero;return-1},d.prototype.readAndCheckSignature=function(a){var b=a.charCodeAt(0),c=a.charCodeAt(1),d=a.charCodeAt(2),e=a.charCodeAt(3),f=this.readData(4);return b===f[0]&&c===f[1]&&d===f[2]&&e===f[3]},d.prototype.readData=function(a){if(this.checkOffset(a),0===a)return[];var b=this.data.slice(this.zero+this.index,this.zero+this.index+a);return this.index+=a,b},b.exports=d},{"../utils":32,"./DataReader":18}],18:[function(a,b,c){"use strict";function d(a){this.data=a,this.length=a.length,this.index=0,this.zero=0}var e=a("../utils");d.prototype={checkOffset:function(a){this.checkIndex(this.index+a)},checkIndex:function(a){if(this.length<this.zero+a||a<0)throw new Error("End of data reached (data length = "+this.length+", asked index = "+a+"). Corrupted zip ?")},setIndex:function(a){this.checkIndex(a),this.index=a},skip:function(a){this.setIndex(this.index+a)},byteAt:function(a){},readInt:function(a){var b,c=0;for(this.checkOffset(a),b=this.index+a-1;b>=this.index;b--)c=(c<<8)+this.byteAt(b);return this.index+=a,c},readString:function(a){return e.transformTo("string",this.readData(a))},readData:function(a){},lastIndexOfSignature:function(a){},readAndCheckSignature:function(a){},readDate:function(){var a=this.readInt(4);return new Date(Date.UTC((a>>25&127)+1980,(a>>21&15)-1,a>>16&31,a>>11&31,a>>5&63,(31&a)<<1))}},b.exports=d},{"../utils":32}],19:[function(a,b,c){"use strict";function d(a){e.call(this,a)}var e=a("./Uint8ArrayReader"),f=a("../utils");f.inherits(d,e),d.prototype.readData=function(a){this.checkOffset(a);var b=this.data.slice(this.zero+this.index,this.zero+this.index+a);return this.index+=a,b},b.exports=d},{"../utils":32,"./Uint8ArrayReader":21}],20:[function(a,b,c){"use strict";function d(a){e.call(this,a)}var e=a("./DataReader"),f=a("../utils");f.inherits(d,e),d.prototype.byteAt=function(a){return this.data.charCodeAt(this.zero+a)},d.prototype.lastIndexOfSignature=function(a){return this.data.lastIndexOf(a)-this.zero},d.prototype.readAndCheckSignature=function(a){var b=this.readData(4);return a===b},d.prototype.readData=function(a){this.checkOffset(a);var b=this.data.slice(this.zero+this.index,this.zero+this.index+a);return this.index+=a,b},b.exports=d},{"../utils":32,"./DataReader":18}],21:[function(a,b,c){"use strict";function d(a){e.call(this,a)}var e=a("./ArrayReader"),f=a("../utils");f.inherits(d,e),d.prototype.readData=function(a){if(this.checkOffset(a),0===a)return new Uint8Array(0);var b=this.data.subarray(this.zero+this.index,this.zero+this.index+a);return this.index+=a,b},b.exports=d},{"../utils":32,"./ArrayReader":17}],22:[function(a,b,c){"use strict";var 
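/*
  Modules 17-21 above define the low-level readers (ArrayReader, DataReader,
  NodeBufferReader, StringReader, Uint8ArrayReader): cursor-based access with
  readInt/readString/readData/readDate plus signature scanning. Module 22
  below is the factory that picks one of them from utils.getTypeOf(data):
  strings fall back to StringReader only when Uint8Array is unavailable, Node
  buffers get NodeBufferReader, and everything else is normalized to a typed
  array (Uint8ArrayReader) or a plain array (ArrayReader).
*/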
d=a("../utils"),e=a("../support"),f=a("./ArrayReader"),g=a("./StringReader"),h=a("./NodeBufferReader"),i=a("./Uint8ArrayReader");b.exports=function(a){var b=d.getTypeOf(a);return d.checkSupport(b),"string"!==b||e.uint8array?"nodebuffer"===b?new h(a):e.uint8array?new i(d.transformTo("uint8array",a)):new f(d.transformTo("array",a)):new g(a)}},{"../support":30,"../utils":32,"./ArrayReader":17,"./NodeBufferReader":19,"./StringReader":20,"./Uint8ArrayReader":21}],23:[function(a,b,c){"use strict";c.LOCAL_FILE_HEADER="PK",c.CENTRAL_FILE_HEADER="PK",c.CENTRAL_DIRECTORY_END="PK",c.ZIP64_CENTRAL_DIRECTORY_LOCATOR="PK",c.ZIP64_CENTRAL_DIRECTORY_END="PK",c.DATA_DESCRIPTOR="PK\b"},{}],24:[function(a,b,c){"use strict";function d(a){e.call(this,"ConvertWorker to "+a),this.destType=a}var e=a("./GenericWorker"),f=a("../utils");f.inherits(d,e),d.prototype.processChunk=function(a){this.push({data:f.transformTo(this.destType,a.data),meta:a.meta})},b.exports=d},{"../utils":32,"./GenericWorker":28}],25:[function(a,b,c){"use strict";function d(){e.call(this,"Crc32Probe"),this.withStreamInfo("crc32",0)}var e=a("./GenericWorker"),f=a("../crc32"),g=a("../utils");g.inherits(d,e),d.prototype.processChunk=function(a){this.streamInfo.crc32=f(a.data,this.streamInfo.crc32||0),this.push(a)},b.exports=d},{"../crc32":4,"../utils":32,"./GenericWorker":28}],26:[function(a,b,c){"use strict";function d(a){f.call(this,"DataLengthProbe for "+a),this.propName=a,this.withStreamInfo(a,0)}var e=a("../utils"),f=a("./GenericWorker");e.inherits(d,f),d.prototype.processChunk=function(a){if(a){var b=this.streamInfo[this.propName]||0;this.streamInfo[this.propName]=b+a.data.length}f.prototype.processChunk.call(this,a)},b.exports=d},{"../utils":32,"./GenericWorker":28}],27:[function(a,b,c){"use strict";function d(a){f.call(this,"DataWorker");var b=this;this.dataIsReady=!1,this.index=0,this.max=0,this.data=null,this.type="",this._tickScheduled=!1,a.then(function(a){b.dataIsReady=!0,b.data=a,b.max=a&&a.length||0,b.type=e.getTypeOf(a),b.isPaused||b._tickAndRepeat()},function(a){b.error(a)})}var e=a("../utils"),f=a("./GenericWorker"),g=16384;e.inherits(d,f),d.prototype.cleanUp=function(){f.prototype.cleanUp.call(this),this.data=null},d.prototype.resume=function(){return!!f.prototype.resume.call(this)&&(!this._tickScheduled&&this.dataIsReady&&(this._tickScheduled=!0,e.delay(this._tickAndRepeat,[],this)),!0)},d.prototype._tickAndRepeat=function(){this._tickScheduled=!1,this.isPaused||this.isFinished||(this._tick(),this.isFinished||(e.delay(this._tickAndRepeat,[],this),this._tickScheduled=!0))},d.prototype._tick=function(){if(this.isPaused||this.isFinished)return!1;var a=g,b=null,c=Math.min(this.max,this.index+a);if(this.index>=this.max)return this.end();switch(this.type){case"string":b=this.data.substring(this.index,c);break;case"uint8array":b=this.data.subarray(this.index,c);break;case"array":case"nodebuffer":b=this.data.slice(this.index,c)}return this.index=c,this.push({data:b,meta:{percent:this.max?this.index/this.max*100:0}})},b.exports=d},{"../utils":32,"./GenericWorker":28}],28:[function(a,b,c){"use strict";function 
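/*
  Module 28 below is GenericWorker, the pause/resume streaming primitive the
  rest of the library is built on: workers emit "data", "end" and "error"
  events, are chained with pipe(), share metadata through streamInfo /
  withStreamInfo, and can be paused, resumed and locked. A rough sketch of
  the subclassing pattern used by the bundled workers; illustrative only,
  since GenericWorker and utils are module-local inside this build:
  ```javascript
  function UpperCaseWorker() {
    GenericWorker.call(this, "UpperCaseWorker");
  }
  utils.inherits(UpperCaseWorker, GenericWorker);
  UpperCaseWorker.prototype.processChunk = function (chunk) {
    // transform the chunk, keep the metadata, pass it downstream
    this.push({ data: String(chunk.data).toUpperCase(), meta: chunk.meta });
  };
  ```
*/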
d(a){this.name=a||"default",this.streamInfo={},this.generatedError=null,this.extraStreamInfo={},this.isPaused=!0,this.isFinished=!1,this.isLocked=!1,this._listeners={data:[],end:[],error:[]},this.previous=null}d.prototype={push:function(a){this.emit("data",a)},end:function(){if(this.isFinished)return!1;this.flush();try{this.emit("end"),this.cleanUp(),this.isFinished=!0}catch(a){this.emit("error",a)}return!0},error:function(a){return!this.isFinished&&(this.isPaused?this.generatedError=a:(this.isFinished=!0,this.emit("error",a),this.previous&&this.previous.error(a),this.cleanUp()),!0)},on:function(a,b){return this._listeners[a].push(b),this},cleanUp:function(){this.streamInfo=this.generatedError=this.extraStreamInfo=null,this._listeners=[]},emit:function(a,b){if(this._listeners[a])for(var c=0;c<this._listeners[a].length;c++)this._listeners[a][c].call(this,b)},pipe:function(a){return a.registerPrevious(this)},registerPrevious:function(a){if(this.isLocked)throw new Error("The stream '"+this+"' has already been used.");this.streamInfo=a.streamInfo,this.mergeStreamInfo(),this.previous=a;var b=this;return a.on("data",function(a){b.processChunk(a)}),a.on("end",function(){b.end()}),a.on("error",function(a){b.error(a)}),this},pause:function(){return!this.isPaused&&!this.isFinished&&(this.isPaused=!0,this.previous&&this.previous.pause(),!0)},resume:function(){if(!this.isPaused||this.isFinished)return!1;this.isPaused=!1;var a=!1;return this.generatedError&&(this.error(this.generatedError),a=!0),this.previous&&this.previous.resume(),!a},flush:function(){},processChunk:function(a){this.push(a)},withStreamInfo:function(a,b){return this.extraStreamInfo[a]=b,this.mergeStreamInfo(),this},mergeStreamInfo:function(){for(var a in this.extraStreamInfo)this.extraStreamInfo.hasOwnProperty(a)&&(this.streamInfo[a]=this.extraStreamInfo[a])},lock:function(){if(this.isLocked)throw new Error("The stream '"+this+"' has already been used.");this.isLocked=!0,this.previous&&this.previous.lock()},toString:function(){var a="Worker "+this.name;return this.previous?this.previous+" -> "+a:a}},b.exports=d},{}],29:[function(a,b,c){"use strict";function d(a,b,c){switch(a){case"blob":return h.newBlob(h.transformTo("arraybuffer",b),c);case"base64":return k.encode(b);default:return h.transformTo(a,b)}}function e(a,b){var c,d=0,e=null,f=0;for(c=0;c<b.length;c++)f+=b[c].length;switch(a){case"string":return b.join("");case"array":return Array.prototype.concat.apply([],b);case"uint8array":for(e=new Uint8Array(f),c=0;c<b.length;c++)e.set(b[c],d),d+=b[c].length;return e;case"nodebuffer":return Buffer.concat(b);default:throw new Error("concat : unsupported type '"+a+"'")}}function f(a,b){return new m.Promise(function(c,f){var g=[],h=a._internalType,i=a._outputType,j=a._mimeType;a.on("data",function(a,c){g.push(a),b&&b(c)}).on("error",function(a){g=[],f(a)}).on("end",function(){try{var a=d(i,e(h,g),j);c(a)}catch(b){f(b)}g=[]}).resume()})}function g(a,b,c){var d=b;switch(b){case"blob":case"arraybuffer":d="uint8array";break;case"base64":d="string"}try{this._internalType=d,this._outputType=b,this._mimeType=c,h.checkSupport(d),this._worker=a.pipe(new i(d)),a.lock()}catch(e){this._worker=new j("error"),this._worker.error(e)}}var h=a("../utils"),i=a("./ConvertWorker"),j=a("./GenericWorker"),k=a("../base64"),l=a("../support"),m=a("../external"),n=null;if(l.nodestream)try{n=a("../nodejs/NodejsStreamOutputAdapter")}catch(o){}g.prototype={accumulate:function(a){return f(this,a)},on:function(a,b){var 
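/*
  Module 29 (StreamHelper) wraps a worker chain behind a small public
  surface: on("data"), accumulate() which concatenates the chunks and
  converts them to the requested type (string, binarystring, base64, array,
  uint8array, arraybuffer, nodebuffer or blob), and toNodejsStream(). The
  public API reaches it like this, assuming the bundle exposes the usual
  JSZip global:
  ```javascript
  var zip = new JSZip();
  zip.file("hello.txt", "Hello, world");
  zip.generateAsync({ type: "uint8array" }).then(function (bytes) {
    // `bytes` is the whole archive, accumulated chunk by chunk by StreamHelper
  });
  ```
*/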
c=this;return"data"===a?this._worker.on(a,function(a){b.call(c,a.data,a.meta)}):this._worker.on(a,function(){h.delay(b,arguments,c)}),this},resume:function(){return h.delay(this._worker.resume,[],this._worker),this},pause:function(){return this._worker.pause(),this},toNodejsStream:function(a){if(h.checkSupport("nodestream"),"nodebuffer"!==this._outputType)throw new Error(this._outputType+" is not supported by this method");return new n(this,{objectMode:"nodebuffer"!==this._outputType},a)}},b.exports=g},{"../base64":1,"../external":6,"../nodejs/NodejsStreamOutputAdapter":13,"../support":30,"../utils":32,"./ConvertWorker":24,"./GenericWorker":28}],30:[function(a,b,c){"use strict";if(c.base64=!0,c.array=!0,c.string=!0,c.arraybuffer="undefined"!=typeof ArrayBuffer&&"undefined"!=typeof Uint8Array,c.nodebuffer="undefined"!=typeof Buffer,c.uint8array="undefined"!=typeof Uint8Array,"undefined"==typeof ArrayBuffer)c.blob=!1;else{var d=new ArrayBuffer(0);try{c.blob=0===new Blob([d],{type:"application/zip"}).size}catch(e){try{var f=self.BlobBuilder||self.WebKitBlobBuilder||self.MozBlobBuilder||self.MSBlobBuilder,g=new f;g.append(d),c.blob=0===g.getBlob("application/zip").size}catch(e){c.blob=!1}}}try{c.nodestream=!!a("readable-stream").Readable}catch(e){c.nodestream=!1}},{"readable-stream":16}],31:[function(a,b,c){"use strict";function d(){i.call(this,"utf-8 decode"),this.leftOver=null}function e(){i.call(this,"utf-8 encode")}for(var f=a("./utils"),g=a("./support"),h=a("./nodejsUtils"),i=a("./stream/GenericWorker"),j=new Array(256),k=0;k<256;k++)j[k]=k>=252?6:k>=248?5:k>=240?4:k>=224?3:k>=192?2:1;j[254]=j[254]=1;var l=function(a){var b,c,d,e,f,h=a.length,i=0;for(e=0;e<h;e++)c=a.charCodeAt(e),55296===(64512&c)&&e+1<h&&(d=a.charCodeAt(e+1),56320===(64512&d)&&(c=65536+(c-55296<<10)+(d-56320),e++)),i+=c<128?1:c<2048?2:c<65536?3:4;for(b=g.uint8array?new Uint8Array(i):new Array(i),f=0,e=0;f<i;e++)c=a.charCodeAt(e),55296===(64512&c)&&e+1<h&&(d=a.charCodeAt(e+1),56320===(64512&d)&&(c=65536+(c-55296<<10)+(d-56320),e++)),c<128?b[f++]=c:c<2048?(b[f++]=192|c>>>6,b[f++]=128|63&c):c<65536?(b[f++]=224|c>>>12,b[f++]=128|c>>>6&63,b[f++]=128|63&c):(b[f++]=240|c>>>18,b[f++]=128|c>>>12&63,b[f++]=128|c>>>6&63,b[f++]=128|63&c);return b},m=function(a,b){var c;for(b=b||a.length,b>a.length&&(b=a.length),c=b-1;c>=0&&128===(192&a[c]);)c--;return c<0?b:0===c?b:c+j[a[c]]>b?c:b},n=function(a){var b,c,d,e,g=a.length,h=new Array(2*g);for(c=0,b=0;b<g;)if(d=a[b++],d<128)h[c++]=d;else if(e=j[d],e>4)h[c++]=65533,b+=e-1;else{for(d&=2===e?31:3===e?15:7;e>1&&b<g;)d=d<<6|63&a[b++],e--;e>1?h[c++]=65533:d<65536?h[c++]=d:(d-=65536,h[c++]=55296|d>>10&1023,h[c++]=56320|1023&d)}return h.length!==c&&(h.subarray?h=h.subarray(0,c):h.length=c),f.applyFromCharCode(h)};c.utf8encode=function(a){return g.nodebuffer?h.newBufferFrom(a,"utf-8"):l(a)},c.utf8decode=function(a){return g.nodebuffer?f.transformTo("nodebuffer",a).toString("utf-8"):(a=f.transformTo(g.uint8array?"uint8array":"array",a),n(a))},f.inherits(d,i),d.prototype.processChunk=function(a){var b=f.transformTo(g.uint8array?"uint8array":"array",a.data);if(this.leftOver&&this.leftOver.length){if(g.uint8array){var d=b;b=new Uint8Array(d.length+this.leftOver.length),b.set(this.leftOver,0),b.set(d,this.leftOver.length)}else b=this.leftOver.concat(b);this.leftOver=null}var 
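/*
  Module 31 (utf8) encodes and decodes UTF-8 with surrogate-pair handling;
  the streaming decoder keeps incomplete trailing byte sequences in
  `leftOver` so multi-byte characters split across chunk boundaries survive.
  The size pass uses the standard per-code-point byte count; a minimal
  restatement (helper name is illustrative):
  ```javascript
  function utf8Bytes(codePoint) {
    return codePoint < 0x80 ? 1 : codePoint < 0x800 ? 2 : codePoint < 0x10000 ? 3 : 4;
  }
  ```
*/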
e=m(b),h=b;e!==b.length&&(g.uint8array?(h=b.subarray(0,e),this.leftOver=b.subarray(e,b.length)):(h=b.slice(0,e),this.leftOver=b.slice(e,b.length))),this.push({data:c.utf8decode(h),meta:a.meta})},d.prototype.flush=function(){this.leftOver&&this.leftOver.length&&(this.push({data:c.utf8decode(this.leftOver),meta:{}}),this.leftOver=null)},c.Utf8DecodeWorker=d,f.inherits(e,i),e.prototype.processChunk=function(a){this.push({data:c.utf8encode(a.data),meta:a.meta})},c.Utf8EncodeWorker=e},{"./nodejsUtils":14,"./stream/GenericWorker":28,"./support":30,"./utils":32}],32:[function(a,b,c){"use strict";function d(a){var b=null;return b=i.uint8array?new Uint8Array(a.length):new Array(a.length),f(a,b)}function e(a){return a}function f(a,b){for(var c=0;c<a.length;++c)b[c]=255&a.charCodeAt(c);return b}function g(a){var b=65536,d=c.getTypeOf(a),e=!0;if("uint8array"===d?e=n.applyCanBeUsed.uint8array:"nodebuffer"===d&&(e=n.applyCanBeUsed.nodebuffer),e)for(;b>1;)try{return n.stringifyByChunk(a,d,b)}catch(f){b=Math.floor(b/2)}return n.stringifyByChar(a)}function h(a,b){for(var c=0;c<a.length;c++)b[c]=a[c];
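/*
  Module 32 (utils) provides the type plumbing: getTypeOf(), a conversion
  table backing transformTo(outputType, input) between string / array /
  arraybuffer / uint8array / nodebuffer, delay() on top of setImmediate, and
  prepareContent(), which also unwraps Blobs through a FileReader. Binary
  strings are built in chunks so String.fromCharCode.apply never exceeds the
  engine's argument limit; roughly this idea (illustrative helper, typed-array
  input assumed):
  ```javascript
  function bytesToBinaryString(bytes, chunkSize) {
    var parts = [];
    for (var i = 0; i < bytes.length; i += chunkSize) {
      parts.push(String.fromCharCode.apply(null, bytes.subarray(i, i + chunkSize)));
    }
    return parts.join("");
  }
  ```
*/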
return b}var i=a("./support"),j=a("./base64"),k=a("./nodejsUtils"),l=a("core-js/library/fn/set-immediate"),m=a("./external");c.newBlob=function(a,b){c.checkSupport("blob");try{return new Blob([a],{type:b})}catch(d){try{var e=self.BlobBuilder||self.WebKitBlobBuilder||self.MozBlobBuilder||self.MSBlobBuilder,f=new e;return f.append(a),f.getBlob(b)}catch(d){throw new Error("Bug : can't construct the Blob.")}}};var n={stringifyByChunk:function(a,b,c){var d=[],e=0,f=a.length;if(f<=c)return String.fromCharCode.apply(null,a);for(;e<f;)"array"===b||"nodebuffer"===b?d.push(String.fromCharCode.apply(null,a.slice(e,Math.min(e+c,f)))):d.push(String.fromCharCode.apply(null,a.subarray(e,Math.min(e+c,f)))),e+=c;return d.join("")},stringifyByChar:function(a){for(var b="",c=0;c<a.length;c++)b+=String.fromCharCode(a[c]);return b},applyCanBeUsed:{uint8array:function(){try{return i.uint8array&&1===String.fromCharCode.apply(null,new Uint8Array(1)).length}catch(a){return!1}}(),nodebuffer:function(){try{return i.nodebuffer&&1===String.fromCharCode.apply(null,k.allocBuffer(1)).length}catch(a){return!1}}()}};c.applyFromCharCode=g;var o={};o.string={string:e,array:function(a){return f(a,new Array(a.length))},arraybuffer:function(a){return o.string.uint8array(a).buffer},uint8array:function(a){return f(a,new Uint8Array(a.length))},nodebuffer:function(a){return f(a,k.allocBuffer(a.length))}},o.array={string:g,array:e,arraybuffer:function(a){return new Uint8Array(a).buffer},uint8array:function(a){return new Uint8Array(a)},nodebuffer:function(a){return k.newBufferFrom(a)}},o.arraybuffer={string:function(a){return g(new Uint8Array(a))},array:function(a){return h(new Uint8Array(a),new Array(a.byteLength))},arraybuffer:e,uint8array:function(a){return new Uint8Array(a)},nodebuffer:function(a){return k.newBufferFrom(new Uint8Array(a))}},o.uint8array={string:g,array:function(a){return h(a,new Array(a.length))},arraybuffer:function(a){return a.buffer},uint8array:e,nodebuffer:function(a){return k.newBufferFrom(a)}},o.nodebuffer={string:g,array:function(a){return h(a,new Array(a.length))},arraybuffer:function(a){return o.nodebuffer.uint8array(a).buffer},uint8array:function(a){return h(a,new Uint8Array(a.length))},nodebuffer:e},c.transformTo=function(a,b){if(b||(b=""),!a)return b;c.checkSupport(a);var d=c.getTypeOf(b),e=o[d][a](b);return e},c.getTypeOf=function(a){return"string"==typeof a?"string":"[object Array]"===Object.prototype.toString.call(a)?"array":i.nodebuffer&&k.isBuffer(a)?"nodebuffer":i.uint8array&&a instanceof Uint8Array?"uint8array":i.arraybuffer&&a instanceof ArrayBuffer?"arraybuffer":void 0},c.checkSupport=function(a){var b=i[a.toLowerCase()];if(!b)throw new Error(a+" is not supported by this platform")},c.MAX_VALUE_16BITS=65535,c.MAX_VALUE_32BITS=-1,c.pretty=function(a){var b,c,d="";for(c=0;c<(a||"").length;c++)b=a.charCodeAt(c),d+="\\x"+(b<16?"0":"")+b.toString(16).toUpperCase();return d},c.delay=function(a,b,c){l(function(){a.apply(c||null,b||[])})},c.inherits=function(a,b){var c=function(){};c.prototype=b.prototype,a.prototype=new c},c.extend=function(){var a,b,c={};for(a=0;a<arguments.length;a++)for(b in arguments[a])arguments[a].hasOwnProperty(b)&&"undefined"==typeof c[b]&&(c[b]=arguments[a][b]);return c},c.prepareContent=function(a,b,e,f,g){var h=m.Promise.resolve(b).then(function(a){var b=i.blob&&(a instanceof Blob||["[object File]","[object Blob]"].indexOf(Object.prototype.toString.call(a))!==-1);return b&&"undefined"!=typeof FileReader?new m.Promise(function(b,c){var d=new 
FileReader;d.onload=function(a){b(a.target.result)},d.onerror=function(a){c(a.target.error)},d.readAsArrayBuffer(a)}):a});return h.then(function(b){var h=c.getTypeOf(b);return h?("arraybuffer"===h?b=c.transformTo("uint8array",b):"string"===h&&(g?b=j.decode(b):e&&f!==!0&&(b=d(b))),b):m.Promise.reject(new Error("Can't read the data of '"+a+"'. Is it in a supported JavaScript type (String, Blob, ArrayBuffer, etc) ?"))})}},{"./base64":1,"./external":6,"./nodejsUtils":14,"./support":30,"core-js/library/fn/set-immediate":36}],33:[function(a,b,c){"use strict";function d(a){this.files=[],this.loadOptions=a}var e=a("./reader/readerFor"),f=a("./utils"),g=a("./signature"),h=a("./zipEntry"),i=(a("./utf8"),a("./support"));d.prototype={checkSignature:function(a){if(!this.reader.readAndCheckSignature(a)){this.reader.index-=4;var b=this.reader.readString(4);throw new Error("Corrupted zip or bug: unexpected signature ("+f.pretty(b)+", expected "+f.pretty(a)+")")}},isSignature:function(a,b){var c=this.reader.index;this.reader.setIndex(a);var d=this.reader.readString(4),e=d===b;return this.reader.setIndex(c),e},readBlockEndOfCentral:function(){this.diskNumber=this.reader.readInt(2),this.diskWithCentralDirStart=this.reader.readInt(2),this.centralDirRecordsOnThisDisk=this.reader.readInt(2),this.centralDirRecords=this.reader.readInt(2),this.centralDirSize=this.reader.readInt(4),this.centralDirOffset=this.reader.readInt(4),this.zipCommentLength=this.reader.readInt(2);var a=this.reader.readData(this.zipCommentLength),b=i.uint8array?"uint8array":"array",c=f.transformTo(b,a);this.zipComment=this.loadOptions.decodeFileName(c)},readBlockZip64EndOfCentral:function(){this.zip64EndOfCentralSize=this.reader.readInt(8),this.reader.skip(4),this.diskNumber=this.reader.readInt(4),this.diskWithCentralDirStart=this.reader.readInt(4),this.centralDirRecordsOnThisDisk=this.reader.readInt(8),this.centralDirRecords=this.reader.readInt(8),this.centralDirSize=this.reader.readInt(8),this.centralDirOffset=this.reader.readInt(8),this.zip64ExtensibleData={};for(var a,b,c,d=this.zip64EndOfCentralSize-44,e=0;e<d;)a=this.reader.readInt(2),b=this.reader.readInt(4),c=this.reader.readData(b),this.zip64ExtensibleData[a]={id:a,length:b,value:c}},readBlockZip64EndOfCentralLocator:function(){if(this.diskWithZip64CentralDirStart=this.reader.readInt(4),this.relativeOffsetEndOfZip64CentralDir=this.reader.readInt(8),this.disksCount=this.reader.readInt(4),this.disksCount>1)throw new Error("Multi-volumes zip are not supported")},readLocalFiles:function(){var a,b;for(a=0;a<this.files.length;a++)b=this.files[a],this.reader.setIndex(b.localHeaderOffset),this.checkSignature(g.LOCAL_FILE_HEADER),b.readLocalPart(this.reader),b.handleUTF8(),b.processAttributes()},readCentralDir:function(){var a;for(this.reader.setIndex(this.centralDirOffset);this.reader.readAndCheckSignature(g.CENTRAL_FILE_HEADER);)a=new h({zip64:this.zip64},this.loadOptions),a.readCentralPart(this.reader),this.files.push(a);if(this.centralDirRecords!==this.files.length&&0!==this.centralDirRecords&&0===this.files.length)throw new Error("Corrupted zip or bug: expected "+this.centralDirRecords+" records in central dir, got "+this.files.length)},readEndOfCentral:function(){var a=this.reader.lastIndexOfSignature(g.CENTRAL_DIRECTORY_END);if(a<0){var b=!this.isSignature(0,g.LOCAL_FILE_HEADER);throw b?new Error("Can't find end of central directory : is this a zip file ? 
If it is, see https://stuk.github.io/jszip/documentation/howto/read_zip.html"):new Error("Corrupted zip: can't find end of central directory")}this.reader.setIndex(a);var c=a;if(this.checkSignature(g.CENTRAL_DIRECTORY_END),this.readBlockEndOfCentral(),this.diskNumber===f.MAX_VALUE_16BITS||this.diskWithCentralDirStart===f.MAX_VALUE_16BITS||this.centralDirRecordsOnThisDisk===f.MAX_VALUE_16BITS||this.centralDirRecords===f.MAX_VALUE_16BITS||this.centralDirSize===f.MAX_VALUE_32BITS||this.centralDirOffset===f.MAX_VALUE_32BITS){if(this.zip64=!0,a=this.reader.lastIndexOfSignature(g.ZIP64_CENTRAL_DIRECTORY_LOCATOR),a<0)throw new Error("Corrupted zip: can't find the ZIP64 end of central directory locator");if(this.reader.setIndex(a),this.checkSignature(g.ZIP64_CENTRAL_DIRECTORY_LOCATOR),this.readBlockZip64EndOfCentralLocator(),!this.isSignature(this.relativeOffsetEndOfZip64CentralDir,g.ZIP64_CENTRAL_DIRECTORY_END)&&(this.relativeOffsetEndOfZip64CentralDir=this.reader.lastIndexOfSignature(g.ZIP64_CENTRAL_DIRECTORY_END),this.relativeOffsetEndOfZip64CentralDir<0))throw new Error("Corrupted zip: can't find the ZIP64 end of central directory");this.reader.setIndex(this.relativeOffsetEndOfZip64CentralDir),this.checkSignature(g.ZIP64_CENTRAL_DIRECTORY_END),this.readBlockZip64EndOfCentral()}var d=this.centralDirOffset+this.centralDirSize;this.zip64&&(d+=20,d+=12+this.zip64EndOfCentralSize);var e=c-d;if(e>0)this.isSignature(c,g.CENTRAL_FILE_HEADER)||(this.reader.zero=e);else if(e<0)throw new Error("Corrupted zip: missing "+Math.abs(e)+" bytes.")},prepareReader:function(a){this.reader=e(a)},load:function(a){this.prepareReader(a),this.readEndOfCentral(),this.readCentralDir(),this.readLocalFiles()}},b.exports=d},{"./reader/readerFor":22,"./signature":23,"./support":30,"./utf8":31,"./utils":32,"./zipEntry":34}],34:[function(a,b,c){"use strict";function d(a,b){this.options=a,this.loadOptions=b}var e=a("./reader/readerFor"),f=a("./utils"),g=a("./compressedObject"),h=a("./crc32"),i=a("./utf8"),j=a("./compressions"),k=a("./support"),l=0,m=3,n=function(a){for(var b in j)if(j.hasOwnProperty(b)&&j[b].magic===a)return j[b];return null};d.prototype={isEncrypted:function(){return 1===(1&this.bitFlag)},useUTF8:function(){return 2048===(2048&this.bitFlag)},readLocalPart:function(a){var b,c;if(a.skip(22),this.fileNameLength=a.readInt(2),c=a.readInt(2),this.fileName=a.readData(this.fileNameLength),a.skip(c),this.compressedSize===-1||this.uncompressedSize===-1)throw new Error("Bug or corrupted zip : didn't get enough informations from the central directory (compressedSize === -1 || uncompressedSize === -1)");if(b=n(this.compressionMethod),null===b)throw new Error("Corrupted zip : compression "+f.pretty(this.compressionMethod)+" unknown (inner file : "+f.transformTo("string",this.fileName)+")");this.decompressed=new g(this.compressedSize,this.uncompressedSize,this.crc32,b,a.readData(this.compressedSize))},readCentralPart:function(a){this.versionMadeBy=a.readInt(2),a.skip(2),this.bitFlag=a.readInt(2),this.compressionMethod=a.readString(2),this.date=a.readDate(),this.crc32=a.readInt(4),this.compressedSize=a.readInt(4),this.uncompressedSize=a.readInt(4);var b=a.readInt(2);if(this.extraFieldsLength=a.readInt(2),this.fileCommentLength=a.readInt(2),this.diskNumberStart=a.readInt(2),this.internalFileAttributes=a.readInt(2),this.externalFileAttributes=a.readInt(4),this.localHeaderOffset=a.readInt(4),this.isEncrypted())throw new Error("Encrypted zip are not 
supported");a.skip(b),this.readExtraFields(a),this.parseZIP64ExtraField(a),this.fileComment=a.readData(this.fileCommentLength)},processAttributes:function(){this.unixPermissions=null,this.dosPermissions=null;var a=this.versionMadeBy>>8;this.dir=!!(16&this.externalFileAttributes),a===l&&(this.dosPermissions=63&this.externalFileAttributes),a===m&&(this.unixPermissions=this.externalFileAttributes>>16&65535),this.dir||"/"!==this.fileNameStr.slice(-1)||(this.dir=!0)},parseZIP64ExtraField:function(a){if(this.extraFields[1]){var b=e(this.extraFields[1].value);this.uncompressedSize===f.MAX_VALUE_32BITS&&(this.uncompressedSize=b.readInt(8)),this.compressedSize===f.MAX_VALUE_32BITS&&(this.compressedSize=b.readInt(8)),this.localHeaderOffset===f.MAX_VALUE_32BITS&&(this.localHeaderOffset=b.readInt(8)),this.diskNumberStart===f.MAX_VALUE_32BITS&&(this.diskNumberStart=b.readInt(4))}},readExtraFields:function(a){var b,c,d,e=a.index+this.extraFieldsLength;for(this.extraFields||(this.extraFields={});a.index<e;)b=a.readInt(2),c=a.readInt(2),d=a.readData(c),this.extraFields[b]={id:b,length:c,value:d}},handleUTF8:function(){var a=k.uint8array?"uint8array":"array";if(this.useUTF8())this.fileNameStr=i.utf8decode(this.fileName),this.fileCommentStr=i.utf8decode(this.fileComment);else{var b=this.findExtraFieldUnicodePath();if(null!==b)this.fileNameStr=b;else{var c=f.transformTo(a,this.fileName);this.fileNameStr=this.loadOptions.decodeFileName(c)}var d=this.findExtraFieldUnicodeComment();if(null!==d)this.fileCommentStr=d;else{var e=f.transformTo(a,this.fileComment);this.fileCommentStr=this.loadOptions.decodeFileName(e)}}},findExtraFieldUnicodePath:function(){var a=this.extraFields[28789];if(a){var b=e(a.value);return 1!==b.readInt(1)?null:h(this.fileName)!==b.readInt(4)?null:i.utf8decode(b.readData(a.length-5))}return null},findExtraFieldUnicodeComment:function(){var a=this.extraFields[25461];if(a){var b=e(a.value);return 1!==b.readInt(1)?null:h(this.fileComment)!==b.readInt(4)?null:i.utf8decode(b.readData(a.length-5))}return null}},b.exports=d},{"./compressedObject":2,"./compressions":3,"./crc32":4,"./reader/readerFor":22,"./support":30,"./utf8":31,"./utils":32}],35:[function(a,b,c){"use strict";var d=a("./stream/StreamHelper"),e=a("./stream/DataWorker"),f=a("./utf8"),g=a("./compressedObject"),h=a("./stream/GenericWorker"),i=function(a,b,c){this.name=a,this.dir=c.dir,this.date=c.date,this.comment=c.comment,this.unixPermissions=c.unixPermissions,this.dosPermissions=c.dosPermissions,this._data=b,this._dataBinary=c.binary,this.options={compression:c.compression,compressionOptions:c.compressionOptions}};i.prototype={internalStream:function(a){var b=null,c="string";try{if(!a)throw new Error("No output type specified.");c=a.toLowerCase();var e="string"===c||"text"===c;"binarystring"!==c&&"text"!==c||(c="string"),b=this._decompressWorker();var g=!this._dataBinary;g&&!e&&(b=b.pipe(new f.Utf8EncodeWorker)),!g&&e&&(b=b.pipe(new f.Utf8DecodeWorker))}catch(i){b=new h("error"),b.error(i)}return new d(b,c,"")},async:function(a,b){return this.internalStream(a).accumulate(b)},nodeStream:function(a,b){return this.internalStream(a||"nodebuffer").toNodejsStream(b)},_compressWorker:function(a,b){if(this._data instanceof g&&this._data.compression.magic===a.magic)return this._data.getCompressedWorker();var c=this._decompressWorker();return this._dataBinary||(c=c.pipe(new f.Utf8EncodeWorker)),g.createWorkerFrom(c,a,b)},_decompressWorker:function(){return this._data instanceof g?this._data.getContentWorker():this._data instanceof 
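/*
  Module 35 (ZipObject) holds one entry's metadata and exposes its content
  only through streaming accessors: internalStream(type), async(type) which
  accumulates into a promise, and nodeStream(); the old synchronous
  asText/asBinary/... accessors below just throw. Typical use, assuming a
  loaded `zip` instance that actually contains the entry:
  ```javascript
  zip.file("hello.txt").async("string").then(function (text) {
    console.log(text);
  });
  ```
*/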
h?this._data:new e(this._data)}};for(var j=["asText","asBinary","asNodeBuffer","asUint8Array","asArrayBuffer"],k=function(){throw new Error("This method has been removed in JSZip 3.0, please check the upgrade guide.")},l=0;l<j.length;l++)i.prototype[j[l]]=k;b.exports=i},{"./compressedObject":2,"./stream/DataWorker":27,"./stream/GenericWorker":28,"./stream/StreamHelper":29,"./utf8":31}],36:[function(a,b,c){a("../modules/web.immediate"),b.exports=a("../modules/_core").setImmediate},{"../modules/_core":40,"../modules/web.immediate":56}],37:[function(a,b,c){b.exports=function(a){if("function"!=typeof a)throw TypeError(a+" is not a function!");return a}},{}],38:[function(a,b,c){var d=a("./_is-object");b.exports=function(a){if(!d(a))throw TypeError(a+" is not an object!");return a}},{"./_is-object":51}],39:[function(a,b,c){var d={}.toString;b.exports=function(a){return d.call(a).slice(8,-1)}},{}],40:[function(a,b,c){var d=b.exports={version:"2.3.0"};"number"==typeof __e&&(__e=d)},{}],41:[function(a,b,c){var d=a("./_a-function");b.exports=function(a,b,c){if(d(a),void 0===b)return a;switch(c){case 1:return function(c){return a.call(b,c)};case 2:return function(c,d){return a.call(b,c,d)};case 3:return function(c,d,e){return a.call(b,c,d,e)}}return function(){return a.apply(b,arguments)}}},{"./_a-function":37}],42:[function(a,b,c){b.exports=!a("./_fails")(function(){return 7!=Object.defineProperty({},"a",{get:function(){return 7}}).a})},{"./_fails":45}],43:[function(a,b,c){var d=a("./_is-object"),e=a("./_global").document,f=d(e)&&d(e.createElement);b.exports=function(a){return f?e.createElement(a):{}}},{"./_global":46,"./_is-object":51}],44:[function(a,b,c){var d=a("./_global"),e=a("./_core"),f=a("./_ctx"),g=a("./_hide"),h="prototype",i=function(a,b,c){var j,k,l,m=a&i.F,n=a&i.G,o=a&i.S,p=a&i.P,q=a&i.B,r=a&i.W,s=n?e:e[b]||(e[b]={}),t=s[h],u=n?d:o?d[b]:(d[b]||{})[h];n&&(c=b);for(j in c)k=!m&&u&&void 0!==u[j],k&&j in s||(l=k?u[j]:c[j],s[j]=n&&"function"!=typeof u[j]?c[j]:q&&k?f(l,d):r&&u[j]==l?function(a){var b=function(b,c,d){if(this instanceof a){switch(arguments.length){case 0:return new a;case 1:return new a(b);case 2:return new a(b,c)}return new a(b,c,d)}return a.apply(this,arguments)};return b[h]=a[h],b}(l):p&&"function"==typeof l?f(Function.call,l):l,p&&((s.virtual||(s.virtual={}))[j]=l,a&i.R&&t&&!t[j]&&g(t,j,l)))};i.F=1,i.G=2,i.S=4,i.P=8,i.B=16,i.W=32,i.U=64,i.R=128,b.exports=i},{"./_core":40,"./_ctx":41,"./_global":46,"./_hide":47}],45:[function(a,b,c){b.exports=function(a){try{return!!a()}catch(b){return!0}}},{}],46:[function(a,b,c){var d=b.exports="undefined"!=typeof window&&window.Math==Math?window:"undefined"!=typeof self&&self.Math==Math?self:Function("return this")();"number"==typeof __g&&(__g=d)},{}],47:[function(a,b,c){var d=a("./_object-dp"),e=a("./_property-desc");b.exports=a("./_descriptors")?function(a,b,c){return d.f(a,b,e(1,c))}:function(a,b,c){return a[b]=c,a}},{"./_descriptors":42,"./_object-dp":52,"./_property-desc":53}],48:[function(a,b,c){b.exports=a("./_global").document&&document.documentElement},{"./_global":46}],49:[function(a,b,c){b.exports=!a("./_descriptors")&&!a("./_fails")(function(){return 7!=Object.defineProperty(a("./_dom-create")("div"),"a",{get:function(){return 7}}).a})},{"./_descriptors":42,"./_dom-create":43,"./_fails":45}],50:[function(a,b,c){b.exports=function(a,b,c){var d=void 0===c;switch(b.length){case 0:return d?a():a.call(c);case 1:return d?a(b[0]):a.call(c,b[0]);case 2:return d?a(b[0],b[1]):a.call(c,b[0],b[1]);case 3:return 
d?a(b[0],b[1],b[2]):a.call(c,b[0],b[1],b[2]);case 4:return d?a(b[0],b[1],b[2],b[3]):a.call(c,b[0],b[1],b[2],b[3])}return a.apply(c,b)}},{}],51:[function(a,b,c){b.exports=function(a){return"object"==typeof a?null!==a:"function"==typeof a}},{}],52:[function(a,b,c){var d=a("./_an-object"),e=a("./_ie8-dom-define"),f=a("./_to-primitive"),g=Object.defineProperty;c.f=a("./_descriptors")?Object.defineProperty:function(a,b,c){if(d(a),b=f(b,!0),d(c),e)try{return g(a,b,c)}catch(h){}if("get"in c||"set"in c)throw TypeError("Accessors not supported!");return"value"in c&&(a[b]=c.value),a}},{"./_an-object":38,"./_descriptors":42,"./_ie8-dom-define":49,"./_to-primitive":55}],53:[function(a,b,c){b.exports=function(a,b){return{enumerable:!(1&a),configurable:!(2&a),writable:!(4&a),value:b}}},{}],54:[function(a,b,c){var d,e,f,g=a("./_ctx"),h=a("./_invoke"),i=a("./_html"),j=a("./_dom-create"),k=a("./_global"),l=k.process,m=k.setImmediate,n=k.clearImmediate,o=k.MessageChannel,p=0,q={},r="onreadystatechange",s=function(){var a=+this;if(q.hasOwnProperty(a)){var b=q[a];delete q[a],b()}},t=function(a){s.call(a.data)};m&&n||(m=function(a){for(var b=[],c=1;arguments.length>c;)b.push(arguments[c++]);return q[++p]=function(){h("function"==typeof a?a:Function(a),b)},d(p),p},n=function(a){delete q[a]},"process"==a("./_cof")(l)?d=function(a){l.nextTick(g(s,a,1))}:o?(e=new o,f=e.port2,e.port1.onmessage=t,d=g(f.postMessage,f,1)):k.addEventListener&&"function"==typeof postMessage&&!k.importScripts?(d=function(a){k.postMessage(a+"","*")},k.addEventListener("message",t,!1)):d=r in j("script")?function(a){i.appendChild(j("script"))[r]=function(){i.removeChild(this),s.call(a)}}:function(a){setTimeout(g(s,a,1),0)}),b.exports={set:m,clear:n}},{"./_cof":39,"./_ctx":41,"./_dom-create":43,"./_global":46,"./_html":48,"./_invoke":50}],55:[function(a,b,c){var d=a("./_is-object");b.exports=function(a,b){if(!d(a))return a;var c,e;if(b&&"function"==typeof(c=a.toString)&&!d(e=c.call(a)))return e;if("function"==typeof(c=a.valueOf)&&!d(e=c.call(a)))return e;if(!b&&"function"==typeof(c=a.toString)&&!d(e=c.call(a)))return e;throw TypeError("Can't convert object to primitive value")}},{"./_is-object":51}],56:[function(a,b,c){var d=a("./_export"),e=a("./_task");d(d.G+d.B,{setImmediate:e.set,clearImmediate:e.clear})},{"./_export":44,"./_task":54}],57:[function(a,b,c){(function(a){"use strict";function c(){k=!0;for(var a,b,c=l.length;c;){for(b=l,l=[],a=-1;++a<c;)b[a]();c=l.length}k=!1}function d(a){1!==l.push(a)||k||e()}var e,f=a.MutationObserver||a.WebKitMutationObserver;if(f){var g=0,h=new f(c),i=a.document.createTextNode("");h.observe(i,{characterData:!0}),e=function(){i.data=g=++g%2}}else if(a.setImmediate||"undefined"==typeof a.MessageChannel)e="document"in a&&"onreadystatechange"in a.document.createElement("script")?function(){var b=a.document.createElement("script");b.onreadystatechange=function(){c(),b.onreadystatechange=null,b.parentNode.removeChild(b),b=null},a.document.documentElement.appendChild(b)}:function(){setTimeout(c,0)};else{var j=new a.MessageChannel;j.port1.onmessage=c,e=function(){j.port2.postMessage(0)}}var k,l=[];b.exports=d}).call(this,"undefined"!=typeof global?global:"undefined"!=typeof self?self:"undefined"!=typeof window?window:{})},{}],58:[function(a,b,c){"use strict";function d(){}function e(a){if("function"!=typeof a)throw new TypeError("resolver must be a function");this.state=s,this.queue=[],this.outcome=void 0,a!==d&&i(this,a)}function f(a,b,c){this.promise=a,"function"==typeof 
b&&(this.onFulfilled=b,this.callFulfilled=this.otherCallFulfilled),"function"==typeof c&&(this.onRejected=c,this.callRejected=this.otherCallRejected)}function g(a,b,c){o(function(){var d;try{d=b(c)}catch(e){return p.reject(a,e)}d===a?p.reject(a,new TypeError("Cannot resolve promise with itself")):p.resolve(a,d)})}function h(a){var b=a&&a.then;if(a&&("object"==typeof a||"function"==typeof a)&&"function"==typeof b)return function(){b.apply(a,arguments)}}function i(a,b){function c(b){f||(f=!0,p.reject(a,b))}function d(b){f||(f=!0,p.resolve(a,b))}function e(){b(d,c)}var f=!1,g=j(e);"error"===g.status&&c(g.value)}function j(a,b){var c={};try{c.value=a(b),c.status="success"}catch(d){c.status="error",c.value=d}return c}function k(a){return a instanceof this?a:p.resolve(new this(d),a)}function l(a){var b=new this(d);return p.reject(b,a)}function m(a){function b(a,b){function d(a){g[b]=a,++h!==e||f||(f=!0,p.resolve(j,g))}c.resolve(a).then(d,function(a){f||(f=!0,p.reject(j,a))})}var c=this;if("[object Array]"!==Object.prototype.toString.call(a))return this.reject(new TypeError("must be an array"));var e=a.length,f=!1;if(!e)return this.resolve([]);for(var g=new Array(e),h=0,i=-1,j=new this(d);++i<e;)b(a[i],i);return j}function n(a){function b(a){c.resolve(a).then(function(a){f||(f=!0,p.resolve(h,a))},function(a){f||(f=!0,p.reject(h,a))})}var c=this;if("[object Array]"!==Object.prototype.toString.call(a))return this.reject(new TypeError("must be an array"));var e=a.length,f=!1;if(!e)return this.resolve([]);for(var g=-1,h=new this(d);++g<e;)b(a[g]);return h}var o=a("immediate"),p={},q=["REJECTED"],r=["FULFILLED"],s=["PENDING"];b.exports=e,e.prototype["catch"]=function(a){return this.then(null,a)},e.prototype.then=function(a,b){if("function"!=typeof a&&this.state===r||"function"!=typeof b&&this.state===q)return this;var c=new this.constructor(d);if(this.state!==s){var e=this.state===r?a:b;g(c,e,this.outcome)}else this.queue.push(new f(c,a,b));return c},f.prototype.callFulfilled=function(a){p.resolve(this.promise,a)},f.prototype.otherCallFulfilled=function(a){g(this.promise,this.onFulfilled,a)},f.prototype.callRejected=function(a){p.reject(this.promise,a)},f.prototype.otherCallRejected=function(a){g(this.promise,this.onRejected,a)},p.resolve=function(a,b){var c=j(h,b);if("error"===c.status)return p.reject(a,c.value);var d=c.value;if(d)i(a,d);else{a.state=r,a.outcome=b;for(var e=-1,f=a.queue.length;++e<f;)a.queue[e].callFulfilled(b)}return a},p.reject=function(a,b){a.state=q,a.outcome=b;for(var c=-1,d=a.queue.length;++c<d;)a.queue[c].callRejected(b);return a},e.resolve=k,e.reject=l,e.all=m,e.race=n},{immediate:57}],59:[function(a,b,c){"use strict";var d=a("./lib/utils/common").assign,e=a("./lib/deflate"),f=a("./lib/inflate"),g=a("./lib/zlib/constants"),h={};d(h,e,f,g),b.exports=h},{"./lib/deflate":60,"./lib/inflate":61,"./lib/utils/common":62,"./lib/zlib/constants":65}],60:[function(a,b,c){"use strict";function d(a){if(!(this instanceof d))return new d(a);this.options=i.assign({level:s,method:u,chunkSize:16384,windowBits:15,memLevel:8,strategy:t,to:""},a||{});var b=this.options;b.raw&&b.windowBits>0?b.windowBits=-b.windowBits:b.gzip&&b.windowBits>0&&b.windowBits<16&&(b.windowBits+=16),this.err=0,this.msg="",this.ended=!1,this.chunks=[],this.strm=new l,this.strm.avail_out=0;var c=h.deflateInit2(this.strm,b.level,b.method,b.windowBits,b.memLevel,b.strategy);if(c!==p)throw new Error(k[c]);if(b.header&&h.deflateSetHeader(this.strm,b.header),b.dictionary){var e;if(e="string"==typeof 
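/*
  Modules 59-61 are the bundled pako build: Deflate/Inflate wrappers around
  the zlib port plus one-shot helpers (deflate, deflateRaw, gzip, inflate,
  inflateRaw, ungzip). JSZip's FlateWorker (module 7) drives the streaming
  API with { raw: true } and forwards the compression level, so at the public
  level this code is reached via something like (assuming a JSZip instance
  `zip`):
  ```javascript
  zip.generateAsync({
    type: "blob",
    compression: "DEFLATE",
    compressionOptions: { level: 6 } // handed to pako's raw Deflate
  });
  ```
*/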
b.dictionary?j.string2buf(b.dictionary):"[object ArrayBuffer]"===m.call(b.dictionary)?new Uint8Array(b.dictionary):b.dictionary,c=h.deflateSetDictionary(this.strm,e),c!==p)throw new Error(k[c]);this._dict_set=!0}}function e(a,b){var c=new d(b);if(c.push(a,!0),c.err)throw c.msg||k[c.err];return c.result}function f(a,b){return b=b||{},b.raw=!0,e(a,b)}function g(a,b){return b=b||{},b.gzip=!0,e(a,b)}var h=a("./zlib/deflate"),i=a("./utils/common"),j=a("./utils/strings"),k=a("./zlib/messages"),l=a("./zlib/zstream"),m=Object.prototype.toString,n=0,o=4,p=0,q=1,r=2,s=-1,t=0,u=8;d.prototype.push=function(a,b){var c,d,e=this.strm,f=this.options.chunkSize;if(this.ended)return!1;d=b===~~b?b:b===!0?o:n,"string"==typeof a?e.input=j.string2buf(a):"[object ArrayBuffer]"===m.call(a)?e.input=new Uint8Array(a):e.input=a,e.next_in=0,e.avail_in=e.input.length;do{if(0===e.avail_out&&(e.output=new i.Buf8(f),e.next_out=0,e.avail_out=f),c=h.deflate(e,d),c!==q&&c!==p)return this.onEnd(c),this.ended=!0,!1;0!==e.avail_out&&(0!==e.avail_in||d!==o&&d!==r)||("string"===this.options.to?this.onData(j.buf2binstring(i.shrinkBuf(e.output,e.next_out))):this.onData(i.shrinkBuf(e.output,e.next_out)))}while((e.avail_in>0||0===e.avail_out)&&c!==q);return d===o?(c=h.deflateEnd(this.strm),this.onEnd(c),this.ended=!0,c===p):d!==r||(this.onEnd(p),e.avail_out=0,!0)},d.prototype.onData=function(a){this.chunks.push(a)},d.prototype.onEnd=function(a){a===p&&("string"===this.options.to?this.result=this.chunks.join(""):this.result=i.flattenChunks(this.chunks)),this.chunks=[],this.err=a,this.msg=this.strm.msg},c.Deflate=d,c.deflate=e,c.deflateRaw=f,c.gzip=g},{"./utils/common":62,"./utils/strings":63,"./zlib/deflate":67,"./zlib/messages":72,"./zlib/zstream":74}],61:[function(a,b,c){"use strict";function d(a){if(!(this instanceof d))return new d(a);this.options=h.assign({chunkSize:16384,windowBits:0,to:""},a||{});var b=this.options;b.raw&&b.windowBits>=0&&b.windowBits<16&&(b.windowBits=-b.windowBits,0===b.windowBits&&(b.windowBits=-15)),!(b.windowBits>=0&&b.windowBits<16)||a&&a.windowBits||(b.windowBits+=32),b.windowBits>15&&b.windowBits<48&&0===(15&b.windowBits)&&(b.windowBits|=15),this.err=0,this.msg="",this.ended=!1,this.chunks=[],this.strm=new l,this.strm.avail_out=0;var c=g.inflateInit2(this.strm,b.windowBits);if(c!==j.Z_OK)throw new Error(k[c]);this.header=new m,g.inflateGetHeader(this.strm,this.header)}function e(a,b){var c=new d(b);if(c.push(a,!0),c.err)throw c.msg||k[c.err];return c.result}function f(a,b){return b=b||{},b.raw=!0,e(a,b)}var g=a("./zlib/inflate"),h=a("./utils/common"),i=a("./utils/strings"),j=a("./zlib/constants"),k=a("./zlib/messages"),l=a("./zlib/zstream"),m=a("./zlib/gzheader"),n=Object.prototype.toString;d.prototype.push=function(a,b){var c,d,e,f,k,l,m=this.strm,o=this.options.chunkSize,p=this.options.dictionary,q=!1;if(this.ended)return!1;d=b===~~b?b:b===!0?j.Z_FINISH:j.Z_NO_FLUSH,"string"==typeof a?m.input=i.binstring2buf(a):"[object ArrayBuffer]"===n.call(a)?m.input=new Uint8Array(a):m.input=a,m.next_in=0,m.avail_in=m.input.length;do{if(0===m.avail_out&&(m.output=new h.Buf8(o),m.next_out=0,m.avail_out=o),c=g.inflate(m,j.Z_NO_FLUSH),c===j.Z_NEED_DICT&&p&&(l="string"==typeof p?i.string2buf(p):"[object ArrayBuffer]"===n.call(p)?new Uint8Array(p):p,c=g.inflateSetDictionary(this.strm,l)),c===j.Z_BUF_ERROR&&q===!0&&(c=j.Z_OK,q=!1),c!==j.Z_STREAM_END&&c!==j.Z_OK)return 
this.onEnd(c),this.ended=!0,!1;m.next_out&&(0!==m.avail_out&&c!==j.Z_STREAM_END&&(0!==m.avail_in||d!==j.Z_FINISH&&d!==j.Z_SYNC_FLUSH)||("string"===this.options.to?(e=i.utf8border(m.output,m.next_out),f=m.next_out-e,k=i.buf2string(m.output,e),m.next_out=f,m.avail_out=o-f,f&&h.arraySet(m.output,m.output,e,f,0),this.onData(k)):this.onData(h.shrinkBuf(m.output,m.next_out)))),0===m.avail_in&&0===m.avail_out&&(q=!0)}while((m.avail_in>0||0===m.avail_out)&&c!==j.Z_STREAM_END);return c===j.Z_STREAM_END&&(d=j.Z_FINISH),d===j.Z_FINISH?(c=g.inflateEnd(this.strm),this.onEnd(c),this.ended=!0,c===j.Z_OK):d!==j.Z_SYNC_FLUSH||(this.onEnd(j.Z_OK),m.avail_out=0,!0)},d.prototype.onData=function(a){this.chunks.push(a)},d.prototype.onEnd=function(a){a===j.Z_OK&&("string"===this.options.to?this.result=this.chunks.join(""):this.result=h.flattenChunks(this.chunks)),this.chunks=[],this.err=a,this.msg=this.strm.msg},c.Inflate=d,c.inflate=e,c.inflateRaw=f,c.ungzip=e},{"./utils/common":62,"./utils/strings":63,"./zlib/constants":65,"./zlib/gzheader":68,"./zlib/inflate":70,"./zlib/messages":72,"./zlib/zstream":74}],62:[function(a,b,c){"use strict";var d="undefined"!=typeof Uint8Array&&"undefined"!=typeof Uint16Array&&"undefined"!=typeof Int32Array;c.assign=function(a){for(var b=Array.prototype.slice.call(arguments,1);b.length;){var c=b.shift();if(c){if("object"!=typeof c)throw new TypeError(c+"must be non-object");for(var d in c)c.hasOwnProperty(d)&&(a[d]=c[d])}}return a},c.shrinkBuf=function(a,b){return a.length===b?a:a.subarray?a.subarray(0,b):(a.length=b,a)};var e={arraySet:function(a,b,c,d,e){if(b.subarray&&a.subarray)return void a.set(b.subarray(c,c+d),e);for(var f=0;f<d;f++)a[e+f]=b[c+f]},flattenChunks:function(a){var b,c,d,e,f,g;for(d=0,b=0,c=a.length;b<c;b++)d+=a[b].length;for(g=new Uint8Array(d),e=0,b=0,c=a.length;b<c;b++)f=a[b],g.set(f,e),e+=f.length;return g}},f={arraySet:function(a,b,c,d,e){for(var f=0;f<d;f++)a[e+f]=b[c+f]},flattenChunks:function(a){return[].concat.apply([],a)}};c.setTyped=function(a){a?(c.Buf8=Uint8Array,c.Buf16=Uint16Array,c.Buf32=Int32Array,c.assign(c,e)):(c.Buf8=Array,c.Buf16=Array,c.Buf32=Array,c.assign(c,f))},c.setTyped(d)},{}],63:[function(a,b,c){"use strict";function d(a,b){if(b<65537&&(a.subarray&&g||!a.subarray&&f))return String.fromCharCode.apply(null,e.shrinkBuf(a,b));for(var c="",d=0;d<b;d++)c+=String.fromCharCode(a[d]);return c}var e=a("./common"),f=!0,g=!0;try{String.fromCharCode.apply(null,[0])}catch(h){f=!1}try{String.fromCharCode.apply(null,new Uint8Array(1))}catch(h){g=!1}for(var i=new e.Buf8(256),j=0;j<256;j++)i[j]=j>=252?6:j>=248?5:j>=240?4:j>=224?3:j>=192?2:1;i[254]=i[254]=1,c.string2buf=function(a){var b,c,d,f,g,h=a.length,i=0;for(f=0;f<h;f++)c=a.charCodeAt(f),55296===(64512&c)&&f+1<h&&(d=a.charCodeAt(f+1),56320===(64512&d)&&(c=65536+(c-55296<<10)+(d-56320),f++)),i+=c<128?1:c<2048?2:c<65536?3:4;for(b=new e.Buf8(i),g=0,f=0;g<i;f++)c=a.charCodeAt(f),55296===(64512&c)&&f+1<h&&(d=a.charCodeAt(f+1),56320===(64512&d)&&(c=65536+(c-55296<<10)+(d-56320),f++)),c<128?b[g++]=c:c<2048?(b[g++]=192|c>>>6,b[g++]=128|63&c):c<65536?(b[g++]=224|c>>>12,b[g++]=128|c>>>6&63,b[g++]=128|63&c):(b[g++]=240|c>>>18,b[g++]=128|c>>>12&63,b[g++]=128|c>>>6&63,b[g++]=128|63&c);return b},c.buf2binstring=function(a){return d(a,a.length)},c.binstring2buf=function(a){for(var b=new e.Buf8(a.length),c=0,d=b.length;c<d;c++)b[c]=a.charCodeAt(c);return b},c.buf2string=function(a,b){var c,e,f,g,h=b||a.length,j=new Array(2*h);for(e=0,c=0;c<h;)if(f=a[c++],f<128)j[e++]=f;else 
if(g=i[f],g>4)j[e++]=65533,c+=g-1;else{for(f&=2===g?31:3===g?15:7;g>1&&c<h;)f=f<<6|63&a[c++],g--;g>1?j[e++]=65533:f<65536?j[e++]=f:(f-=65536,j[e++]=55296|f>>10&1023,j[e++]=56320|1023&f)}return d(j,e)},c.utf8border=function(a,b){var c;for(b=b||a.length,b>a.length&&(b=a.length),c=b-1;c>=0&&128===(192&a[c]);)c--;return c<0?b:0===c?b:c+i[a[c]]>b?c:b}},{"./common":62}],64:[function(a,b,c){"use strict";function d(a,b,c,d){for(var e=65535&a|0,f=a>>>16&65535|0,g=0;0!==c;){g=c>2e3?2e3:c,c-=g;do e=e+b[d++]|0,f=f+e|0;while(--g);e%=65521,f%=65521}return e|f<<16|0;
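/*
  Module 64 is Adler-32: s1 accumulates the bytes and s2 accumulates the
  running s1 values, both modulo 65521, processed in batches of at most 2000
  bytes so the modulo is taken rarely. Module 66 below builds the usual
  CRC-32 table from the reflected polynomial 0xEDB88320 (3988292384). A
  reference, unoptimized Adler-32 for comparison, starting from the
  conventional seed of 1:
  ```javascript
  function adler32(bytes) {
    var s1 = 1, s2 = 0;
    for (var i = 0; i < bytes.length; i++) {
      s1 = (s1 + bytes[i]) % 65521;
      s2 = (s2 + s1) % 65521;
    }
    return ((s2 << 16) | s1) >>> 0;
  }
  ```
*/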
}b.exports=d},{}],65:[function(a,b,c){"use strict";b.exports={Z_NO_FLUSH:0,Z_PARTIAL_FLUSH:1,Z_SYNC_FLUSH:2,Z_FULL_FLUSH:3,Z_FINISH:4,Z_BLOCK:5,Z_TREES:6,Z_OK:0,Z_STREAM_END:1,Z_NEED_DICT:2,Z_ERRNO:-1,Z_STREAM_ERROR:-2,Z_DATA_ERROR:-3,Z_BUF_ERROR:-5,Z_NO_COMPRESSION:0,Z_BEST_SPEED:1,Z_BEST_COMPRESSION:9,Z_DEFAULT_COMPRESSION:-1,Z_FILTERED:1,Z_HUFFMAN_ONLY:2,Z_RLE:3,Z_FIXED:4,Z_DEFAULT_STRATEGY:0,Z_BINARY:0,Z_TEXT:1,Z_UNKNOWN:2,Z_DEFLATED:8}},{}],66:[function(a,b,c){"use strict";function d(){for(var a,b=[],c=0;c<256;c++){a=c;for(var d=0;d<8;d++)a=1&a?3988292384^a>>>1:a>>>1;b[c]=a}return b}function e(a,b,c,d){var e=f,g=d+c;a^=-1;for(var h=d;h<g;h++)a=a>>>8^e[255&(a^b[h])];return a^-1}var f=d();b.exports=e},{}],67:[function(a,b,c){"use strict";function d(a,b){return a.msg=I[b],b}function e(a){return(a<<1)-(a>4?9:0)}function f(a){for(var b=a.length;--b>=0;)a[b]=0}function g(a){var b=a.state,c=b.pending;c>a.avail_out&&(c=a.avail_out),0!==c&&(E.arraySet(a.output,b.pending_buf,b.pending_out,c,a.next_out),a.next_out+=c,b.pending_out+=c,a.total_out+=c,a.avail_out-=c,b.pending-=c,0===b.pending&&(b.pending_out=0))}function h(a,b){F._tr_flush_block(a,a.block_start>=0?a.block_start:-1,a.strstart-a.block_start,b),a.block_start=a.strstart,g(a.strm)}function i(a,b){a.pending_buf[a.pending++]=b}function j(a,b){a.pending_buf[a.pending++]=b>>>8&255,a.pending_buf[a.pending++]=255&b}function k(a,b,c,d){var e=a.avail_in;return e>d&&(e=d),0===e?0:(a.avail_in-=e,E.arraySet(b,a.input,a.next_in,e,c),1===a.state.wrap?a.adler=G(a.adler,b,e,c):2===a.state.wrap&&(a.adler=H(a.adler,b,e,c)),a.next_in+=e,a.total_in+=e,e)}function l(a,b){var c,d,e=a.max_chain_length,f=a.strstart,g=a.prev_length,h=a.nice_match,i=a.strstart>a.w_size-la?a.strstart-(a.w_size-la):0,j=a.window,k=a.w_mask,l=a.prev,m=a.strstart+ka,n=j[f+g-1],o=j[f+g];a.prev_length>=a.good_match&&(e>>=2),h>a.lookahead&&(h=a.lookahead);do if(c=b,j[c+g]===o&&j[c+g-1]===n&&j[c]===j[f]&&j[++c]===j[f+1]){f+=2,c++;do;while(j[++f]===j[++c]&&j[++f]===j[++c]&&j[++f]===j[++c]&&j[++f]===j[++c]&&j[++f]===j[++c]&&j[++f]===j[++c]&&j[++f]===j[++c]&&j[++f]===j[++c]&&f<m);if(d=ka-(m-f),f=m-ka,d>g){if(a.match_start=b,g=d,d>=h)break;n=j[f+g-1],o=j[f+g]}}while((b=l[b&k])>i&&0!==--e);return g<=a.lookahead?g:a.lookahead}function m(a){var b,c,d,e,f,g=a.w_size;do{if(e=a.window_size-a.lookahead-a.strstart,a.strstart>=g+(g-la)){E.arraySet(a.window,a.window,g,g,0),a.match_start-=g,a.strstart-=g,a.block_start-=g,c=a.hash_size,b=c;do d=a.head[--b],a.head[b]=d>=g?d-g:0;while(--c);c=g,b=c;do d=a.prev[--b],a.prev[b]=d>=g?d-g:0;while(--c);e+=g}if(0===a.strm.avail_in)break;if(c=k(a.strm,a.window,a.strstart+a.lookahead,e),a.lookahead+=c,a.lookahead+a.insert>=ja)for(f=a.strstart-a.insert,a.ins_h=a.window[f],a.ins_h=(a.ins_h<<a.hash_shift^a.window[f+1])&a.hash_mask;a.insert&&(a.ins_h=(a.ins_h<<a.hash_shift^a.window[f+ja-1])&a.hash_mask,a.prev[f&a.w_mask]=a.head[a.ins_h],a.head[a.ins_h]=f,f++,a.insert--,!(a.lookahead+a.insert<ja)););}while(a.lookahead<la&&0!==a.strm.avail_in)}function n(a,b){var c=65535;for(c>a.pending_buf_size-5&&(c=a.pending_buf_size-5);;){if(a.lookahead<=1){if(m(a),0===a.lookahead&&b===J)return ua;if(0===a.lookahead)break}a.strstart+=a.lookahead,a.lookahead=0;var d=a.block_start+c;if((0===a.strstart||a.strstart>=d)&&(a.lookahead=a.strstart-d,a.strstart=d,h(a,!1),0===a.strm.avail_out))return ua;if(a.strstart-a.block_start>=a.w_size-la&&(h(a,!1),0===a.strm.avail_out))return ua}return 
a.insert=0,b===M?(h(a,!0),0===a.strm.avail_out?wa:xa):a.strstart>a.block_start&&(h(a,!1),0===a.strm.avail_out)?ua:ua}function o(a,b){for(var c,d;;){if(a.lookahead<la){if(m(a),a.lookahead<la&&b===J)return ua;if(0===a.lookahead)break}if(c=0,a.lookahead>=ja&&(a.ins_h=(a.ins_h<<a.hash_shift^a.window[a.strstart+ja-1])&a.hash_mask,c=a.prev[a.strstart&a.w_mask]=a.head[a.ins_h],a.head[a.ins_h]=a.strstart),0!==c&&a.strstart-c<=a.w_size-la&&(a.match_length=l(a,c)),a.match_length>=ja)if(d=F._tr_tally(a,a.strstart-a.match_start,a.match_length-ja),a.lookahead-=a.match_length,a.match_length<=a.max_lazy_match&&a.lookahead>=ja){a.match_length--;do a.strstart++,a.ins_h=(a.ins_h<<a.hash_shift^a.window[a.strstart+ja-1])&a.hash_mask,c=a.prev[a.strstart&a.w_mask]=a.head[a.ins_h],a.head[a.ins_h]=a.strstart;while(0!==--a.match_length);a.strstart++}else a.strstart+=a.match_length,a.match_length=0,a.ins_h=a.window[a.strstart],a.ins_h=(a.ins_h<<a.hash_shift^a.window[a.strstart+1])&a.hash_mask;else d=F._tr_tally(a,0,a.window[a.strstart]),a.lookahead--,a.strstart++;if(d&&(h(a,!1),0===a.strm.avail_out))return ua}return a.insert=a.strstart<ja-1?a.strstart:ja-1,b===M?(h(a,!0),0===a.strm.avail_out?wa:xa):a.last_lit&&(h(a,!1),0===a.strm.avail_out)?ua:va}function p(a,b){for(var c,d,e;;){if(a.lookahead<la){if(m(a),a.lookahead<la&&b===J)return ua;if(0===a.lookahead)break}if(c=0,a.lookahead>=ja&&(a.ins_h=(a.ins_h<<a.hash_shift^a.window[a.strstart+ja-1])&a.hash_mask,c=a.prev[a.strstart&a.w_mask]=a.head[a.ins_h],a.head[a.ins_h]=a.strstart),a.prev_length=a.match_length,a.prev_match=a.match_start,a.match_length=ja-1,0!==c&&a.prev_length<a.max_lazy_match&&a.strstart-c<=a.w_size-la&&(a.match_length=l(a,c),a.match_length<=5&&(a.strategy===U||a.match_length===ja&&a.strstart-a.match_start>4096)&&(a.match_length=ja-1)),a.prev_length>=ja&&a.match_length<=a.prev_length){e=a.strstart+a.lookahead-ja,d=F._tr_tally(a,a.strstart-1-a.prev_match,a.prev_length-ja),a.lookahead-=a.prev_length-1,a.prev_length-=2;do++a.strstart<=e&&(a.ins_h=(a.ins_h<<a.hash_shift^a.window[a.strstart+ja-1])&a.hash_mask,c=a.prev[a.strstart&a.w_mask]=a.head[a.ins_h],a.head[a.ins_h]=a.strstart);while(0!==--a.prev_length);if(a.match_available=0,a.match_length=ja-1,a.strstart++,d&&(h(a,!1),0===a.strm.avail_out))return ua}else if(a.match_available){if(d=F._tr_tally(a,0,a.window[a.strstart-1]),d&&h(a,!1),a.strstart++,a.lookahead--,0===a.strm.avail_out)return ua}else a.match_available=1,a.strstart++,a.lookahead--}return a.match_available&&(d=F._tr_tally(a,0,a.window[a.strstart-1]),a.match_available=0),a.insert=a.strstart<ja-1?a.strstart:ja-1,b===M?(h(a,!0),0===a.strm.avail_out?wa:xa):a.last_lit&&(h(a,!1),0===a.strm.avail_out)?ua:va}function q(a,b){for(var c,d,e,f,g=a.window;;){if(a.lookahead<=ka){if(m(a),a.lookahead<=ka&&b===J)return ua;if(0===a.lookahead)break}if(a.match_length=0,a.lookahead>=ja&&a.strstart>0&&(e=a.strstart-1,d=g[e],d===g[++e]&&d===g[++e]&&d===g[++e])){f=a.strstart+ka;do;while(d===g[++e]&&d===g[++e]&&d===g[++e]&&d===g[++e]&&d===g[++e]&&d===g[++e]&&d===g[++e]&&d===g[++e]&&e<f);a.match_length=ka-(f-e),a.match_length>a.lookahead&&(a.match_length=a.lookahead)}if(a.match_length>=ja?(c=F._tr_tally(a,1,a.match_length-ja),a.lookahead-=a.match_length,a.strstart+=a.match_length,a.match_length=0):(c=F._tr_tally(a,0,a.window[a.strstart]),a.lookahead--,a.strstart++),c&&(h(a,!1),0===a.strm.avail_out))return ua}return a.insert=0,b===M?(h(a,!0),0===a.strm.avail_out?wa:xa):a.last_lit&&(h(a,!1),0===a.strm.avail_out)?ua:va}function r(a,b){for(var 
c;;){if(0===a.lookahead&&(m(a),0===a.lookahead)){if(b===J)return ua;break}if(a.match_length=0,c=F._tr_tally(a,0,a.window[a.strstart]),a.lookahead--,a.strstart++,c&&(h(a,!1),0===a.strm.avail_out))return ua}return a.insert=0,b===M?(h(a,!0),0===a.strm.avail_out?wa:xa):a.last_lit&&(h(a,!1),0===a.strm.avail_out)?ua:va}function s(a,b,c,d,e){this.good_length=a,this.max_lazy=b,this.nice_length=c,this.max_chain=d,this.func=e}function t(a){a.window_size=2*a.w_size,f(a.head),a.max_lazy_match=D[a.level].max_lazy,a.good_match=D[a.level].good_length,a.nice_match=D[a.level].nice_length,a.max_chain_length=D[a.level].max_chain,a.strstart=0,a.block_start=0,a.lookahead=0,a.insert=0,a.match_length=a.prev_length=ja-1,a.match_available=0,a.ins_h=0}function u(){this.strm=null,this.status=0,this.pending_buf=null,this.pending_buf_size=0,this.pending_out=0,this.pending=0,this.wrap=0,this.gzhead=null,this.gzindex=0,this.method=$,this.last_flush=-1,this.w_size=0,this.w_bits=0,this.w_mask=0,this.window=null,this.window_size=0,this.prev=null,this.head=null,this.ins_h=0,this.hash_size=0,this.hash_bits=0,this.hash_mask=0,this.hash_shift=0,this.block_start=0,this.match_length=0,this.prev_match=0,this.match_available=0,this.strstart=0,this.match_start=0,this.lookahead=0,this.prev_length=0,this.max_chain_length=0,this.max_lazy_match=0,this.level=0,this.strategy=0,this.good_match=0,this.nice_match=0,this.dyn_ltree=new E.Buf16(2*ha),this.dyn_dtree=new E.Buf16(2*(2*fa+1)),this.bl_tree=new E.Buf16(2*(2*ga+1)),f(this.dyn_ltree),f(this.dyn_dtree),f(this.bl_tree),this.l_desc=null,this.d_desc=null,this.bl_desc=null,this.bl_count=new E.Buf16(ia+1),this.heap=new E.Buf16(2*ea+1),f(this.heap),this.heap_len=0,this.heap_max=0,this.depth=new E.Buf16(2*ea+1),f(this.depth),this.l_buf=0,this.lit_bufsize=0,this.last_lit=0,this.d_buf=0,this.opt_len=0,this.static_len=0,this.matches=0,this.insert=0,this.bi_buf=0,this.bi_valid=0}function v(a){var b;return a&&a.state?(a.total_in=a.total_out=0,a.data_type=Z,b=a.state,b.pending=0,b.pending_out=0,b.wrap<0&&(b.wrap=-b.wrap),b.status=b.wrap?na:sa,a.adler=2===b.wrap?0:1,b.last_flush=J,F._tr_init(b),O):d(a,Q)}function w(a){var b=v(a);return b===O&&t(a.state),b}function x(a,b){return a&&a.state?2!==a.state.wrap?Q:(a.state.gzhead=b,O):Q}function y(a,b,c,e,f,g){if(!a)return Q;var h=1;if(b===T&&(b=6),e<0?(h=0,e=-e):e>15&&(h=2,e-=16),f<1||f>_||c!==$||e<8||e>15||b<0||b>9||g<0||g>X)return d(a,Q);8===e&&(e=9);var i=new u;return a.state=i,i.strm=a,i.wrap=h,i.gzhead=null,i.w_bits=e,i.w_size=1<<i.w_bits,i.w_mask=i.w_size-1,i.hash_bits=f+7,i.hash_size=1<<i.hash_bits,i.hash_mask=i.hash_size-1,i.hash_shift=~~((i.hash_bits+ja-1)/ja),i.window=new E.Buf8(2*i.w_size),i.head=new E.Buf16(i.hash_size),i.prev=new E.Buf16(i.w_size),i.lit_bufsize=1<<f+6,i.pending_buf_size=4*i.lit_bufsize,i.pending_buf=new E.Buf8(i.pending_buf_size),i.d_buf=1*i.lit_bufsize,i.l_buf=3*i.lit_bufsize,i.level=b,i.strategy=g,i.method=c,w(a)}function z(a,b){return y(a,b,$,aa,ba,Y)}function A(a,b){var c,h,k,l;if(!a||!a.state||b>N||b<0)return a?d(a,Q):Q;if(h=a.state,!a.output||!a.input&&0!==a.avail_in||h.status===ta&&b!==M)return 
d(a,0===a.avail_out?S:Q);if(h.strm=a,c=h.last_flush,h.last_flush=b,h.status===na)if(2===h.wrap)a.adler=0,i(h,31),i(h,139),i(h,8),h.gzhead?(i(h,(h.gzhead.text?1:0)+(h.gzhead.hcrc?2:0)+(h.gzhead.extra?4:0)+(h.gzhead.name?8:0)+(h.gzhead.comment?16:0)),i(h,255&h.gzhead.time),i(h,h.gzhead.time>>8&255),i(h,h.gzhead.time>>16&255),i(h,h.gzhead.time>>24&255),i(h,9===h.level?2:h.strategy>=V||h.level<2?4:0),i(h,255&h.gzhead.os),h.gzhead.extra&&h.gzhead.extra.length&&(i(h,255&h.gzhead.extra.length),i(h,h.gzhead.extra.length>>8&255)),h.gzhead.hcrc&&(a.adler=H(a.adler,h.pending_buf,h.pending,0)),h.gzindex=0,h.status=oa):(i(h,0),i(h,0),i(h,0),i(h,0),i(h,0),i(h,9===h.level?2:h.strategy>=V||h.level<2?4:0),i(h,ya),h.status=sa);else{var m=$+(h.w_bits-8<<4)<<8,n=-1;n=h.strategy>=V||h.level<2?0:h.level<6?1:6===h.level?2:3,m|=n<<6,0!==h.strstart&&(m|=ma),m+=31-m%31,h.status=sa,j(h,m),0!==h.strstart&&(j(h,a.adler>>>16),j(h,65535&a.adler)),a.adler=1}if(h.status===oa)if(h.gzhead.extra){for(k=h.pending;h.gzindex<(65535&h.gzhead.extra.length)&&(h.pending!==h.pending_buf_size||(h.gzhead.hcrc&&h.pending>k&&(a.adler=H(a.adler,h.pending_buf,h.pending-k,k)),g(a),k=h.pending,h.pending!==h.pending_buf_size));)i(h,255&h.gzhead.extra[h.gzindex]),h.gzindex++;h.gzhead.hcrc&&h.pending>k&&(a.adler=H(a.adler,h.pending_buf,h.pending-k,k)),h.gzindex===h.gzhead.extra.length&&(h.gzindex=0,h.status=pa)}else h.status=pa;if(h.status===pa)if(h.gzhead.name){k=h.pending;do{if(h.pending===h.pending_buf_size&&(h.gzhead.hcrc&&h.pending>k&&(a.adler=H(a.adler,h.pending_buf,h.pending-k,k)),g(a),k=h.pending,h.pending===h.pending_buf_size)){l=1;break}l=h.gzindex<h.gzhead.name.length?255&h.gzhead.name.charCodeAt(h.gzindex++):0,i(h,l)}while(0!==l);h.gzhead.hcrc&&h.pending>k&&(a.adler=H(a.adler,h.pending_buf,h.pending-k,k)),0===l&&(h.gzindex=0,h.status=qa)}else h.status=qa;if(h.status===qa)if(h.gzhead.comment){k=h.pending;do{if(h.pending===h.pending_buf_size&&(h.gzhead.hcrc&&h.pending>k&&(a.adler=H(a.adler,h.pending_buf,h.pending-k,k)),g(a),k=h.pending,h.pending===h.pending_buf_size)){l=1;break}l=h.gzindex<h.gzhead.comment.length?255&h.gzhead.comment.charCodeAt(h.gzindex++):0,i(h,l)}while(0!==l);h.gzhead.hcrc&&h.pending>k&&(a.adler=H(a.adler,h.pending_buf,h.pending-k,k)),0===l&&(h.status=ra)}else h.status=ra;if(h.status===ra&&(h.gzhead.hcrc?(h.pending+2>h.pending_buf_size&&g(a),h.pending+2<=h.pending_buf_size&&(i(h,255&a.adler),i(h,a.adler>>8&255),a.adler=0,h.status=sa)):h.status=sa),0!==h.pending){if(g(a),0===a.avail_out)return h.last_flush=-1,O}else if(0===a.avail_in&&e(b)<=e(c)&&b!==M)return d(a,S);if(h.status===ta&&0!==a.avail_in)return d(a,S);if(0!==a.avail_in||0!==h.lookahead||b!==J&&h.status!==ta){var o=h.strategy===V?r(h,b):h.strategy===W?q(h,b):D[h.level].func(h,b);if(o!==wa&&o!==xa||(h.status=ta),o===ua||o===wa)return 0===a.avail_out&&(h.last_flush=-1),O;if(o===va&&(b===K?F._tr_align(h):b!==N&&(F._tr_stored_block(h,0,0,!1),b===L&&(f(h.head),0===h.lookahead&&(h.strstart=0,h.block_start=0,h.insert=0))),g(a),0===a.avail_out))return h.last_flush=-1,O}return b!==M?O:h.wrap<=0?P:(2===h.wrap?(i(h,255&a.adler),i(h,a.adler>>8&255),i(h,a.adler>>16&255),i(h,a.adler>>24&255),i(h,255&a.total_in),i(h,a.total_in>>8&255),i(h,a.total_in>>16&255),i(h,a.total_in>>24&255)):(j(h,a.adler>>>16),j(h,65535&a.adler)),g(a),h.wrap>0&&(h.wrap=-h.wrap),0!==h.pending?O:P)}function B(a){var b;return a&&a.state?(b=a.state.status,b!==na&&b!==oa&&b!==pa&&b!==qa&&b!==ra&&b!==sa&&b!==ta?d(a,Q):(a.state=null,b===sa?d(a,R):O)):Q}function C(a,b){var 
c,d,e,g,h,i,j,k,l=b.length;if(!a||!a.state)return Q;if(c=a.state,g=c.wrap,2===g||1===g&&c.status!==na||c.lookahead)return Q;for(1===g&&(a.adler=G(a.adler,b,l,0)),c.wrap=0,l>=c.w_size&&(0===g&&(f(c.head),c.strstart=0,c.block_start=0,c.insert=0),k=new E.Buf8(c.w_size),E.arraySet(k,b,l-c.w_size,c.w_size,0),b=k,l=c.w_size),h=a.avail_in,i=a.next_in,j=a.input,a.avail_in=l,a.next_in=0,a.input=b,m(c);c.lookahead>=ja;){d=c.strstart,e=c.lookahead-(ja-1);do c.ins_h=(c.ins_h<<c.hash_shift^c.window[d+ja-1])&c.hash_mask,c.prev[d&c.w_mask]=c.head[c.ins_h],c.head[c.ins_h]=d,d++;while(--e);c.strstart=d,c.lookahead=ja-1,m(c)}return c.strstart+=c.lookahead,c.block_start=c.strstart,c.insert=c.lookahead,c.lookahead=0,c.match_length=c.prev_length=ja-1,c.match_available=0,a.next_in=i,a.input=j,a.avail_in=h,c.wrap=g,O}var D,E=a("../utils/common"),F=a("./trees"),G=a("./adler32"),H=a("./crc32"),I=a("./messages"),J=0,K=1,L=3,M=4,N=5,O=0,P=1,Q=-2,R=-3,S=-5,T=-1,U=1,V=2,W=3,X=4,Y=0,Z=2,$=8,_=9,aa=15,ba=8,ca=29,da=256,ea=da+1+ca,fa=30,ga=19,ha=2*ea+1,ia=15,ja=3,ka=258,la=ka+ja+1,ma=32,na=42,oa=69,pa=73,qa=91,ra=103,sa=113,ta=666,ua=1,va=2,wa=3,xa=4,ya=3;D=[new s(0,0,0,0,n),new s(4,4,8,4,o),new s(4,5,16,8,o),new s(4,6,32,32,o),new s(4,4,16,16,p),new s(8,16,32,32,p),new s(8,16,128,128,p),new s(8,32,128,256,p),new s(32,128,258,1024,p),new s(32,258,258,4096,p)],c.deflateInit=z,c.deflateInit2=y,c.deflateReset=w,c.deflateResetKeep=v,c.deflateSetHeader=x,c.deflate=A,c.deflateEnd=B,c.deflateSetDictionary=C,c.deflateInfo="pako deflate (from Nodeca project)"},{"../utils/common":62,"./adler32":64,"./crc32":66,"./messages":72,"./trees":73}],68:[function(a,b,c){"use strict";function d(){this.text=0,this.time=0,this.xflags=0,this.os=0,this.extra=null,this.extra_len=0,this.name="",this.comment="",this.hcrc=0,this.done=!1}b.exports=d},{}],69:[function(a,b,c){"use strict";var d=30,e=12;b.exports=function(a,b){var c,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,A,B,C;c=a.state,f=a.next_in,B=a.input,g=f+(a.avail_in-5),h=a.next_out,C=a.output,i=h-(b-a.avail_out),j=h+(a.avail_out-257),k=c.dmax,l=c.wsize,m=c.whave,n=c.wnext,o=c.window,p=c.hold,q=c.bits,r=c.lencode,s=c.distcode,t=(1<<c.lenbits)-1,u=(1<<c.distbits)-1;a:do{q<15&&(p+=B[f++]<<q,q+=8,p+=B[f++]<<q,q+=8),v=r[p&t];b:for(;;){if(w=v>>>24,p>>>=w,q-=w,w=v>>>16&255,0===w)C[h++]=65535&v;else{if(!(16&w)){if(0===(64&w)){v=r[(65535&v)+(p&(1<<w)-1)];continue b}if(32&w){c.mode=e;break a}a.msg="invalid literal/length code",c.mode=d;break a}x=65535&v,w&=15,w&&(q<w&&(p+=B[f++]<<q,q+=8),x+=p&(1<<w)-1,p>>>=w,q-=w),q<15&&(p+=B[f++]<<q,q+=8,p+=B[f++]<<q,q+=8),v=s[p&u];c:for(;;){if(w=v>>>24,p>>>=w,q-=w,w=v>>>16&255,!(16&w)){if(0===(64&w)){v=s[(65535&v)+(p&(1<<w)-1)];continue c}a.msg="invalid distance code",c.mode=d;break a}if(y=65535&v,w&=15,q<w&&(p+=B[f++]<<q,q+=8,q<w&&(p+=B[f++]<<q,q+=8)),y+=p&(1<<w)-1,y>k){a.msg="invalid distance too far back",c.mode=d;break a}if(p>>>=w,q-=w,w=h-i,y>w){if(w=y-w,w>m&&c.sane){a.msg="invalid distance too far back",c.mode=d;break a}if(z=0,A=o,0===n){if(z+=l-w,w<x){x-=w;do C[h++]=o[z++];while(--w);z=h-y,A=C}}else if(n<w){if(z+=l+n-w,w-=n,w<x){x-=w;do C[h++]=o[z++];while(--w);if(z=0,n<x){w=n,x-=w;do C[h++]=o[z++];while(--w);z=h-y,A=C}}}else if(z+=n-w,w<x){x-=w;do C[h++]=o[z++];while(--w);z=h-y,A=C}for(;x>2;)C[h++]=A[z++],C[h++]=A[z++],C[h++]=A[z++],x-=3;x&&(C[h++]=A[z++],x>1&&(C[h++]=A[z++]))}else{z=h-y;do 
C[h++]=C[z++],C[h++]=C[z++],C[h++]=C[z++],x-=3;while(x>2);x&&(C[h++]=C[z++],x>1&&(C[h++]=C[z++]))}break}}break}}while(f<g&&h<j);x=q>>3,f-=x,q-=x<<3,p&=(1<<q)-1,a.next_in=f,a.next_out=h,a.avail_in=f<g?5+(g-f):5-(f-g),a.avail_out=h<j?257+(j-h):257-(h-j),c.hold=p,c.bits=q}},{}],70:[function(a,b,c){"use strict";function d(a){return(a>>>24&255)+(a>>>8&65280)+((65280&a)<<8)+((255&a)<<24)}function e(){this.mode=0,this.last=!1,this.wrap=0,this.havedict=!1,this.flags=0,this.dmax=0,this.check=0,this.total=0,this.head=null,this.wbits=0,this.wsize=0,this.whave=0,this.wnext=0,this.window=null,this.hold=0,this.bits=0,this.length=0,this.offset=0,this.extra=0,this.lencode=null,this.distcode=null,this.lenbits=0,this.distbits=0,this.ncode=0,this.nlen=0,this.ndist=0,this.have=0,this.next=null,this.lens=new s.Buf16(320),this.work=new s.Buf16(288),this.lendyn=null,this.distdyn=null,this.sane=0,this.back=0,this.was=0}function f(a){var b;return a&&a.state?(b=a.state,a.total_in=a.total_out=b.total=0,a.msg="",b.wrap&&(a.adler=1&b.wrap),b.mode=L,b.last=0,b.havedict=0,b.dmax=32768,b.head=null,b.hold=0,b.bits=0,b.lencode=b.lendyn=new s.Buf32(pa),b.distcode=b.distdyn=new s.Buf32(qa),b.sane=1,b.back=-1,D):G}function g(a){var b;return a&&a.state?(b=a.state,b.wsize=0,b.whave=0,b.wnext=0,f(a)):G}function h(a,b){var c,d;return a&&a.state?(d=a.state,b<0?(c=0,b=-b):(c=(b>>4)+1,b<48&&(b&=15)),b&&(b<8||b>15)?G:(null!==d.window&&d.wbits!==b&&(d.window=null),d.wrap=c,d.wbits=b,g(a))):G}function i(a,b){var c,d;return a?(d=new e,a.state=d,d.window=null,c=h(a,b),c!==D&&(a.state=null),c):G}function j(a){return i(a,sa)}function k(a){if(ta){var b;for(q=new s.Buf32(512),r=new s.Buf32(32),b=0;b<144;)a.lens[b++]=8;for(;b<256;)a.lens[b++]=9;for(;b<280;)a.lens[b++]=7;for(;b<288;)a.lens[b++]=8;for(w(y,a.lens,0,288,q,0,a.work,{bits:9}),b=0;b<32;)a.lens[b++]=5;w(z,a.lens,0,32,r,0,a.work,{bits:5}),ta=!1}a.lencode=q,a.lenbits=9,a.distcode=r,a.distbits=5}function l(a,b,c,d){var e,f=a.state;return null===f.window&&(f.wsize=1<<f.wbits,f.wnext=0,f.whave=0,f.window=new s.Buf8(f.wsize)),d>=f.wsize?(s.arraySet(f.window,b,c-f.wsize,f.wsize,0),f.wnext=0,f.whave=f.wsize):(e=f.wsize-f.wnext,e>d&&(e=d),s.arraySet(f.window,b,c-d,e,f.wnext),d-=e,d?(s.arraySet(f.window,b,c-d,d,0),f.wnext=d,f.whave=f.wsize):(f.wnext+=e,f.wnext===f.wsize&&(f.wnext=0),f.whave<f.wsize&&(f.whave+=e))),0}function m(a,b){var c,e,f,g,h,i,j,m,n,o,p,q,r,pa,qa,ra,sa,ta,ua,va,wa,xa,ya,za,Aa=0,Ba=new s.Buf8(4),Ca=[16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15];if(!a||!a.state||!a.output||!a.input&&0!==a.avail_in)return G;c=a.state,c.mode===W&&(c.mode=X),h=a.next_out,f=a.output,j=a.avail_out,g=a.next_in,e=a.input,i=a.avail_in,m=c.hold,n=c.bits,o=i,p=j,xa=D;a:for(;;)switch(c.mode){case L:if(0===c.wrap){c.mode=X;break}for(;n<16;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(2&c.wrap&&35615===m){c.check=0,Ba[0]=255&m,Ba[1]=m>>>8&255,c.check=u(c.check,Ba,2,0),m=0,n=0,c.mode=M;break}if(c.flags=0,c.head&&(c.head.done=!1),!(1&c.wrap)||(((255&m)<<8)+(m>>8))%31){a.msg="incorrect header check",c.mode=ma;break}if((15&m)!==K){a.msg="unknown compression method",c.mode=ma;break}if(m>>>=4,n-=4,wa=(15&m)+8,0===c.wbits)c.wbits=wa;else if(wa>c.wbits){a.msg="invalid window size",c.mode=ma;break}c.dmax=1<<wa,a.adler=c.check=1,c.mode=512&m?U:W,m=0,n=0;break;case M:for(;n<16;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(c.flags=m,(255&c.flags)!==K){a.msg="unknown compression method",c.mode=ma;break}if(57344&c.flags){a.msg="unknown header flags 
set",c.mode=ma;break}c.head&&(c.head.text=m>>8&1),512&c.flags&&(Ba[0]=255&m,Ba[1]=m>>>8&255,c.check=u(c.check,Ba,2,0)),m=0,n=0,c.mode=N;case N:for(;n<32;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}c.head&&(c.head.time=m),512&c.flags&&(Ba[0]=255&m,Ba[1]=m>>>8&255,Ba[2]=m>>>16&255,Ba[3]=m>>>24&255,c.check=u(c.check,Ba,4,0)),m=0,n=0,c.mode=O;case O:for(;n<16;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}c.head&&(c.head.xflags=255&m,c.head.os=m>>8),512&c.flags&&(Ba[0]=255&m,Ba[1]=m>>>8&255,c.check=u(c.check,Ba,2,0)),m=0,n=0,c.mode=P;case P:if(1024&c.flags){for(;n<16;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}c.length=m,c.head&&(c.head.extra_len=m),512&c.flags&&(Ba[0]=255&m,Ba[1]=m>>>8&255,c.check=u(c.check,Ba,2,0)),m=0,n=0}else c.head&&(c.head.extra=null);c.mode=Q;case Q:if(1024&c.flags&&(q=c.length,q>i&&(q=i),q&&(c.head&&(wa=c.head.extra_len-c.length,c.head.extra||(c.head.extra=new Array(c.head.extra_len)),s.arraySet(c.head.extra,e,g,q,wa)),512&c.flags&&(c.check=u(c.check,e,q,g)),i-=q,g+=q,c.length-=q),c.length))break a;c.length=0,c.mode=R;case R:if(2048&c.flags){if(0===i)break a;q=0;do wa=e[g+q++],c.head&&wa&&c.length<65536&&(c.head.name+=String.fromCharCode(wa));while(wa&&q<i);if(512&c.flags&&(c.check=u(c.check,e,q,g)),i-=q,g+=q,wa)break a}else c.head&&(c.head.name=null);c.length=0,c.mode=S;case S:if(4096&c.flags){if(0===i)break a;q=0;do wa=e[g+q++],c.head&&wa&&c.length<65536&&(c.head.comment+=String.fromCharCode(wa));while(wa&&q<i);if(512&c.flags&&(c.check=u(c.check,e,q,g)),i-=q,g+=q,wa)break a}else c.head&&(c.head.comment=null);c.mode=T;case T:if(512&c.flags){for(;n<16;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(m!==(65535&c.check)){a.msg="header crc mismatch",c.mode=ma;break}m=0,n=0}c.head&&(c.head.hcrc=c.flags>>9&1,c.head.done=!0),a.adler=c.check=0,c.mode=W;break;case U:for(;n<32;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}a.adler=c.check=d(m),m=0,n=0,c.mode=V;case V:if(0===c.havedict)return a.next_out=h,a.avail_out=j,a.next_in=g,a.avail_in=i,c.hold=m,c.bits=n,F;a.adler=c.check=1,c.mode=W;case W:if(b===B||b===C)break a;case X:if(c.last){m>>>=7&n,n-=7&n,c.mode=ja;break}for(;n<3;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}switch(c.last=1&m,m>>>=1,n-=1,3&m){case 0:c.mode=Y;break;case 1:if(k(c),c.mode=ca,b===C){m>>>=2,n-=2;break a}break;case 2:c.mode=_;break;case 3:a.msg="invalid block type",c.mode=ma}m>>>=2,n-=2;break;case Y:for(m>>>=7&n,n-=7&n;n<32;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if((65535&m)!==(m>>>16^65535)){a.msg="invalid stored block lengths",c.mode=ma;break}if(c.length=65535&m,m=0,n=0,c.mode=Z,b===C)break a;case Z:c.mode=$;case $:if(q=c.length){if(q>i&&(q=i),q>j&&(q=j),0===q)break a;s.arraySet(f,e,g,q,h),i-=q,g+=q,j-=q,h+=q,c.length-=q;break}c.mode=W;break;case _:for(;n<14;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(c.nlen=(31&m)+257,m>>>=5,n-=5,c.ndist=(31&m)+1,m>>>=5,n-=5,c.ncode=(15&m)+4,m>>>=4,n-=4,c.nlen>286||c.ndist>30){a.msg="too many length or distance symbols",c.mode=ma;break}c.have=0,c.mode=aa;case aa:for(;c.have<c.ncode;){for(;n<3;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}c.lens[Ca[c.have++]]=7&m,m>>>=3,n-=3}for(;c.have<19;)c.lens[Ca[c.have++]]=0;if(c.lencode=c.lendyn,c.lenbits=7,ya={bits:c.lenbits},xa=w(x,c.lens,0,19,c.lencode,0,c.work,ya),c.lenbits=ya.bits,xa){a.msg="invalid code lengths set",c.mode=ma;break}c.have=0,c.mode=ba;case ba:for(;c.have<c.nlen+c.ndist;){for(;Aa=c.lencode[m&(1<<c.lenbits)-1],qa=Aa>>>24,ra=Aa>>>16&255,sa=65535&Aa,!(qa<=n);){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(sa<16)m>>>=qa,n-=qa,c.lens[c.have++]=sa;else{if(16===sa){for(za=qa+2;n<za;){if(0===i)break 
a;i--,m+=e[g++]<<n,n+=8}if(m>>>=qa,n-=qa,0===c.have){a.msg="invalid bit length repeat",c.mode=ma;break}wa=c.lens[c.have-1],q=3+(3&m),m>>>=2,n-=2}else if(17===sa){for(za=qa+3;n<za;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}m>>>=qa,n-=qa,wa=0,q=3+(7&m),m>>>=3,n-=3}else{for(za=qa+7;n<za;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}m>>>=qa,n-=qa,wa=0,q=11+(127&m),m>>>=7,n-=7}if(c.have+q>c.nlen+c.ndist){a.msg="invalid bit length repeat",c.mode=ma;break}for(;q--;)c.lens[c.have++]=wa}}if(c.mode===ma)break;if(0===c.lens[256]){a.msg="invalid code -- missing end-of-block",c.mode=ma;break}if(c.lenbits=9,ya={bits:c.lenbits},xa=w(y,c.lens,0,c.nlen,c.lencode,0,c.work,ya),c.lenbits=ya.bits,xa){a.msg="invalid literal/lengths set",c.mode=ma;break}if(c.distbits=6,c.distcode=c.distdyn,ya={bits:c.distbits},xa=w(z,c.lens,c.nlen,c.ndist,c.distcode,0,c.work,ya),c.distbits=ya.bits,xa){a.msg="invalid distances set",c.mode=ma;break}if(c.mode=ca,b===C)break a;case ca:c.mode=da;case da:if(i>=6&&j>=258){a.next_out=h,a.avail_out=j,a.next_in=g,a.avail_in=i,c.hold=m,c.bits=n,v(a,p),h=a.next_out,f=a.output,j=a.avail_out,g=a.next_in,e=a.input,i=a.avail_in,m=c.hold,n=c.bits,c.mode===W&&(c.back=-1);break}for(c.back=0;Aa=c.lencode[m&(1<<c.lenbits)-1],qa=Aa>>>24,ra=Aa>>>16&255,sa=65535&Aa,!(qa<=n);){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(ra&&0===(240&ra)){for(ta=qa,ua=ra,va=sa;Aa=c.lencode[va+((m&(1<<ta+ua)-1)>>ta)],qa=Aa>>>24,ra=Aa>>>16&255,sa=65535&Aa,!(ta+qa<=n);){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}m>>>=ta,n-=ta,c.back+=ta}if(m>>>=qa,n-=qa,c.back+=qa,c.length=sa,0===ra){c.mode=ia;break}if(32&ra){c.back=-1,c.mode=W;break}if(64&ra){a.msg="invalid literal/length code",c.mode=ma;break}c.extra=15&ra,c.mode=ea;case ea:if(c.extra){for(za=c.extra;n<za;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}c.length+=m&(1<<c.extra)-1,m>>>=c.extra,n-=c.extra,c.back+=c.extra}c.was=c.length,c.mode=fa;case fa:for(;Aa=c.distcode[m&(1<<c.distbits)-1],qa=Aa>>>24,ra=Aa>>>16&255,sa=65535&Aa,!(qa<=n);){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(0===(240&ra)){for(ta=qa,ua=ra,va=sa;Aa=c.distcode[va+((m&(1<<ta+ua)-1)>>ta)],qa=Aa>>>24,ra=Aa>>>16&255,sa=65535&Aa,!(ta+qa<=n);){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}m>>>=ta,n-=ta,c.back+=ta}if(m>>>=qa,n-=qa,c.back+=qa,64&ra){a.msg="invalid distance code",c.mode=ma;break}c.offset=sa,c.extra=15&ra,c.mode=ga;case ga:if(c.extra){for(za=c.extra;n<za;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}c.offset+=m&(1<<c.extra)-1,m>>>=c.extra,n-=c.extra,c.back+=c.extra}if(c.offset>c.dmax){a.msg="invalid distance too far back",c.mode=ma;break}c.mode=ha;case ha:if(0===j)break a;if(q=p-j,c.offset>q){if(q=c.offset-q,q>c.whave&&c.sane){a.msg="invalid distance too far back",c.mode=ma;break}q>c.wnext?(q-=c.wnext,r=c.wsize-q):r=c.wnext-q,q>c.length&&(q=c.length),pa=c.window}else pa=f,r=h-c.offset,q=c.length;q>j&&(q=j),j-=q,c.length-=q;do f[h++]=pa[r++];while(--q);0===c.length&&(c.mode=da);break;case ia:if(0===j)break a;f[h++]=c.length,j--,c.mode=da;break;case ja:if(c.wrap){for(;n<32;){if(0===i)break a;i--,m|=e[g++]<<n,n+=8}if(p-=j,a.total_out+=p,c.total+=p,p&&(a.adler=c.check=c.flags?u(c.check,f,p,h-p):t(c.check,f,p,h-p)),p=j,(c.flags?m:d(m))!==c.check){a.msg="incorrect data check",c.mode=ma;break}m=0,n=0}c.mode=ka;case ka:if(c.wrap&&c.flags){for(;n<32;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(m!==(4294967295&c.total)){a.msg="incorrect length check",c.mode=ma;break}m=0,n=0}c.mode=la;case la:xa=E;break a;case ma:xa=H;break a;case na:return I;case oa:default:return G}return 
a.next_out=h,a.avail_out=j,a.next_in=g,a.avail_in=i,c.hold=m,c.bits=n,(c.wsize||p!==a.avail_out&&c.mode<ma&&(c.mode<ja||b!==A))&&l(a,a.output,a.next_out,p-a.avail_out)?(c.mode=na,I):(o-=a.avail_in,p-=a.avail_out,a.total_in+=o,a.total_out+=p,c.total+=p,c.wrap&&p&&(a.adler=c.check=c.flags?u(c.check,f,p,a.next_out-p):t(c.check,f,p,a.next_out-p)),a.data_type=c.bits+(c.last?64:0)+(c.mode===W?128:0)+(c.mode===ca||c.mode===Z?256:0),(0===o&&0===p||b===A)&&xa===D&&(xa=J),xa)}function n(a){if(!a||!a.state)return G;var b=a.state;return b.window&&(b.window=null),a.state=null,D}function o(a,b){var c;return a&&a.state?(c=a.state,0===(2&c.wrap)?G:(c.head=b,b.done=!1,D)):G}function p(a,b){var c,d,e,f=b.length;return a&&a.state?(c=a.state,0!==c.wrap&&c.mode!==V?G:c.mode===V&&(d=1,d=t(d,b,f,0),d!==c.check)?H:(e=l(a,b,f,f))?(c.mode=na,I):(c.havedict=1,D)):G}var q,r,s=a("../utils/common"),t=a("./adler32"),u=a("./crc32"),v=a("./inffast"),w=a("./inftrees"),x=0,y=1,z=2,A=4,B=5,C=6,D=0,E=1,F=2,G=-2,H=-3,I=-4,J=-5,K=8,L=1,M=2,N=3,O=4,P=5,Q=6,R=7,S=8,T=9,U=10,V=11,W=12,X=13,Y=14,Z=15,$=16,_=17,aa=18,ba=19,ca=20,da=21,ea=22,fa=23,ga=24,ha=25,ia=26,ja=27,ka=28,la=29,ma=30,na=31,oa=32,pa=852,qa=592,ra=15,sa=ra,ta=!0;c.inflateReset=g,c.inflateReset2=h,c.inflateResetKeep=f,c.inflateInit=j,c.inflateInit2=i,c.inflate=m,c.inflateEnd=n,c.inflateGetHeader=o,c.inflateSetDictionary=p,c.inflateInfo="pako inflate (from Nodeca project)"},{"../utils/common":62,"./adler32":64,"./crc32":66,"./inffast":69,"./inftrees":71}],71:[function(a,b,c){"use strict";var d=a("../utils/common"),e=15,f=852,g=592,h=0,i=1,j=2,k=[3,4,5,6,7,8,9,10,11,13,15,17,19,23,27,31,35,43,51,59,67,83,99,115,131,163,195,227,258,0,0],l=[16,16,16,16,16,16,16,16,17,17,17,17,18,18,18,18,19,19,19,19,20,20,20,20,21,21,21,21,16,72,78],m=[1,2,3,4,5,7,9,13,17,25,33,49,65,97,129,193,257,385,513,769,1025,1537,2049,3073,4097,6145,8193,12289,16385,24577,0,0],n=[16,16,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,27,27,28,28,29,29,64,64];b.exports=function(a,b,c,o,p,q,r,s){var t,u,v,w,x,y,z,A,B,C=s.bits,D=0,E=0,F=0,G=0,H=0,I=0,J=0,K=0,L=0,M=0,N=null,O=0,P=new d.Buf16(e+1),Q=new d.Buf16(e+1),R=null,S=0;for(D=0;D<=e;D++)P[D]=0;for(E=0;E<o;E++)P[b[c+E]]++;for(H=C,G=e;G>=1&&0===P[G];G--);if(H>G&&(H=G),0===G)return p[q++]=20971520,p[q++]=20971520,s.bits=1,0;for(F=1;F<G&&0===P[F];F++);for(H<F&&(H=F),K=1,D=1;D<=e;D++)if(K<<=1,K-=P[D],K<0)return-1;if(K>0&&(a===h||1!==G))return-1;for(Q[1]=0,D=1;D<e;D++)Q[D+1]=Q[D]+P[D];for(E=0;E<o;E++)0!==b[c+E]&&(r[Q[b[c+E]]++]=E);if(a===h?(N=R=r,y=19):a===i?(N=k,O-=257,R=l,S-=257,y=256):(N=m,R=n,y=-1),M=0,E=0,D=F,x=q,I=H,J=0,v=-1,L=1<<H,w=L-1,a===i&&L>f||a===j&&L>g)return 1;for(;;){z=D-J,r[E]<y?(A=0,B=r[E]):r[E]>y?(A=R[S+r[E]],B=N[O+r[E]]):(A=96,B=0),t=1<<D-J,u=1<<I,F=u;do u-=t,p[x+(M>>J)+u]=z<<24|A<<16|B|0;while(0!==u);for(t=1<<D-1;M&t;)t>>=1;if(0!==t?(M&=t-1,M+=t):M=0,E++,0===--P[D]){if(D===G)break;D=b[c+r[E]]}if(D>H&&(M&w)!==v){for(0===J&&(J=H),x+=F,I=D-J,K=1<<I;I+J<G&&(K-=P[I+J],!(K<=0));)I++,K<<=1;if(L+=1<<I,a===i&&L>f||a===j&&L>g)return 1;v=M&w,p[v]=H<<24|I<<16|x-q|0}}return 0!==M&&(p[x+M]=D-J<<24|64<<16|0),s.bits=H,0}},{"../utils/common":62}],72:[function(a,b,c){"use strict";b.exports={2:"need dictionary",1:"stream end",0:"","-1":"file error","-2":"stream error","-3":"data error","-4":"insufficient memory","-5":"buffer error","-6":"incompatible version"}},{}],73:[function(a,b,c){"use strict";function d(a){for(var b=a.length;--b>=0;)a[b]=0}function 
e(a,b,c,d,e){this.static_tree=a,this.extra_bits=b,this.extra_base=c,this.elems=d,this.max_length=e,this.has_stree=a&&a.length}function f(a,b){this.dyn_tree=a,this.max_code=0,this.stat_desc=b}function g(a){return a<256?ia[a]:ia[256+(a>>>7)]}function h(a,b){a.pending_buf[a.pending++]=255&b,a.pending_buf[a.pending++]=b>>>8&255}function i(a,b,c){a.bi_valid>X-c?(a.bi_buf|=b<<a.bi_valid&65535,h(a,a.bi_buf),a.bi_buf=b>>X-a.bi_valid,a.bi_valid+=c-X):(a.bi_buf|=b<<a.bi_valid&65535,a.bi_valid+=c)}function j(a,b,c){i(a,c[2*b],c[2*b+1])}function k(a,b){var c=0;do c|=1&a,a>>>=1,c<<=1;while(--b>0);return c>>>1}function l(a){16===a.bi_valid?(h(a,a.bi_buf),a.bi_buf=0,a.bi_valid=0):a.bi_valid>=8&&(a.pending_buf[a.pending++]=255&a.bi_buf,a.bi_buf>>=8,a.bi_valid-=8)}function m(a,b){var c,d,e,f,g,h,i=b.dyn_tree,j=b.max_code,k=b.stat_desc.static_tree,l=b.stat_desc.has_stree,m=b.stat_desc.extra_bits,n=b.stat_desc.extra_base,o=b.stat_desc.max_length,p=0;for(f=0;f<=W;f++)a.bl_count[f]=0;for(i[2*a.heap[a.heap_max]+1]=0,
c=a.heap_max+1;c<V;c++)d=a.heap[c],f=i[2*i[2*d+1]+1]+1,f>o&&(f=o,p++),i[2*d+1]=f,d>j||(a.bl_count[f]++,g=0,d>=n&&(g=m[d-n]),h=i[2*d],a.opt_len+=h*(f+g),l&&(a.static_len+=h*(k[2*d+1]+g)));if(0!==p){do{for(f=o-1;0===a.bl_count[f];)f--;a.bl_count[f]--,a.bl_count[f+1]+=2,a.bl_count[o]--,p-=2}while(p>0);for(f=o;0!==f;f--)for(d=a.bl_count[f];0!==d;)e=a.heap[--c],e>j||(i[2*e+1]!==f&&(a.opt_len+=(f-i[2*e+1])*i[2*e],i[2*e+1]=f),d--)}}function n(a,b,c){var d,e,f=new Array(W+1),g=0;for(d=1;d<=W;d++)f[d]=g=g+c[d-1]<<1;for(e=0;e<=b;e++){var h=a[2*e+1];0!==h&&(a[2*e]=k(f[h]++,h))}}function o(){var a,b,c,d,f,g=new Array(W+1);for(c=0,d=0;d<Q-1;d++)for(ka[d]=c,a=0;a<1<<ba[d];a++)ja[c++]=d;for(ja[c-1]=d,f=0,d=0;d<16;d++)for(la[d]=f,a=0;a<1<<ca[d];a++)ia[f++]=d;for(f>>=7;d<T;d++)for(la[d]=f<<7,a=0;a<1<<ca[d]-7;a++)ia[256+f++]=d;for(b=0;b<=W;b++)g[b]=0;for(a=0;a<=143;)ga[2*a+1]=8,a++,g[8]++;for(;a<=255;)ga[2*a+1]=9,a++,g[9]++;for(;a<=279;)ga[2*a+1]=7,a++,g[7]++;for(;a<=287;)ga[2*a+1]=8,a++,g[8]++;for(n(ga,S+1,g),a=0;a<T;a++)ha[2*a+1]=5,ha[2*a]=k(a,5);ma=new e(ga,ba,R+1,S,W),na=new e(ha,ca,0,T,W),oa=new e(new Array(0),da,0,U,Y)}function p(a){var b;for(b=0;b<S;b++)a.dyn_ltree[2*b]=0;for(b=0;b<T;b++)a.dyn_dtree[2*b]=0;for(b=0;b<U;b++)a.bl_tree[2*b]=0;a.dyn_ltree[2*Z]=1,a.opt_len=a.static_len=0,a.last_lit=a.matches=0}function q(a){a.bi_valid>8?h(a,a.bi_buf):a.bi_valid>0&&(a.pending_buf[a.pending++]=a.bi_buf),a.bi_buf=0,a.bi_valid=0}function r(a,b,c,d){q(a),d&&(h(a,c),h(a,~c)),G.arraySet(a.pending_buf,a.window,b,c,a.pending),a.pending+=c}function s(a,b,c,d){var e=2*b,f=2*c;return a[e]<a[f]||a[e]===a[f]&&d[b]<=d[c]}function t(a,b,c){for(var d=a.heap[c],e=c<<1;e<=a.heap_len&&(e<a.heap_len&&s(b,a.heap[e+1],a.heap[e],a.depth)&&e++,!s(b,d,a.heap[e],a.depth));)a.heap[c]=a.heap[e],c=e,e<<=1;a.heap[c]=d}function u(a,b,c){var d,e,f,h,k=0;if(0!==a.last_lit)do d=a.pending_buf[a.d_buf+2*k]<<8|a.pending_buf[a.d_buf+2*k+1],e=a.pending_buf[a.l_buf+k],k++,0===d?j(a,e,b):(f=ja[e],j(a,f+R+1,b),h=ba[f],0!==h&&(e-=ka[f],i(a,e,h)),d--,f=g(d),j(a,f,c),h=ca[f],0!==h&&(d-=la[f],i(a,d,h)));while(k<a.last_lit);j(a,Z,b)}function v(a,b){var c,d,e,f=b.dyn_tree,g=b.stat_desc.static_tree,h=b.stat_desc.has_stree,i=b.stat_desc.elems,j=-1;for(a.heap_len=0,a.heap_max=V,c=0;c<i;c++)0!==f[2*c]?(a.heap[++a.heap_len]=j=c,a.depth[c]=0):f[2*c+1]=0;for(;a.heap_len<2;)e=a.heap[++a.heap_len]=j<2?++j:0,f[2*e]=1,a.depth[e]=0,a.opt_len--,h&&(a.static_len-=g[2*e+1]);for(b.max_code=j,c=a.heap_len>>1;c>=1;c--)t(a,f,c);e=i;do c=a.heap[1],a.heap[1]=a.heap[a.heap_len--],t(a,f,1),d=a.heap[1],a.heap[--a.heap_max]=c,a.heap[--a.heap_max]=d,f[2*e]=f[2*c]+f[2*d],a.depth[e]=(a.depth[c]>=a.depth[d]?a.depth[c]:a.depth[d])+1,f[2*c+1]=f[2*d+1]=e,a.heap[1]=e++,t(a,f,1);while(a.heap_len>=2);a.heap[--a.heap_max]=a.heap[1],m(a,b),n(f,j,a.bl_count)}function w(a,b,c){var d,e,f=-1,g=b[1],h=0,i=7,j=4;for(0===g&&(i=138,j=3),b[2*(c+1)+1]=65535,d=0;d<=c;d++)e=g,g=b[2*(d+1)+1],++h<i&&e===g||(h<j?a.bl_tree[2*e]+=h:0!==e?(e!==f&&a.bl_tree[2*e]++,a.bl_tree[2*$]++):h<=10?a.bl_tree[2*_]++:a.bl_tree[2*aa]++,h=0,f=e,0===g?(i=138,j=3):e===g?(i=6,j=3):(i=7,j=4))}function x(a,b,c){var d,e,f=-1,g=b[1],h=0,k=7,l=4;for(0===g&&(k=138,l=3),d=0;d<=c;d++)if(e=g,g=b[2*(d+1)+1],!(++h<k&&e===g)){if(h<l){do j(a,e,a.bl_tree);while(0!==--h)}else 0!==e?(e!==f&&(j(a,e,a.bl_tree),h--),j(a,$,a.bl_tree),i(a,h-3,2)):h<=10?(j(a,_,a.bl_tree),i(a,h-3,3)):(j(a,aa,a.bl_tree),i(a,h-11,7));h=0,f=e,0===g?(k=138,l=3):e===g?(k=6,l=3):(k=7,l=4)}}function y(a){var 
b;for(w(a,a.dyn_ltree,a.l_desc.max_code),w(a,a.dyn_dtree,a.d_desc.max_code),v(a,a.bl_desc),b=U-1;b>=3&&0===a.bl_tree[2*ea[b]+1];b--);return a.opt_len+=3*(b+1)+5+5+4,b}function z(a,b,c,d){var e;for(i(a,b-257,5),i(a,c-1,5),i(a,d-4,4),e=0;e<d;e++)i(a,a.bl_tree[2*ea[e]+1],3);x(a,a.dyn_ltree,b-1),x(a,a.dyn_dtree,c-1)}function A(a){var b,c=4093624447;for(b=0;b<=31;b++,c>>>=1)if(1&c&&0!==a.dyn_ltree[2*b])return I;if(0!==a.dyn_ltree[18]||0!==a.dyn_ltree[20]||0!==a.dyn_ltree[26])return J;for(b=32;b<R;b++)if(0!==a.dyn_ltree[2*b])return J;return I}function B(a){pa||(o(),pa=!0),a.l_desc=new f(a.dyn_ltree,ma),a.d_desc=new f(a.dyn_dtree,na),a.bl_desc=new f(a.bl_tree,oa),a.bi_buf=0,a.bi_valid=0,p(a)}function C(a,b,c,d){i(a,(L<<1)+(d?1:0),3),r(a,b,c,!0)}function D(a){i(a,M<<1,3),j(a,Z,ga),l(a)}function E(a,b,c,d){var e,f,g=0;a.level>0?(a.strm.data_type===K&&(a.strm.data_type=A(a)),v(a,a.l_desc),v(a,a.d_desc),g=y(a),e=a.opt_len+3+7>>>3,f=a.static_len+3+7>>>3,f<=e&&(e=f)):e=f=c+5,c+4<=e&&b!==-1?C(a,b,c,d):a.strategy===H||f===e?(i(a,(M<<1)+(d?1:0),3),u(a,ga,ha)):(i(a,(N<<1)+(d?1:0),3),z(a,a.l_desc.max_code+1,a.d_desc.max_code+1,g+1),u(a,a.dyn_ltree,a.dyn_dtree)),p(a),d&&q(a)}function F(a,b,c){return a.pending_buf[a.d_buf+2*a.last_lit]=b>>>8&255,a.pending_buf[a.d_buf+2*a.last_lit+1]=255&b,a.pending_buf[a.l_buf+a.last_lit]=255&c,a.last_lit++,0===b?a.dyn_ltree[2*c]++:(a.matches++,b--,a.dyn_ltree[2*(ja[c]+R+1)]++,a.dyn_dtree[2*g(b)]++),a.last_lit===a.lit_bufsize-1}var G=a("../utils/common"),H=4,I=0,J=1,K=2,L=0,M=1,N=2,O=3,P=258,Q=29,R=256,S=R+1+Q,T=30,U=19,V=2*S+1,W=15,X=16,Y=7,Z=256,$=16,_=17,aa=18,ba=[0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0],ca=[0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13],da=[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7],ea=[16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15],fa=512,ga=new Array(2*(S+2));d(ga);var ha=new Array(2*T);d(ha);var ia=new Array(fa);d(ia);var ja=new Array(P-O+1);d(ja);var ka=new Array(Q);d(ka);var la=new Array(T);d(la);var ma,na,oa,pa=!1;c._tr_init=B,c._tr_stored_block=C,c._tr_flush_block=E,c._tr_tally=F,c._tr_align=D},{"../utils/common":62}],74:[function(a,b,c){"use strict";function d(){this.input=null,this.next_in=0,this.avail_in=0,this.total_in=0,this.output=null,this.next_out=0,this.avail_out=0,this.total_out=0,this.msg="",this.state=null,this.data_type=2,this.adler=0}b.exports=d},{}]},{},[10])(10)}); | yuehui-miji | /yuehui-miji-2022.10.11.0.tar.gz/yuehui-miji-2022.10.11.0/YuehuiMiji/js/libs/zip.min.js | zip.min.js |
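// screenfull.js — a small wrapper around the browser Fullscreen API that resolves the
// vendor-prefixed method/event names and exposes request/exit/toggle helpers plus
// isFullscreen/element/enabled getters (exported as `window.screenfull` or via CommonJS `module.exports`).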
(function () {
'use strict';
var isCommonjs = typeof module !== 'undefined' && module.exports;
var keyboardAllowed = typeof Element !== 'undefined' && 'ALLOW_KEYBOARD_INPUT' in Element;
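	// Resolve the Fullscreen API names for the current browser. `fn` becomes a map
	// from the standard (unprefixed) names to whichever vendor-prefixed variant the
	// browser implements, or `false` when no variant of the API is available.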
var fn = (function () {
var val;
var valLength;
var fnMap = [
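			// standard, unprefixed spec names; the prefixed variants below map onto these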
[
'requestFullscreen',
'exitFullscreen',
'fullscreenElement',
'fullscreenEnabled',
'fullscreenchange',
'fullscreenerror'
],
// new WebKit
[
'webkitRequestFullscreen',
'webkitExitFullscreen',
'webkitFullscreenElement',
'webkitFullscreenEnabled',
'webkitfullscreenchange',
'webkitfullscreenerror'
],
// old WebKit (Safari 5.1)
[
'webkitRequestFullScreen',
'webkitCancelFullScreen',
'webkitCurrentFullScreenElement',
				'webkitCancelFullScreen', // no *FullscreenEnabled in old WebKit; its presence here doubles as the "enabled" value (coerced to boolean below)
'webkitfullscreenchange',
'webkitfullscreenerror'
],
[
'mozRequestFullScreen',
'mozCancelFullScreen',
'mozFullScreenElement',
'mozFullScreenEnabled',
'mozfullscreenchange',
'mozfullscreenerror'
],
[
'msRequestFullscreen',
'msExitFullscreen',
'msFullscreenElement',
'msFullscreenEnabled',
'MSFullscreenChange',
'MSFullscreenError'
]
];
var i = 0;
var l = fnMap.length;
var ret = {};
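		// Use the first vendor map whose exit method exists on `document` and copy its
		// members onto the standard (index 0) names. The outer index `i` is reused for
		// the copy, which is safe because the function returns immediately afterwards.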
for (; i < l; i++) {
val = fnMap[i];
if (val && val[1] in document) {
for (i = 0, valLength = val.length; i < valLength; i++) {
ret[fnMap[0][i]] = val[i];
}
return ret;
}
}
return false;
})();
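	// Public API: thin wrappers around the resolved, possibly vendor-prefixed methods.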
var screenfull = {
request: function (elem) {
var request = fn.requestFullscreen;
elem = elem || document.documentElement;
			// Work around a Safari 5.1 bug: it reports keyboard support in
			// fullscreen even though keyboard input is not actually allowed.
			// Browser sniffing is used here, since the alternative using
			// setTimeout is even worse.
if (/5\.1[\.\d]* Safari/.test(navigator.userAgent)) {
elem[request]();
} else {
elem[request](keyboardAllowed && Element.ALLOW_KEYBOARD_INPUT);
}
},
exit: function () {
document[fn.exitFullscreen]();
},
toggle: function (elem) {
if (this.isFullscreen) {
this.exit();
} else {
this.request(elem);
}
},
raw: fn
};
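	// No variant of the Fullscreen API was found: export `false` so consumers can
	// feature-detect with a simple truthy check, then bail out.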
if (!fn) {
if (isCommonjs) {
module.exports = false;
} else {
window.screenfull = false;
}
return;
}
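	// Read-only state, defined as getters so the values always reflect the live document.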
Object.defineProperties(screenfull, {
isFullscreen: {
get: function () {
return !!document[fn.fullscreenElement];
}
},
element: {
enumerable: true,
get: function () {
return document[fn.fullscreenElement];
}
},
enabled: {
enumerable: true,
get: function () {
// Coerce to boolean in case of old WebKit
return !!document[fn.fullscreenEnabled];
}
}
});
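	// Typical usage once exported (the element ids below are illustrative):
	//
	//   if (screenfull.enabled) {
	//       document.getElementById('fullscreen-button').addEventListener('click', function () {
	//           screenfull.toggle(document.getElementById('viewer'));
	//       });
	//   }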
if (isCommonjs) {
module.exports = screenfull;
} else {
window.screenfull = screenfull;
}
})(); | yuehui-miji | /yuehui-miji-2022.10.11.0.tar.gz/yuehui-miji-2022.10.11.0/YuehuiMiji/js/libs/screenfull.js | screenfull.js |
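// localforage.min.js — minified localForage build: asynchronous key-value storage
// (getItem/setItem/removeItem/iterate/keys/clear) backed by IndexedDB, WebSQL, or
// localStorage, whichever driver the browser supports.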
!function(a){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=a();else if("function"==typeof define&&define.amd)define([],a);else{var b;b="undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:this,b.localforage=a()}}(function(){return function a(b,c,d){function e(g,h){if(!c[g]){if(!b[g]){var i="function"==typeof require&&require;if(!h&&i)return i(g,!0);if(f)return f(g,!0);var j=new Error("Cannot find module '"+g+"'");throw j.code="MODULE_NOT_FOUND",j}var k=c[g]={exports:{}};b[g][0].call(k.exports,function(a){var c=b[g][1][a];return e(c?c:a)},k,k.exports,a,b,c,d)}return c[g].exports}for(var f="function"==typeof require&&require,g=0;g<d.length;g++)e(d[g]);return e}({1:[function(a,b,c){(function(a){"use strict";function c(){k=!0;for(var a,b,c=l.length;c;){for(b=l,l=[],a=-1;++a<c;)b[a]();c=l.length}k=!1}function d(a){1!==l.push(a)||k||e()}var e,f=a.MutationObserver||a.WebKitMutationObserver;if(f){var g=0,h=new f(c),i=a.document.createTextNode("");h.observe(i,{characterData:!0}),e=function(){i.data=g=++g%2}}else if(a.setImmediate||"undefined"==typeof a.MessageChannel)e="document"in a&&"onreadystatechange"in a.document.createElement("script")?function(){var b=a.document.createElement("script");b.onreadystatechange=function(){c(),b.onreadystatechange=null,b.parentNode.removeChild(b),b=null},a.document.documentElement.appendChild(b)}:function(){setTimeout(c,0)};else{var j=new a.MessageChannel;j.port1.onmessage=c,e=function(){j.port2.postMessage(0)}}var k,l=[];b.exports=d}).call(this,"undefined"!=typeof global?global:"undefined"!=typeof self?self:"undefined"!=typeof window?window:{})},{}],2:[function(a,b,c){"use strict";function d(){}function e(a){if("function"!=typeof a)throw new TypeError("resolver must be a function");this.state=s,this.queue=[],this.outcome=void 0,a!==d&&i(this,a)}function f(a,b,c){this.promise=a,"function"==typeof b&&(this.onFulfilled=b,this.callFulfilled=this.otherCallFulfilled),"function"==typeof c&&(this.onRejected=c,this.callRejected=this.otherCallRejected)}function g(a,b,c){o(function(){var d;try{d=b(c)}catch(b){return p.reject(a,b)}d===a?p.reject(a,new TypeError("Cannot resolve promise with itself")):p.resolve(a,d)})}function h(a){var b=a&&a.then;if(a&&"object"==typeof a&&"function"==typeof b)return function(){b.apply(a,arguments)}}function i(a,b){function c(b){f||(f=!0,p.reject(a,b))}function d(b){f||(f=!0,p.resolve(a,b))}function e(){b(d,c)}var f=!1,g=j(e);"error"===g.status&&c(g.value)}function j(a,b){var c={};try{c.value=a(b),c.status="success"}catch(a){c.status="error",c.value=a}return c}function k(a){return a instanceof this?a:p.resolve(new this(d),a)}function l(a){var b=new this(d);return p.reject(b,a)}function m(a){function b(a,b){function d(a){g[b]=a,++h!==e||f||(f=!0,p.resolve(j,g))}c.resolve(a).then(d,function(a){f||(f=!0,p.reject(j,a))})}var c=this;if("[object Array]"!==Object.prototype.toString.call(a))return this.reject(new TypeError("must be an array"));var e=a.length,f=!1;if(!e)return this.resolve([]);for(var g=new Array(e),h=0,i=-1,j=new this(d);++i<e;)b(a[i],i);return j}function n(a){function b(a){c.resolve(a).then(function(a){f||(f=!0,p.resolve(h,a))},function(a){f||(f=!0,p.reject(h,a))})}var c=this;if("[object Array]"!==Object.prototype.toString.call(a))return this.reject(new TypeError("must be an array"));var e=a.length,f=!1;if(!e)return this.resolve([]);for(var g=-1,h=new this(d);++g<e;)b(a[g]);return h}var 
o=a(1),p={},q=["REJECTED"],r=["FULFILLED"],s=["PENDING"];b.exports=c=e,e.prototype.catch=function(a){return this.then(null,a)},e.prototype.then=function(a,b){if("function"!=typeof a&&this.state===r||"function"!=typeof b&&this.state===q)return this;var c=new this.constructor(d);if(this.state!==s){var e=this.state===r?a:b;g(c,e,this.outcome)}else this.queue.push(new f(c,a,b));return c},f.prototype.callFulfilled=function(a){p.resolve(this.promise,a)},f.prototype.otherCallFulfilled=function(a){g(this.promise,this.onFulfilled,a)},f.prototype.callRejected=function(a){p.reject(this.promise,a)},f.prototype.otherCallRejected=function(a){g(this.promise,this.onRejected,a)},p.resolve=function(a,b){var c=j(h,b);if("error"===c.status)return p.reject(a,c.value);var d=c.value;if(d)i(a,d);else{a.state=r,a.outcome=b;for(var e=-1,f=a.queue.length;++e<f;)a.queue[e].callFulfilled(b)}return a},p.reject=function(a,b){a.state=q,a.outcome=b;for(var c=-1,d=a.queue.length;++c<d;)a.queue[c].callRejected(b);return a},c.resolve=k,c.reject=l,c.all=m,c.race=n},{1:1}],3:[function(a,b,c){(function(b){"use strict";"function"!=typeof b.Promise&&(b.Promise=a(2))}).call(this,"undefined"!=typeof global?global:"undefined"!=typeof self?self:"undefined"!=typeof window?window:{})},{2:2}],4:[function(a,b,c){"use strict";function d(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")}function e(){try{if("undefined"!=typeof indexedDB)return indexedDB;if("undefined"!=typeof webkitIndexedDB)return webkitIndexedDB;if("undefined"!=typeof mozIndexedDB)return mozIndexedDB;if("undefined"!=typeof OIndexedDB)return OIndexedDB;if("undefined"!=typeof msIndexedDB)return msIndexedDB}catch(a){}}function f(){try{if(!ga)return!1;var a="undefined"!=typeof openDatabase&&/(Safari|iPhone|iPad|iPod)/.test(navigator.userAgent)&&!/Chrome/.test(navigator.userAgent)&&!/BlackBerry/.test(navigator.platform),b="function"==typeof fetch&&fetch.toString().indexOf("[native code")!==-1;return(!a||b)&&"undefined"!=typeof indexedDB&&"undefined"!=typeof IDBKeyRange}catch(a){return!1}}function g(){return"function"==typeof openDatabase}function h(){try{return"undefined"!=typeof localStorage&&"setItem"in localStorage&&localStorage.setItem}catch(a){return!1}}function i(a,b){a=a||[],b=b||{};try{return new Blob(a,b)}catch(f){if("TypeError"!==f.name)throw f;for(var c="undefined"!=typeof BlobBuilder?BlobBuilder:"undefined"!=typeof MSBlobBuilder?MSBlobBuilder:"undefined"!=typeof MozBlobBuilder?MozBlobBuilder:WebKitBlobBuilder,d=new c,e=0;e<a.length;e+=1)d.append(a[e]);return d.getBlob(b.type)}}function j(a,b){b&&a.then(function(a){b(null,a)},function(a){b(a)})}function k(a,b,c){"function"==typeof b&&a.then(b),"function"==typeof c&&a.catch(c)}function l(a){for(var b=a.length,c=new ArrayBuffer(b),d=new Uint8Array(c),e=0;e<b;e++)d[e]=a.charCodeAt(e);return c}function m(a){return new ja(function(b){var c=a.transaction(ka,"readwrite"),d=i([""]);c.objectStore(ka).put(d,"key"),c.onabort=function(a){a.preventDefault(),a.stopPropagation(),b(!1)},c.oncomplete=function(){var a=navigator.userAgent.match(/Chrome\/(\d+)/),c=navigator.userAgent.match(/Edge\//);b(c||!a||parseInt(a[1],10)>=43)}}).catch(function(){return!1})}function n(a){return"boolean"==typeof ha?ja.resolve(ha):m(a).then(function(a){return ha=a})}function o(a){var b=ia[a.name],c={};c.promise=new ja(function(a){c.resolve=a}),b.deferredOperations.push(c),b.dbReady?b.dbReady=b.dbReady.then(function(){return c.promise}):b.dbReady=c.promise}function p(a){var 
b=ia[a.name],c=b.deferredOperations.pop();c&&c.resolve()}function q(a,b){return new ja(function(c,d){if(a.db){if(!b)return c(a.db);o(a),a.db.close()}var e=[a.name];b&&e.push(a.version);var f=ga.open.apply(ga,e);b&&(f.onupgradeneeded=function(b){var c=f.result;try{c.createObjectStore(a.storeName),b.oldVersion<=1&&c.createObjectStore(ka)}catch(c){if("ConstraintError"!==c.name)throw c;console.warn('The database "'+a.name+'" has been upgraded from version '+b.oldVersion+" to version "+b.newVersion+', but the storage "'+a.storeName+'" already exists.')}}),f.onerror=function(a){a.preventDefault(),d(f.error)},f.onsuccess=function(){c(f.result),p(a)}})}function r(a){return q(a,!1)}function s(a){return q(a,!0)}function t(a,b){if(!a.db)return!0;var c=!a.db.objectStoreNames.contains(a.storeName),d=a.version<a.db.version,e=a.version>a.db.version;if(d&&(a.version!==b&&console.warn('The database "'+a.name+"\" can't be downgraded from version "+a.db.version+" to version "+a.version+"."),a.version=a.db.version),e||c){if(c){var f=a.db.version+1;f>a.version&&(a.version=f)}return!0}return!1}function u(a){return new ja(function(b,c){var d=new FileReader;d.onerror=c,d.onloadend=function(c){var d=btoa(c.target.result||"");b({__local_forage_encoded_blob:!0,data:d,type:a.type})},d.readAsBinaryString(a)})}function v(a){var b=l(atob(a.data));return i([b],{type:a.type})}function w(a){return a&&a.__local_forage_encoded_blob}function x(a){var b=this,c=b._initReady().then(function(){var a=ia[b._dbInfo.name];if(a&&a.dbReady)return a.dbReady});return k(c,a,a),c}function y(a){function b(){return ja.resolve()}var c=this,d={db:null};if(a)for(var e in a)d[e]=a[e];ia||(ia={});var f=ia[d.name];f||(f={forages:[],db:null,dbReady:null,deferredOperations:[]},ia[d.name]=f),f.forages.push(c),c._initReady||(c._initReady=c.ready,c.ready=x);for(var g=[],h=0;h<f.forages.length;h++){var i=f.forages[h];i!==c&&g.push(i._initReady().catch(b))}var j=f.forages.slice(0);return ja.all(g).then(function(){return d.db=f.db,r(d)}).then(function(a){return d.db=a,t(d,c._defaultConfig.version)?s(d):a}).then(function(a){d.db=f.db=a,c._dbInfo=d;for(var b=0;b<j.length;b++){var e=j[b];e!==c&&(e._dbInfo.db=d.db,e._dbInfo.version=d.version)}})}function z(a,b){var c=this;"string"!=typeof a&&(console.warn(a+" used as a key, but it is not a string."),a=String(a));var d=new ja(function(b,d){c.ready().then(function(){var e=c._dbInfo,f=e.db.transaction(e.storeName,"readonly").objectStore(e.storeName),g=f.get(a);g.onsuccess=function(){var a=g.result;void 0===a&&(a=null),w(a)&&(a=v(a)),b(a)},g.onerror=function(){d(g.error)}}).catch(d)});return j(d,b),d}function A(a,b){var c=this,d=new ja(function(b,d){c.ready().then(function(){var e=c._dbInfo,f=e.db.transaction(e.storeName,"readonly").objectStore(e.storeName),g=f.openCursor(),h=1;g.onsuccess=function(){var c=g.result;if(c){var d=c.value;w(d)&&(d=v(d));var e=a(d,c.key,h++);void 0!==e?b(e):c.continue()}else b()},g.onerror=function(){d(g.error)}}).catch(d)});return j(d,b),d}function B(a,b,c){var d=this;"string"!=typeof a&&(console.warn(a+" used as a key, but it is not a string."),a=String(a));var e=new ja(function(c,e){var f;d.ready().then(function(){return f=d._dbInfo,"[object Blob]"===la.call(b)?n(f.db).then(function(a){return a?b:u(b)}):b}).then(function(b){var d=f.db.transaction(f.storeName,"readwrite"),g=d.objectStore(f.storeName),h=g.put(b,a);null===b&&(b=void 0),d.oncomplete=function(){void 0===b&&(b=null),c(b)},d.onabort=d.onerror=function(){var a=h.error?h.error:h.transaction.error;e(a)}}).catch(e)});return 
j(e,c),e}function C(a,b){var c=this;"string"!=typeof a&&(console.warn(a+" used as a key, but it is not a string."),a=String(a));var d=new ja(function(b,d){c.ready().then(function(){var e=c._dbInfo,f=e.db.transaction(e.storeName,"readwrite"),g=f.objectStore(e.storeName),h=g.delete(a);f.oncomplete=function(){b()},f.onerror=function(){d(h.error)},f.onabort=function(){var a=h.error?h.error:h.transaction.error;d(a)}}).catch(d)});return j(d,b),d}function D(a){var b=this,c=new ja(function(a,c){b.ready().then(function(){var d=b._dbInfo,e=d.db.transaction(d.storeName,"readwrite"),f=e.objectStore(d.storeName),g=f.clear();e.oncomplete=function(){a()},e.onabort=e.onerror=function(){var a=g.error?g.error:g.transaction.error;c(a)}}).catch(c)});return j(c,a),c}function E(a){var b=this,c=new ja(function(a,c){b.ready().then(function(){var d=b._dbInfo,e=d.db.transaction(d.storeName,"readonly").objectStore(d.storeName),f=e.count();f.onsuccess=function(){a(f.result)},f.onerror=function(){c(f.error)}}).catch(c)});return j(c,a),c}function F(a,b){var c=this,d=new ja(function(b,d){return a<0?void b(null):void c.ready().then(function(){var e=c._dbInfo,f=e.db.transaction(e.storeName,"readonly").objectStore(e.storeName),g=!1,h=f.openCursor();h.onsuccess=function(){var c=h.result;return c?void(0===a?b(c.key):g?b(c.key):(g=!0,c.advance(a))):void b(null)},h.onerror=function(){d(h.error)}}).catch(d)});return j(d,b),d}function G(a){var b=this,c=new ja(function(a,c){b.ready().then(function(){var d=b._dbInfo,e=d.db.transaction(d.storeName,"readonly").objectStore(d.storeName),f=e.openCursor(),g=[];f.onsuccess=function(){var b=f.result;return b?(g.push(b.key),void b.continue()):void a(g)},f.onerror=function(){c(f.error)}}).catch(c)});return j(c,a),c}function H(a){var b,c,d,e,f,g=.75*a.length,h=a.length,i=0;"="===a[a.length-1]&&(g--,"="===a[a.length-2]&&g--);var j=new ArrayBuffer(g),k=new Uint8Array(j);for(b=0;b<h;b+=4)c=na.indexOf(a[b]),d=na.indexOf(a[b+1]),e=na.indexOf(a[b+2]),f=na.indexOf(a[b+3]),k[i++]=c<<2|d>>4,k[i++]=(15&d)<<4|e>>2,k[i++]=(3&e)<<6|63&f;return j}function I(a){var b,c=new Uint8Array(a),d="";for(b=0;b<c.length;b+=3)d+=na[c[b]>>2],d+=na[(3&c[b])<<4|c[b+1]>>4],d+=na[(15&c[b+1])<<2|c[b+2]>>6],d+=na[63&c[b+2]];return c.length%3===2?d=d.substring(0,d.length-1)+"=":c.length%3===1&&(d=d.substring(0,d.length-2)+"=="),d}function J(a,b){var c="";if(a&&(c=Ea.call(a)),a&&("[object ArrayBuffer]"===c||a.buffer&&"[object ArrayBuffer]"===Ea.call(a.buffer))){var d,e=qa;a instanceof ArrayBuffer?(d=a,e+=sa):(d=a.buffer,"[object Int8Array]"===c?e+=ua:"[object Uint8Array]"===c?e+=va:"[object Uint8ClampedArray]"===c?e+=wa:"[object Int16Array]"===c?e+=xa:"[object Uint16Array]"===c?e+=za:"[object Int32Array]"===c?e+=ya:"[object Uint32Array]"===c?e+=Aa:"[object Float32Array]"===c?e+=Ba:"[object Float64Array]"===c?e+=Ca:b(new Error("Failed to get type for BinaryArray"))),b(e+I(d))}else if("[object Blob]"===c){var f=new FileReader;f.onload=function(){var c=oa+a.type+"~"+I(this.result);b(qa+ta+c)},f.readAsArrayBuffer(a)}else try{b(JSON.stringify(a))}catch(c){console.error("Couldn't convert value into a JSON string: ",a),b(null,c)}}function K(a){if(a.substring(0,ra)!==qa)return JSON.parse(a);var b,c=a.substring(Da),d=a.substring(ra,Da);if(d===ta&&pa.test(c)){var e=c.match(pa);b=e[1],c=c.substring(e[0].length)}var f=H(c);switch(d){case sa:return f;case ta:return i([f],{type:b});case ua:return new Int8Array(f);case va:return new Uint8Array(f);case wa:return new Uint8ClampedArray(f);case xa:return new Int16Array(f);case za:return new 
Uint16Array(f);case ya:return new Int32Array(f);case Aa:return new Uint32Array(f);case Ba:return new Float32Array(f);case Ca:return new Float64Array(f);default:throw new Error("Unkown type: "+d)}}function L(a){var b=this,c={db:null};if(a)for(var d in a)c[d]="string"!=typeof a[d]?a[d].toString():a[d];var e=new ja(function(a,d){try{c.db=openDatabase(c.name,String(c.version),c.description,c.size)}catch(a){return d(a)}c.db.transaction(function(e){e.executeSql("CREATE TABLE IF NOT EXISTS "+c.storeName+" (id INTEGER PRIMARY KEY, key unique, value)",[],function(){b._dbInfo=c,a()},function(a,b){d(b)})})});return c.serializer=Fa,e}function M(a,b){var c=this;"string"!=typeof a&&(console.warn(a+" used as a key, but it is not a string."),a=String(a));var d=new ja(function(b,d){c.ready().then(function(){var e=c._dbInfo;e.db.transaction(function(c){c.executeSql("SELECT * FROM "+e.storeName+" WHERE key = ? LIMIT 1",[a],function(a,c){var d=c.rows.length?c.rows.item(0).value:null;d&&(d=e.serializer.deserialize(d)),b(d)},function(a,b){d(b)})})}).catch(d)});return j(d,b),d}function N(a,b){var c=this,d=new ja(function(b,d){c.ready().then(function(){var e=c._dbInfo;e.db.transaction(function(c){c.executeSql("SELECT * FROM "+e.storeName,[],function(c,d){for(var f=d.rows,g=f.length,h=0;h<g;h++){var i=f.item(h),j=i.value;if(j&&(j=e.serializer.deserialize(j)),j=a(j,i.key,h+1),void 0!==j)return void b(j)}b()},function(a,b){d(b)})})}).catch(d)});return j(d,b),d}function O(a,b,c,d){var e=this;"string"!=typeof a&&(console.warn(a+" used as a key, but it is not a string."),a=String(a));var f=new ja(function(f,g){e.ready().then(function(){void 0===b&&(b=null);var h=b,i=e._dbInfo;i.serializer.serialize(b,function(b,j){j?g(j):i.db.transaction(function(c){c.executeSql("INSERT OR REPLACE INTO "+i.storeName+" (key, value) VALUES (?, ?)",[a,b],function(){f(h)},function(a,b){g(b)})},function(b){if(b.code===b.QUOTA_ERR){if(d>0)return void f(O.apply(e,[a,h,c,d-1]));g(b)}})})}).catch(g)});return j(f,c),f}function P(a,b,c){return O.apply(this,[a,b,c,1])}function Q(a,b){var c=this;"string"!=typeof a&&(console.warn(a+" used as a key, but it is not a string."),a=String(a));var d=new ja(function(b,d){c.ready().then(function(){var e=c._dbInfo;e.db.transaction(function(c){c.executeSql("DELETE FROM "+e.storeName+" WHERE key = ?",[a],function(){b()},function(a,b){d(b)})})}).catch(d)});return j(d,b),d}function R(a){var b=this,c=new ja(function(a,c){b.ready().then(function(){var d=b._dbInfo;d.db.transaction(function(b){b.executeSql("DELETE FROM "+d.storeName,[],function(){a()},function(a,b){c(b)})})}).catch(c)});return j(c,a),c}function S(a){var b=this,c=new ja(function(a,c){b.ready().then(function(){var d=b._dbInfo;d.db.transaction(function(b){b.executeSql("SELECT COUNT(key) as c FROM "+d.storeName,[],function(b,c){var d=c.rows.item(0).c;a(d)},function(a,b){c(b)})})}).catch(c)});return j(c,a),c}function T(a,b){var c=this,d=new ja(function(b,d){c.ready().then(function(){var e=c._dbInfo;e.db.transaction(function(c){c.executeSql("SELECT key FROM "+e.storeName+" WHERE id = ? 
LIMIT 1",[a+1],function(a,c){var d=c.rows.length?c.rows.item(0).key:null;b(d)},function(a,b){d(b)})})}).catch(d)});return j(d,b),d}function U(a){var b=this,c=new ja(function(a,c){b.ready().then(function(){var d=b._dbInfo;d.db.transaction(function(b){b.executeSql("SELECT key FROM "+d.storeName,[],function(b,c){for(var d=[],e=0;e<c.rows.length;e++)d.push(c.rows.item(e).key);a(d)},function(a,b){c(b)})})}).catch(c)});return j(c,a),c}function V(a){var b=this,c={};if(a)for(var d in a)c[d]=a[d];return c.keyPrefix=c.name+"/",c.storeName!==b._defaultConfig.storeName&&(c.keyPrefix+=c.storeName+"/"),b._dbInfo=c,c.serializer=Fa,ja.resolve()}function W(a){var b=this,c=b.ready().then(function(){for(var a=b._dbInfo.keyPrefix,c=localStorage.length-1;c>=0;c--){var d=localStorage.key(c);0===d.indexOf(a)&&localStorage.removeItem(d)}});return j(c,a),c}function X(a,b){var c=this;"string"!=typeof a&&(console.warn(a+" used as a key, but it is not a string."),a=String(a));var d=c.ready().then(function(){var b=c._dbInfo,d=localStorage.getItem(b.keyPrefix+a);return d&&(d=b.serializer.deserialize(d)),d});return j(d,b),d}function Y(a,b){var c=this,d=c.ready().then(function(){for(var b=c._dbInfo,d=b.keyPrefix,e=d.length,f=localStorage.length,g=1,h=0;h<f;h++){var i=localStorage.key(h);if(0===i.indexOf(d)){var j=localStorage.getItem(i);if(j&&(j=b.serializer.deserialize(j)),j=a(j,i.substring(e),g++),void 0!==j)return j}}});return j(d,b),d}function Z(a,b){var c=this,d=c.ready().then(function(){var b,d=c._dbInfo;try{b=localStorage.key(a)}catch(a){b=null}return b&&(b=b.substring(d.keyPrefix.length)),b});return j(d,b),d}function $(a){var b=this,c=b.ready().then(function(){for(var a=b._dbInfo,c=localStorage.length,d=[],e=0;e<c;e++)0===localStorage.key(e).indexOf(a.keyPrefix)&&d.push(localStorage.key(e).substring(a.keyPrefix.length));return d});return j(c,a),c}function _(a){var b=this,c=b.keys().then(function(a){return a.length});return j(c,a),c}function aa(a,b){var c=this;"string"!=typeof a&&(console.warn(a+" used as a key, but it is not a string."),a=String(a));var d=c.ready().then(function(){var b=c._dbInfo;localStorage.removeItem(b.keyPrefix+a)});return j(d,b),d}function ba(a,b,c){var d=this;"string"!=typeof a&&(console.warn(a+" used as a key, but it is not a string."),a=String(a));var e=d.ready().then(function(){void 0===b&&(b=null);var c=b;return new ja(function(e,f){var g=d._dbInfo;g.serializer.serialize(b,function(b,d){if(d)f(d);else try{localStorage.setItem(g.keyPrefix+a,b),e(c)}catch(a){"QuotaExceededError"!==a.name&&"NS_ERROR_DOM_QUOTA_REACHED"!==a.name||f(a),f(a)}})})});return j(e,c),e}function ca(a,b){a[b]=function(){var c=arguments;return a.ready().then(function(){return a[b].apply(a,c)})}}function da(){for(var a=1;a<arguments.length;a++){var b=arguments[a];if(b)for(var c in b)b.hasOwnProperty(c)&&(Oa(b[c])?arguments[0][c]=b[c].slice():arguments[0][c]=b[c])}return arguments[0]}function ea(a){for(var b in Ja)if(Ja.hasOwnProperty(b)&&Ja[b]===a)return!0;return!1}var fa="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(a){return typeof a}:function(a){return a&&"function"==typeof Symbol&&a.constructor===Symbol&&a!==Symbol.prototype?"symbol":typeof a},ga=e();"undefined"==typeof Promise&&a(3);var 
ha,ia,ja=Promise,ka="local-forage-detect-blob-support",la=Object.prototype.toString,ma={_driver:"asyncStorage",_initStorage:y,iterate:A,getItem:z,setItem:B,removeItem:C,clear:D,length:E,key:F,keys:G},na="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",oa="~~local_forage_type~",pa=/^~~local_forage_type~([^~]+)~/,qa="__lfsc__:",ra=qa.length,sa="arbf",ta="blob",ua="si08",va="ui08",wa="uic8",xa="si16",ya="si32",za="ur16",Aa="ui32",Ba="fl32",Ca="fl64",Da=ra+sa.length,Ea=Object.prototype.toString,Fa={serialize:J,deserialize:K,stringToBuffer:H,bufferToString:I},Ga={_driver:"webSQLStorage",_initStorage:L,iterate:N,getItem:M,setItem:P,removeItem:Q,clear:R,length:S,key:T,keys:U},Ha={_driver:"localStorageWrapper",_initStorage:V,iterate:Y,getItem:X,setItem:ba,removeItem:aa,clear:W,length:_,key:Z,keys:$},Ia={},Ja={INDEXEDDB:"asyncStorage",LOCALSTORAGE:"localStorageWrapper",WEBSQL:"webSQLStorage"},Ka=[Ja.INDEXEDDB,Ja.WEBSQL,Ja.LOCALSTORAGE],La=["clear","getItem","iterate","key","keys","length","removeItem","setItem"],Ma={description:"",driver:Ka.slice(),name:"localforage",size:4980736,storeName:"keyvaluepairs",version:1},Na={};Na[Ja.INDEXEDDB]=f(),Na[Ja.WEBSQL]=g(),Na[Ja.LOCALSTORAGE]=h();var Oa=Array.isArray||function(a){return"[object Array]"===Object.prototype.toString.call(a)},Pa=function(){function a(b){d(this,a),this.INDEXEDDB=Ja.INDEXEDDB,this.LOCALSTORAGE=Ja.LOCALSTORAGE,this.WEBSQL=Ja.WEBSQL,this._defaultConfig=da({},Ma),this._config=da({},this._defaultConfig,b),this._driverSet=null,this._initDriver=null,this._ready=!1,this._dbInfo=null,this._wrapLibraryMethodsWithReady(),this.setDriver(this._config.driver).catch(function(){})}return a.prototype.config=function(a){if("object"===("undefined"==typeof a?"undefined":fa(a))){if(this._ready)return new Error("Can't call config() after localforage has been used.");for(var b in a){if("storeName"===b&&(a[b]=a[b].replace(/\W/g,"_")),"version"===b&&"number"!=typeof a[b])return new Error("Database version must be a number.");this._config[b]=a[b]}return!("driver"in a&&a.driver)||this.setDriver(this._config.driver)}return"string"==typeof a?this._config[a]:this._config},a.prototype.defineDriver=function(a,b,c){var d=new ja(function(b,c){try{var d=a._driver,e=new Error("Custom driver not compliant; see https://mozilla.github.io/localForage/#definedriver"),f=new Error("Custom driver name already in use: "+a._driver);if(!a._driver)return void c(e);if(ea(a._driver))return void c(f);for(var g=La.concat("_initStorage"),h=0;h<g.length;h++){var i=g[h];if(!i||!a[i]||"function"!=typeof a[i])return void c(e)}var j=ja.resolve(!0);"_support"in a&&(j=a._support&&"function"==typeof a._support?a._support():ja.resolve(!!a._support)),j.then(function(c){Na[d]=c,Ia[d]=a,b()},c)}catch(a){c(a)}});return k(d,b,c),d},a.prototype.driver=function(){return this._driver||null},a.prototype.getDriver=function(a,b,c){var d=this,e=ja.resolve().then(function(){if(!ea(a)){if(Ia[a])return Ia[a];throw new Error("Driver not found.")}switch(a){case d.INDEXEDDB:return ma;case d.LOCALSTORAGE:return Ha;case d.WEBSQL:return Ga}});return k(e,b,c),e},a.prototype.getSerializer=function(a){var b=ja.resolve(Fa);return k(b,a),b},a.prototype.ready=function(a){var b=this,c=b._driverSet.then(function(){return null===b._ready&&(b._ready=b._initDriver()),b._ready});return k(c,a,a),c},a.prototype.setDriver=function(a,b,c){function d(){g._config.driver=g.driver()}function e(a){return g._extend(a),d(),g._ready=g._initStorage(g._config),g._ready}function f(a){return function(){function 
b(){for(;c<a.length;){var f=a[c];return c++,g._dbInfo=null,g._ready=null,g.getDriver(f).then(e).catch(b)}d();var h=new Error("No available storage method found.");return g._driverSet=ja.reject(h),g._driverSet}var c=0;return b()}}var g=this;Oa(a)||(a=[a]);var h=this._getSupportedDrivers(a),i=null!==this._driverSet?this._driverSet.catch(function(){return ja.resolve()}):ja.resolve();return this._driverSet=i.then(function(){var a=h[0];return g._dbInfo=null,g._ready=null,g.getDriver(a).then(function(a){g._driver=a._driver,d(),g._wrapLibraryMethodsWithReady(),g._initDriver=f(h)})}).catch(function(){d();var a=new Error("No available storage method found.");return g._driverSet=ja.reject(a),g._driverSet}),k(this._driverSet,b,c),this._driverSet},a.prototype.supports=function(a){return!!Na[a]},a.prototype._extend=function(a){da(this,a)},a.prototype._getSupportedDrivers=function(a){for(var b=[],c=0,d=a.length;c<d;c++){var e=a[c];this.supports(e)&&b.push(e)}return b},a.prototype._wrapLibraryMethodsWithReady=function(){for(var a=0;a<La.length;a++)ca(this,La[a])},a.prototype.createInstance=function(b){return new a(b)},a}(),Qa=new Pa;b.exports=Qa},{3:3}]},{},[4])(4)}); | yuehui-miji | /yuehui-miji-2022.10.11.0.tar.gz/yuehui-miji-2022.10.11.0/YuehuiMiji/js/libs/localforage.min.js | localforage.min.js |
import requests
import json
import logging
import urllib.parse
import urllib.request as urllib2
def login():
data = {"username": "admin",
"password": "123456"
}
# 1qaz2wsx
url3 = 'https://test-hechun.cloud.sensorsdata.cn/api/auth/login?project=yuejianzhong'
url4 ='https://sdk-test.cloud.sensorsdata.cn/api/auth/login?project=yuejianzhong'
    headers = {'Content-Type': 'application/json'}  # add the Content-Type header so the body is treated as JSON
response = requests.post(url=url4,
headers=headers,
                             data=json.dumps(data))  # serialize the dict to a JSON string for the POST body
return response
# print(response.json())
# json = json.dumps(response.text)
# print('\n')
# print(response.text)
# __send_request(
# url='https://sdk-test.cloud.sensorsdata.cn/api/auth/login?project=yuejianzhong'
# content={'username': "admin", 'password': "123456"}
# headers={'Content-Type': 'application/json'}
def login_format_importer():
# __send_request(
# url='https://sdk-test.cloud.sensorsdata.cn/api/auth/login?project=yuejianzhong'
# content={'username': "admin", 'password': "123456"}
# headers={'Content-Type': 'application/json'}
# try:
headers = {'Content-Type': 'application/json'}
data4 = {"username": "admin",
"password": "123456"
}
data3 = {"username": "admin",
"password": "1qaz2wsx"
}
data = {
"username": "admin",
"password": "ca90fa94"
}
# 8
# f4703591d0b7e4c37619656b2d38bc9e5acfc0091e589785114bbd435fe5e41
url3 = 'https://test-hechun.cloud.sensorsdata.cn/api/auth/login?project=yuejianzhong'
url4 = 'https://sdk-test.cloud.sensorsdata.cn/api/auth/login?project=yuejianzhong'
request = urllib2.Request(url='https://juyanwang.cloud.sensorsdata.cn/api/auth/login?project=production',
# data=bytes(json.dumps(data), encoding="utf-8"),
data=json.dumps(data).encode(),
headers=headers)
response = urllib2.urlopen(request)
response_content = json.loads(response.read().decode('utf-8'))
# print(response.read().decode('utf-8'))
return response_content
def saapi(token):
urla = 'https://test-hechun.cloud.sensorsdata.cn/api/events/report?token=' + token + '&project=yuejianzhong'
print(urla)
urlb = 'https://test-hechun.cloud.sensorsdata.cn/api/account?token=570cff04c9bc7b802d11abccd2035bb558e8b5ec221943c1e35b10ec269caed0&project=yuejianzhong'
data2 = {"measures": [{"event_name": "$AppStart", "aggregator": "unique"}], "unit": "day", "sampling_factor": 64,
"axis_config": {"isNormalize": False, "left": [], "right": []}, "from_date": "2019-03-19",
"to_date": "2019-03-25", "tType": "n", "ratio": "n", "approx": False, "by_fields": [], "filter": {},
"detail_and_rollup": True, "request_id": "1553596932966:761148", "use_cache": True}
    headers = {'Content-Type': 'application/json'}  # add the Content-Type header so the body is treated as JSON
response = requests.post(
url=urlb,
headers=headers,
data=json.dumps(data2))
return response
# print(response.json())
# json = json.dumps(response.text)
# print(response.status_code)
# print('\n')
#
# print(response.text)
def check_url(url):
    '''
    Import URL:        http://xxxx:8106/sa?project=xxx
    Debug/confirm URL: http://xxxx:8106/debug
    '''
    debug_url = urllib.parse.urlparse(url)
    # Replace the URI path with '/debug' to hit the Debug-mode endpoint
    debug_url = debug_url._replace(path='/debug')
    logging.debug('debug url: %s', debug_url.geturl())
    with urllib2.urlopen(debug_url.geturl()) as f:
        response = f.read().decode('utf8').strip()
        if response != 'Sensors Analytics is ready to receive your data!':
            raise Exception('invalid url %s' % url)
def sauser():
data2 = [{"username":"[email protected]","role":"64","password":"12333@xee"}]
    headers = {'Content-Type': 'application/json'}  # add the Content-Type header so the body is treated as JSON
response = requests.put(
url='https://test-hechun.cloud.sensorsdata.cn/api/account?token=570cff04c9bc7b802d11abccd2035bb558e8b5ec221943c1e35b10ec269caed0&project=yuejianzhong',
headers=headers,
data=json.dumps(data2))
return response
def token_death():
data3 = {"username": "admin",
"expired_interval": "2"
}
    headers = {'Content-Type': 'application/json'}  # add the Content-Type header so the body is treated as JSON
response = requests.post(
url='https://test-hechun.cloud.sensorsdata.cn/api/auth/login?token=570cff04c9bc7b802d11abccd2035bb558e8b5ec221943c1e35b10ec269caed0&project=yuejianzhong',
headers=headers,
data=json.dumps(data3))
return response.text
if __name__ == '__main__':
# re = login()
# print(re.text)
# j = json.loads(re.text)
# print(re.text)
# print('\n')
# print(type(j))
# print(j['token'])
# print('\n')
# re1 = saapi()
# print(re1.text)
# logtoken = login().text
# loginjson = json.loads(logtoken)
# print(saapi(j))
sap = token_death()
# jsonsa = json.dumps(sap)
j = json.loads(sap)
# token=j['token']
# print(type)
print(j['token'])
# lo = sauser()
# print(lo)
print(saapi(j['token']).status_code) | yuejzDemo | /yuejzDemo-0.0.8-py3-none-any.whl/SADemo/SA_API.py | SA_API.py |
import sensorsanalytics
# print('hello world')
# print(2**10)
# name = input()
# print(name)
#print(10//3)
# sum = 0
# for x in range(101):
# sum = sum + x
# print(sum)
# a = set([4,5,6])
# print(a)
# a.add((1, 2, 3))
# print(a)
# consumer = sensorsanalytics.LoggingConsumer("/Applications/logagent/data/py/access.log")
# sa = sensorsanalytics.SensorsAnalytics(consumer)
#
#
# sa.track("abc","abc")
# sa.flush()
# def odd():
# print('step 1')
# yield 1
# print('step 2')
# yield(2)
# print('step 3')
# yield(2)
#
# o = odd()
# next(o)
# next(o)
#next(o)
# def count():
# def f(j):
# def g():
# return j*j
# return g
# fs = []
# for i in range(1, 4):
#         fs.append(f(i))  # f(i) is executed immediately, so the current value of i is passed into f()
# return fs
# f1, f2, f3 = count()
#
# print(f1(),f2(),f3())
#
# def log(func):
# def wrapper(*args, **kw):
# print('call %s():' % func.__name__)
# return func(*args, **kw)
# return wrapper
#
# @log
# def now():
# print('2015-3-25')
#
# now()
# def log(text):
# def decorator(func):
# def wrapper(*args, **kw):
# print('%s %s():' % (text, func.__name__))
# return func(*args, **kw)
# return wrapper
# return decorator
#
# @log('execute')
# def now():
# print('2015-3-25')
#
# now()
# class Student(object):
# def __init__(self,name):
# self.name= name
# def __str__(self):
# return 'Student object (name: %s)' % self.name
#
#
# print(Student('ac'))
#
# from enum import Enum
#
# class Gender(Enum):
# male = 0
# female = 1
#
# class student(object):
# def __init__(self,name,gender):
# self.name = name
# self.gender = gender
#
# bart = student("acb",Gender.male)
#
# if bart.gender == Gender.male:
# print("yes")
# else:
# print("no")
# from functools import reduce
# import logging
#
# def str2num(s):
# if '.' in s:
# return float(s)
# else:
# return int(s)
#
# def calc(exp):
# try:
# ss = exp.split('+')
# ns = map(str2num, ss)
# return reduce(lambda acc, x: acc + x, ns)
# except ValueError as e:
# logging.info(e)
#
#
# def main():
# r = calc('100 + 200 + 345')
# print('100 + 200 + 345 =', r)
# r = calc('99 + 88 + 7.6')
# print('99 + 88 + 7.6 =', r)
# r = calc('a + b + c')
# print('a + b + c =', r)
#
# main()
#
# with open('/Users/yuejz/Downloads/log.txt') as f:
# for line in f.readlines():
# print(line.strip())
import os
# print(os.path.abspath('.'))
# os.path.join('/Users/yuejz/pythonSrc/FirstDemo','test')
# os.mkdir('/Users/yuejz/pythonSrc/FirstDemo/test')
# os.rmdir('/Users/yuejz/pythonSrc/FirstDemo/test')
# os.chdir('/Users/yuejz/pythonSrc/FirstDemo')
# for name in os.listdir('/Users/yuejz/pythonSrc/FirstDemo'):
# print(name)
import os
# import os
# def search_file(dir,sname):
#     if sname in os.path.split(dir)[1]: # check whether the file name contains sname
#         print(os.path.relpath(dir)) # print the path relative to the current directory
#     if os.path.isfile(dir): # if dir is a plain file it has no children, so there is nothing left to walk
# return
#
#     for dire in os.listdir(dir): # walk the children; dire is the current entry name
#         search_file(os.path.join(dir,dire),sname) # join to get the child's full path
#         # repeat the same operation for every child path
#
# def search(filename,dir):
# for root, dirs, files in os.walk(dir):
# for name in files:
# if filename in name:
# print(os.path.join(root, name))
#
# # search_file('/Users/yuejz/pythonSrc/FirstDemo','he')
#
# search('init','/Users/yuejz/pythonSrc/FirstDemo')
# import urllib.request
# import urllib
#
# value={"username":"admin","password":"1qaz2wsx"}
# data=urllib.urlencode(value)
# url="https://test-hechun.cloud.sensorsdata.cn/api/auth/login?project=yuejianzhong"
# request=urllib.request.Request(url,data)
# RES=urllib2.urlopen(request)
# print(RES.read())
#
# import urllib.request
# import urllib.parse
# data = urllib.parse.urlencode({"username":"admin","password":"1qaz2wsx"})
# data = data.encode('utf-8')
# request = urllib.request.Request("https://test-hechun.cloud.sensorsdata.cn/api/auth/login?project=yuejianzhong")
# # adding charset parameter to the Content-Type header.
# request.add_header("Content-Type","application/x-www-form-urlencoded;charset=utf-8")
# f = urllib.request.urlopen(request, data)
# print(f.read().decode('utf-8'))
import requests
import json
import pandas as pd  # needed by obtain_events() below
# data = {"username":"admin",
# "password":"1qaz2wsx"
# }
#
# headers = {'Content-Type': 'application/json'}  # add the Content-Type header so the body is treated as JSON
# response = requests.post(url='https://test-hechun.cloud.sensorsdata.cn/api/auth/login?project=yuejianzhong',
# headers=headers,
#                          data=json.dumps(data))  # serialize the dict to a JSON string for the POST body
#
# # print(response.json())
# # json = json.dumps(response.text)
# print('\n')
# print(response.text)
data1 = {
"users": [
'a5227c8e9bdd52fe'
],
"from_date": "2019-03-22",
"to_date": "2019-03-26",
    # False means the "users" parameter holds internal user_id values; True means distinct_id values are passed
"distinct_id": True
}
data2 ={"measures":[{"event_name":"$AppStart","aggregator":"unique"}],"unit":"day","sampling_factor":64,"axis_config":{"isNormalize":False,"left":[],"right":[]},"from_date":"2019-03-19","to_date":"2019-03-25","tType":"n","ratio":"n","approx":False,"by_fields":[],"filter":{},"detail_and_rollup":True,"request_id":"1553596932966:761148","use_cache":True}
url='https://test-hechun.cloud.sensorsdata.cn/api/events/report?token=yx6u1f4cDqVlzuZlcfDddbXFdBGtz4gXcEgBNbGfQvHunZ8joxCtjtciRcQyPKKqyvnkvpsL19Ft7xx7iU3FTLoYo2gEWmrwOjFOsmsNbIFxE28fesibLpmRA6mQLnB5&project=yuejianzhong',
url1='https://test-hechun.cloud.sensorsdata.cn/api/events/report?token=570cff04c9bc7b802d11abccd2035bb558e8b5ec221943c1e35b10ec269caed0&project=yuejianzhong',
headers = {'Content-Type': 'application/json'}  # add the Content-Type header so the body is treated as JSON
response = requests.post(url='https://test-hechun.cloud.sensorsdata.cn/api/events/report?token=570cff04c9bc7b802d11abccd2035bb558e8b5ec221943c1e35b10ec269caed0&project=yuejianzhong',
headers=headers,
data=json.dumps(data2))
# print(response.json())
# json = json.dumps(response.text)
print(response.status_code)
print('\n')
print(response.text)
customer_name = 'admin'
sa_site = 'https://test-hechun.cloud.sensorsdata.cn/'
sa_project = 'yuejianzhong'
sa_cookie = 'sensorsdata-token-flag=flag; sa_jssdk_2015_sensors-cms=%7B%22distinct_id%22%3A%22haofenqistandalone%3A19%22%2C%22props%22%3A%7B%22customer_id%22%3A%22haofenqistandalone%22%2C%22project_name%22%3A%22default%22%2C%22username%22%3A%22admin%22%2C%22isDemo%22%3Afalse%2C%22version1%22%3A%221.13%22%2C%22version2%22%3A%221.13.5207%22%2C%22%24latest_traffic_source_type%22%3A%22%E7%9B%B4%E6%8E%A5%E6%B5%81%E9%87%8F%22%2C%22%24latest_referrer%22%3A%22%22%2C%22%24latest_referrer_host%22%3A%22%22%2C%22%24latest_search_keyword%22%3A%22%E6%9C%AA%E5%8F%96%E5%88%B0%E5%80%BC_%E7%9B%B4%E6%8E%A5%E6%89%93%E5%BC%80%22%7D%7D; sensorsdata2015jssdkcross=%7B%22%24device_id%22%3A%22169717fd0afe6-064ebcf4af2e5a-42554133-2073600-169717fd0b01c9%22%7D; csrftoken=DaAc1I2WQ7c126XASW5sq2MS68rsev9n; api_server_id=qbWbSd1c; sensorsdata-token_default=570cff04c9bc7b802d11abccd2035bb558e8b5ec221943c1e35b10ec269caed0'
event_file_name = '事件设计.xlsx'
# df_raw = pd.read_excel(event_file_name, sheet_name='事件表', header=None)
# df_raw_user = pd.read_excel(event_file_name, sheet_name='用户表', header=None)
# df1 = fetch_track_cols(df_raw)
not_initial_property = []  # assumed placeholder: preset ($-prefixed) properties that should still be kept
def obtain_events(site, cookie, project_name):
    print('Reading event & property metadata from the Sensors Analytics environment...')
api_site = site + '/api/events/all'
# results = requests.get(api_site, params={'project': project_name}, headers={'Cookie': cookie}).text
results = requests.get(api_site, params={'project': project_name}).text
if 'error' in results:
        print('Sensors Analytics API request error: please check the URL, project name and Cookie')
return None
print(results)
df = pd.read_json(results)
df_result_list = []
for index, item in df.iterrows():
initial_property_num = 0
        # skip virtual events
if item['virtual']:
continue
api_url = site + '/api/event/{}/properties'.format(item['id'])
properties = requests.get(api_url, params={'project': project_name}, headers={'Cookie': cookie}).json()['event']
tmp = []
for property in properties:
            # skip $-prefixed preset properties and count the common preset properties
if property['name'][0] == '$' and property['name'] not in not_initial_property:
initial_property_num += 1
continue
tmp_list = [property['event_name'], item['cname'], property['name'], property['cname'],
property['data_type']]
tmp.append(tmp_list)
for value in tmp:
value.append(initial_property_num)
df_result_list += tmp
df_result = pd.DataFrame(df_result_list, columns=['event_en', 'event_cn', 'property_en', 'property_cn', 'data_type',
'initial_p_num'])
df_result.replace(['string', 'number', 'bool', 'datetime', 'date', 'list'],
['字符串', '数值', 'BOOL', '时间', '日期', '列表'], inplace=True)
    print('Data from the Sensors Analytics environment has been fetched...')
return df_result
df_sa = obtain_events(sa_site, sa_cookie, sa_project)
# import sensorsanalytics
#
# # 神策分析数据接收的 URL
# SA_SERVER_URL = 'https://test-hechun.datasink.sensorsdata.cn/sa?project=yuejianzhong&token=d28b875ed9ac268f'
# # 发送数据的超时时间,单位毫秒
# SA_REQUEST_TIMEOUT = 100000
# # Debug 模式下,是否将数据导入神策分析
# # True - 校验数据,并将数据导入到神策分析中
# # False - 校验数据,但不进行数据导入
# SA_DEBUG_WRITE_DATA = True
#
# # 初始化 Debug Consumer
# consumer = sensorsanalytics.DebugConsumer(SA_SERVER_URL, SA_DEBUG_WRITE_DATA, SA_REQUEST_TIMEOUT)
#
# # 初始化 Logging Consumer
# #consumer = sensorsanalytics.LoggingConsumer('/Applications/logagent/data/py.log')
#
# # consumer = sensorsanalytics.LoggingConsumer('/Applications/logagent/data/py.log')
#
# # 使用 Consumer 来构造 SensorsAnalytics 对象
# sa = sensorsanalytics.SensorsAnalytics(consumer)
# properties = {
# # 用户性别属性(Sex)为男性
# '$time' : 1553487334,
# # 用户等级属性(Level)为 VIP
# 'UserLevel' : 'Elite VIP',
# }
#
#
# # print(properties['time'])
#
# sa.track(distinct_id="acn",
# event_name="test",
# properties=properties,
# is_login_id=False)
# sa.flush()
# import os
#
# print(os.name) | yuejzDemo | /yuejzDemo-0.0.8-py3-none-any.whl/SADemo/HelloWorld.py | HelloWorld.py |
<!-- Improved compatibility of back to top link: See: https://github.com/othneildrew/Best-README-Template/pull/73 -->
<a name="readme-top"></a>
<div>
  <h1 align="center">WiseSpot Python package build-up documentation</h1>
</br>
</br>
<p align="center">
    This is an easy Python package tutorial for WiseSpot staff to build their own Python packages.
</p>
</div>
</br>
</br>
</br>
<!-- TABLE OF CONTENTS -->
<details>
<summary>Table of Contents</summary>
<ol>
<li>
<a href="#introduction">Introduction</a>
<ul>
<li><a href="#built-with">Built With</a></li>
</ul>
</li>
<li>
<a href="#getting-started">Getting Started</a>
<ul>
<li><a href="#prerequisites">Prerequisites</a></li>
<li><a href="#installation">Installation</a></li>
</ul>
</li>
<li><a href="#usage">Usage</a></li>
<li>
<a href="#step1-build-python-package">Step1: Build python package</a>
</li>
<li>
<a href="#step2-upload-package-to-wisespot-sonatype-nexus">Step2: Upload source code to WiseSpot Sonatype Nexus</a>
<ul>
<li><a href="#make-sure-the-environment-install-twine-package">2.1 make sure the environment install `twine` package</a></li>
</ul>
</li>
<li>
<a href="#step3-build-a-ci-cd-python-package-in-gitlab">Step3: Build a Ci & Cd python package in Gitlab</a>
<ul>
<li><a href="#set-up-gitlab">3.1 Set up Gitlab</a></li>
<li><a href="#set-up-gitlab-ci-and-cd">3.2 Set up Gitlab CI/CD</a></li>
</ul>
</li>
<li><a href="#contributing">Contributing</a></li>
<li><a href="#license">License</a></li>
<li><a href="#credits">Credits</a></li>
<li><a href="#acknowledgments">Acknowledgments</a></li>
</ol>
</details>
<!-- Introduction -->
# Introduction
Here is a tutorial on how to create a Python package, written for WiseSpot staff.
## Why build Python package to develop Python software?
Python packages are the fundamental units of shareable code in Python. Packages make it easy to organize, reuse, and maintain your code, as well as share it between projects, with your colleagues, and with the wider Python community.
In this project, we will demonstrate methods and tools you can use to develop and maintain packages quickly, reproducibly, and with as much automation as possible — so you can focus on writing and sharing code!
## Not only package
This tutorial also covers how to build HTML documentation at the same time. `Sphinx` is a popular tool for creating documentation for Python and other programming languages. It is designed to generate high-quality documentation from source code and other textual sources. `Sphinx`'s markup language is reStructuredText, which allows you to write documentation in a simple and readable way.
`Sphinx` is widely used in the Python community and is the preferred documentation tool for many well-known Python applications and packages. It makes it easier for developers to produce comprehensive and up-to-date documentation for their projects by simplifying the process of developing and managing documentation.
## Tutorial Schedule
We cover 3 major parts in this project:
* `Build python package`
* `Gitlab CiCd`
* `Upload package to Nexus or Pypi`
## At last
Thanks a lot to WiseSpot for the opportunity to work on this project. Ming and Michael would like to express their gratitude! Special thanks to Jason and Brian for their great help. Enjoy!
<!-- Built With -->
# Built With
* [![Python][Python]][Python-url]
* [Cookiecutter](https://github.com/cookiecutter/cookiecutter)
* [Sphinx](https://github.com/sphinx-doc/sphinx)
* [Sonatype Nexus](https://www.sonatype.com/products/sonatype-nexus-repository)
<!-- Getting Started -->
# Getting Started
Follow these simple steps to set up the project locally and get a local copy up and running.
## Prerequisites
The following tools are required to use and build the package; install them before you start.
### Python
* You may directly access the `Python` official website for installation:
https://www.python.org/downloads/
### Anaconda
* `Anaconda` lets you keep multiple independent environments with different package versions, avoiding conflicts between different projects or dependencies.
https://www.anaconda.com/download/
### Poetry
* `Poetry` is a dependency management and packaging tool for Python. It is designed to simplify the process of managing project dependencies and creating distributable packages.
https://python-poetry.org/
## Installation
### Anaconda installation
After installation, check that the environment configuration is correct. Before you start, check the conda version and update it if needed.
```zsh
conda update conda
```
Check the created virtual environment
```zsh
conda env list
```
Create virtual environment
```zsh
conda create -n {your_env_name} python={version}
```
Start Virtual Environment
```zsh
conda activate {your_env_name}
```
### Poetry installation
* After installing `Poetry`, you will see a `pyproject.toml` file in the package folder. The `pyproject.toml` file stores all the metadata and install instructions for the package.
```zsh
$ poetry install
Installing dependencies from lock file
No dependencies to install or update
Installing the current project: yuenpycounts (0.1.13)
```
<!-- Usage -->
## Usage
<div>
    Here is a sample of how to use a function from the package we created.
<div>
- pip install the package, import the `yuenpycounts` package, and then call the `count_words()` or `plot_words()` function.
Step1: Open the Mac terminal and run the command below to make sure `pip` is available (this assumes `get-pip.py` has already been downloaded):
```bash
python3 get-pip.py
```
Step2: install `yuenpycounts` package
```bash
pip install yuenpycounts
```
Demo yuenpycounts's `count_words()`
```zsh
$ python3
>>> from yuenpycounts.yuenpycounts import count_words
>>> count_words("zen.txt")
Counter({'is': 10, 'better': 8, 'than': 8, 'the': 6, 'to': 5, 'of': 3, 'although': 3, 'never': 3, 'be': 3, 'one': 3, 'idea': 3, 'complex': 2, 'special': 2, 'should': 2, 'unless': 2, 'obvious': 2, 'way': 2, 'do': 2, 'it': 2, 'may': 2, 'now': 2, 'if': 2, 'implementation': 2, 'explain': 2, 'a': 2, 'zen': 1, 'python': 1, 'by': 1, 'tim': 1, 'peters': 1, 'beautiful': 1, 'ugly': 1, 'explicit': 1, 'implicit': 1, 'simple': 1, 'complicated': 1, 'flat': 1, 'nested': 1, 'sparse': 1, 'dense': 1, 'readability': 1, 'counts': 1, 'cases': 1, 'arent': 1, 'enough': 1, 'break': 1, 'rules': 1, 'practicality': 1, 'beats': 1, 'purity': 1, 'errors': 1, 'pass': 1, 'silently': 1, 'explicitly': 1, 'silenced': 1, 'in': 1, 'face': 1, 'ambiguity': 1, 'refuse': 1, 'temptation': 1, 'guess': 1, 'there': 1, 'and': 1, 'preferably': 1, 'only': 1, 'that': 1, 'not': 1, 'at': 1, 'first': 1, 'youre': 1, 'dutch': 1, 'often': 1, 'right': 1, 'hard': 1, 'its': 1, 'bad': 1, 'easy': 1, 'good': 1, 'namespaces': 1, 'are': 1, 'honking': 1, 'great': 1, 'lets': 1, 'more': 1, 'those': 1})
```
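A similar quick check works for `plot_words()`. The module path and signature below are assumptions based on the `pycounts` tutorial package this template follows (the package already depends on `matplotlib`), so adjust them to the actual API if they differ:
```zsh
$ python3
>>> from yuenpycounts.yuenpycounts import count_words
>>> from yuenpycounts.plotting import plot_words    # module name assumed
>>> import matplotlib.pyplot as plt
>>> counts = count_words("zen.txt")
>>> plot_words(counts, n=10)                        # bar chart of the 10 most common words
>>> plt.savefig("zen_top10.png")                    # or plt.show() for an interactive window
```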
<p align="right">(<a href="#readme-top">back to top</a>)</p>
</br>
<br>
<br>
# Step1 Build Python Package
Run the cookiecutter command to build the package structure:
```bash
cookiecutter https://github.com/py-pkgs/py-pkgs-cookiecutter.git
```
Answer the prompts (author name, package name, and so on):
```bash
author_name [Monty Python]: 'Your name'
package_name [mypkg]: 'Your package name'
package_short_description []: Calculate word counts in a text file!
package_version [0.1.0]:
python_version [3.9]:
Select open_source_license:
1 - MIT
2 - Apache License 2.0
3 - GNU General Public License v3.0
4 - Creative Commons Attribution 4.0
5 - BSD 3-Clause
6 - Proprietary
7 - None
Choose from 1, 2, 3, 4, 5, 6 [1]:
Select include_github_actions:
1 - no
2 - ci
3 - ci+cd
Choose from 1, 2, 3 [1]: 3
```
* After creating the package, run the cmd `poetry build`.
```zsh
$ poetry build
Building yuenpycounts (0.1.13)
- Building sdist
- Built yuenpycounts-0.1.13.tar.gz
- Building wheel
- Built yuenpycounts-0.1.13-py3-none-any.whl
```
In `Python`, both `sdist` and `wheel` are packaging formats used to distribute Python projects and libraries. They serve as standardized formats that package your code and its dependencies, making it easier to distribute and install Python packages.
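As a quick sanity check before publishing anything, you can list what `poetry build` produced and install the wheel locally (the file names below come from the build output above):
```zsh
$ ls dist/
yuenpycounts-0.1.13-py3-none-any.whl  yuenpycounts-0.1.13.tar.gz
$ pip install dist/yuenpycounts-0.1.13-py3-none-any.whl
```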
## Create Auto Documentation With `Sphinx`
Sphinx is a popular documentation generation tool used primarily for documenting Python projects.
```zsh
conda install sphinx
```
* If you want extra documentation features, add the corresponding extensions to `conf.py` in the `docs` folder, and install them as development dependencies first (a `conf.py` sketch follows the commands below):
```zsh
poetry add --dev myst-nb --python "^3.9"
poetry add --dev sphinx-autoapi sphinx-rtd-theme
```
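For reference, the relevant part of `docs/conf.py` then looks roughly like the sketch below. The exact extension list is an assumption based on what the py-pkgs cookiecutter generates, so keep whatever your generated `conf.py` already contains:
```python
# docs/conf.py (excerpt) -- minimal sketch
extensions = [
    "myst_nb",             # render Markdown / Jupyter notebook sources
    "autoapi.extension",   # generate API docs from the source tree
    "sphinx.ext.napoleon",
    "sphinx.ext.viewcode",
]
autoapi_dirs = ["../src"]        # where AutoAPI looks for the package code
html_theme = "sphinx_rtd_theme"  # theme installed by sphinx-rtd-theme
```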
* After making changes, rebuild the documentation (use `make clean html` to force a full rebuild):
```zsh
cd docs
make html
```
<p align="right">(<a href="#readme-top">back to top</a>)</p>
</br>
<br>
<br>
# Step2 Upload Package To Wisespot Sonatype Nexus
<div align="center">
<a href="">
<img src="images/index.jpg">
</div>
`Sonatype Nexus` is a well-known repository manager in the software development industry. Nexus is a centralized storage and management system for software artifacts such as libraries, frameworks, and other dependencies.
`Nexus repository` managers are popular in organizations that use build automation technologies such as Apache Maven, pip/Poetry, Gradle, or npm. These tools use Nexus to retrieve dependencies from a trusted and controlled source, ensuring consistent and reproducible builds.
<div>
If you want to log in to the WiseSpot Nexus repository, ask the IT support team for an available account with access to the repository.
</div>
</br>
<br>
<br>
* Notice:
If you want to log in to the WiseSpot Nexus repository, ask the IT support team for an account with access to it. A username and password (or token) is required when running `twine upload`. Be aware that hard-coding usernames, passwords, or tokens directly in the CI/CD `.yml` file is a security risk; instead, reference a masked CI/CD variable (for example `$NEXUS_PASSWORD` in GitLab, or `${{ secrets.YOUR_PASSWORD_OR_TOKEN }}` in GitHub Actions) so the secret never appears in the `.yml` file, as sketched below.
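For example, after defining masked variables such as `NEXUS_USERNAME` and `NEXUS_PASSWORD` under Settings -> CI/CD -> Variables in GitLab (the variable names here are placeholders), the upload line in `.gitlab-ci.yml` could look like this:
```bash
- twine upload --repository-url https://repo.wisespotgroup.com.hk/repository/PiPy-release/ -u "$NEXUS_USERNAME" -p "$NEXUS_PASSWORD" dist/*
```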
<p align="right">(<a href="#readme-top">back to top</a>)</p>
</br>
<br>
<br>
## Make Sure The Environment Install Twine Package
If you are not sure whether the package is installed, you can use `pip show PACKAGE_NAME` to check.
```zsh
$ pip show twine
Name: twine
Version: 4.0.2
Summary: Collection of utilities for publishing packages on PyPI
Home-page: https://twine.readthedocs.io/
Author: Donald Stufft and individual contributors
Author-email: [email protected]
License:
Location: /Users/username/opt/anaconda3/envs/package/lib/python3.9/site-packages
Requires: importlib-metadata, keyring, pkginfo, readme-renderer, requests, requests-toolbelt, rfc3986, rich, urllib3
Required-by:
```
<p align="right">(<a href="#readme-top">back to top</a>)</p>
</br>
<br>
<br>
# Step3 Build A Ci Cd Python Package In Gitlab
## Set Up Gitlab
First, set up local version control and initialize a Git repository:
```bash
git init
```
Next, set up git remote to our Gitlab repository:
```bash
git remote add origin https://git.wisespotgroup.com.hk/wisespot/example/jetdemo.git
git branch -M main
```
Check the remote location:
```bash
git remote -v
```
When you `git clone` or `git push` a project on GitLab, GitLab requires you to provide a `Personal Access Token` to authenticate your identity.
Now, we will generate the personal access token:
Click your name -> `Edit Profile` -> `Access Tokens` -> enter token name -> select all scopes -> `Create personal access token` -> copy your new personal access token.
We will use this token in the next step.
Now, you can run a command that contacts GitLab, for example `git pull origin main`, to verify your access:
```bash
git pull origin main
```
GitLab will require you to enter a `Username` and `Password` (VS Code will prompt for them):
```bash
`Username` = `Your login Gitlab account name`
`Password` = `Your personal access token`
```
Now you can `git pull`, `git push`, and `git clone`!
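Putting the pieces together, a typical first commit and push could look like this (the remote was added above; the commit message is only an example):
```bash
git add .
git commit -m "feat(package): initial package skeleton"
git push -u origin main
```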
<p align="right">(<a href="#readme-top">back to top</a>)</p>
</br>
<br>
<br>
## Set Up Gitlab CI And CD
Create a file called `.gitlab-ci.yml` in the root of your repository; it contains the CI/CD configuration.
This file controls the GitLab CI/CD workflow. For example:
```bash
stages:
- install
- test
- deploy
install:
rules:
- if: '$CI_COMMIT_BRANCH == "main"'
image: python:3.9-slim-buster
before_script:
- apt-get update && apt-get install make
stage: install
script:
- echo "this is install stage"
- pip install poetry
- poetry install
test:
rules:
- if: '$CI_COMMIT_BRANCH == "main"'
image: python:3.9-slim-buster
stage: test
script:
- echo "this is test stage"
- pip install poetry
- pip install pytest
- poetry install
- poetry run pytest tests/ --cov=yuenpycounts
deploy:
rules:
- if: '$CI_COMMIT_BRANCH == "main"'
image: python:3.9-slim-buster
stage: deploy
script:
- echo "this is deploy stage!"
- pip install poetry
- pip install twine
- poetry install
- poetry build
- python -m build
- echo "start time"
- twine upload -u __token__ -p pypi-AgENdGVzdC5weXBpLm9yZwIkYzQwMzIwYmQtMDU2ZS00YjNiLWEyNjYtNTdlNWFjMzg0ZDRlAAIUWzEsWyJ5dWVucHljb3VudHMiXV0AAixbMixbImNiNjIyMmQ4LWQ1ODctNDE2OC04ZDI0LTY0NWYwZTIzNTA2ZCJdXQAABiB0URGZP_BhY_GqAXQOf8xsOWrNIBfGBov7sHbxjRHzqg --repository-url https://test.pypi.org/legacy/ dist/*
- twine upload -u __token__ -p pypi-AgEIcHlwaS5vcmcCJGVhN2UxOWU4LTk2NTUtNDVlZi1iNmI1LTk5NDcxZDRhNDIxOQACKlszLCI2OTQ5MjNjYi1iZGIwLTRkMDctYjdjZS1lZDg3NmQ2N2NmMDYiXQAABiAWYsh-c-1Z7e2o803cuq1M3_rczfFxprNB5BEtFWrcLA --repository-url https://upload.pypi.org/legacy/ dist/*
- echo "end time"
- echo "start Nexus"
- twine upload --repository-url https://repo.wisespotgroup.com.hk/repository/PiPy-release/ -u student -p student@Sep dist/*
```
First, the `stages` section defines the stages (and therefore the `jobs`) of your pipeline. In this case there are 3 jobs: `install`, `test`, and `deploy`. Stages also have priority: the pipeline must pass one stage before it enters the next. Here, CI/CD must pass the `install` stage to enter the `test` stage; if the `install` stage fails, the pipeline reports the failure and skips the `test` and `deploy` stages.
```bash
stages:
- install
- test
- deploy
```
Next, we will discuss the structure of the `install` job.
`install` is the job name. `rules` defines the constraints that state under what conditions the job should be executed; here the job runs whenever the `main` branch of your repository changes in GitLab.
`image` is the name of the Docker image the Docker executor uses to run the CI/CD job. In this case, we use a Python 3.9 environment to execute our script.
`stage` declares which stage this job belongs to.
`script` executes all the commands it lists, in order. In this case, we install the `poetry` tool because we need it afterwards, and then run `poetry install` to install our package.
```bash
install:
rules:
- if: '$CI_COMMIT_BRANCH == "main"'
image: python:3.9-slim-buster
before_script:
- apt-get update && apt-get install make
stage: install
script:
- echo "this is install stage"
- pip install poetry
- poetry install
```
In the `test` stage, we use the `pytest` tool to execute the test cases we prepared in the `tests` folder.
We run `pip install poetry` and `pip install pytest` to install `poetry` and `pytest`, and then install our package with `poetry install`.
Now we can run the `poetry run pytest tests/ --cov=yuenpycounts` command to test our package. `pytest tests/` means `pytest` will execute every `test_*.py` file in the `tests` folder, and `--cov=yuenpycounts` reports the coverage of our package. In this case, `pytest` executes the `test_Yuen_pycount.py` file in the `tests` folder; a minimal sketch of such a test file is shown after the job definition below.
```bash
test:
rules:
- if: '$CI_COMMIT_BRANCH == "main"'
image: python:3.9-slim-buster
stage: test
script:
- echo "this is test stage"
- pip install poetry
- pip install pytest
- poetry install
- poetry run pytest tests/ --cov=yuenpycounts
```
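For reference, a minimal sketch of a test this job could run is shown below. The file name matches the one mentioned above; the assumption that `count_words()` returns a `Counter` is based on the usage demo earlier in this README:
```python
# tests/test_Yuen_pycount.py -- minimal sketch
from collections import Counter

from yuenpycounts.yuenpycounts import count_words


def test_count_words(tmp_path):
    # write a small fixture file instead of relying on zen.txt
    sample = tmp_path / "sample.txt"
    sample.write_text("insert insert python delete")
    expected = Counter({"insert": 2, "python": 1, "delete": 1})
    assert count_words(sample) == expected
```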
In the `deploy` stage, we build the distribution packages for our project and then upload them to `Nexus` and `PyPI`.
First, we run `pip install poetry` and `pip install twine` to install the `poetry` and `twine` tools, and `poetry install` to install our package. Then the `poetry build` command creates the distribution packages for our project.
`twine` can upload our package to `Nexus` and `PyPI` using a `Username` and `Password` or a `Token`. In this case, we use the `twine upload --repository-url https://repo.wisespotgroup.com.hk/repository/PiPy-release/ -u student -p student@Sep dist/*` command to upload our package to our `Nexus` repository.
The command `twine upload --repository-url https://repo.wisespotgroup.com.hk/repository/PiPy-release/` tells `twine` which repository URL to upload to. `-u` is the username, `-p` is the password, and `dist/*` matches the distribution packages in the `dist` folder.
If you want to upload the package to the `PyPI` repository, the command syntax is the same as for `Nexus`: `-u` becomes `__token__` and `-p` becomes your PyPI account API token.
```bash
deploy:
rules:
- if: '$CI_COMMIT_BRANCH == "main"'
image: python:3.9-slim-buster
stage: deploy
script:
- echo "this is deploy stage!"
- pip install poetry
- pip install twine
- poetry install
- poetry build
- python -m build
- echo "start time"
- twine upload -u __token__ -p pypi-AgENdGVzdC5weXBpLm9yZwIkYzQwMzIwYmQtMDU2ZS00YjNiLWEyNjYtNTdlNWFjMzg0ZDRlAAIUWzEsWyJ5dWVucHljb3VudHMiXV0AAixbMixbImNiNjIyMmQ4LWQ1ODctNDE2OC04ZDI0LTY0NWYwZTIzNTA2ZCJdXQAABiB0URGZP_BhY_GqAXQOf8xsOWrNIBfGBov7sHbxjRHzqg --repository-url https://test.pypi.org/legacy/ dist/*
- twine upload -u __token__ -p pypi-AgEIcHlwaS5vcmcCJGVhN2UxOWU4LTk2NTUtNDVlZi1iNmI1LTk5NDcxZDRhNDIxOQACKlszLCI2OTQ5MjNjYi1iZGIwLTRkMDctYjdjZS1lZDg3NmQ2N2NmMDYiXQAABiAWYsh-c-1Z7e2o803cuq1M3_rczfFxprNB5BEtFWrcLA --repository-url https://upload.pypi.org/legacy/ dist/*
- echo "end time"
- echo "start Nexus"
- twine upload --repository-url https://repo.wisespotgroup.com.hk/repository/PiPy-release/ -u student -p student@Sep dist/*
```
<p align="right">(<a href="#readme-top">back to top</a>)</p>
</br>
<br>
<br>
# Version Control
The `pyproject.toml` file controls our package version. When you update your package, you need to update `pyproject.toml`:
```bash
[tool.poetry]
name = "yuenpycounts"
version = "0.1.13"
description = "Calculate word counts in a text file!"
authors = ["Micheal","Ming"]
license = "MIT"
readme = "README.md"
[tool.poetry.dependencies]
python = "^3.9"
matplotlib = "^3.7.2"
```
In this case, for a patch update of our package `yuenpycounts`, we change `version = "0.1.13"` to `version = "0.1.14"`.
If you don't want to edit the `pyproject.toml` file by hand, you can use the following commit message conventions to bump the version automatically:
A commit type of `fix` triggers a patch version bump, e.g.:
```bash
git commit -m "fix(mod_plotting): fix confusing error message in \
plot_words"
```
A commit type of `feat` triggers a minor version bump, e.g.:
```bash
git commit -m "feat(package): add example data and new module to \
package"
```
The text `BREAKING CHANGE:` in the commit footer will trigger a major release, e.g.:
```bash
git commit -m "feat(mod_plotting): move code from plotting module \
to pycounts module
BREAKING CHANGE: plotting module wont exist after this release."
```
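Note that these conventional-commit messages only bump the version automatically when a release tool such as Python Semantic Release is configured. The py-pkgs cookiecutter used in Step1 normally adds a section like the following to `pyproject.toml`; treat the exact keys as an assumption and check your generated file:
```toml
[tool.semantic_release]
version_variable = "pyproject.toml:version"           # where the current version lives
branch = "main"                                       # release from the main branch
changelog_file = "CHANGELOG.md"
build_command = "pip install poetry && poetry build"  # how to build the dists
```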
<p align="right">(<a href="#readme-top">back to top</a>)</p>
</br>
<br>
<br>
# Contributing
Interested in contributing? Check out the contributing guidelines. Please note that this project is released with a Code of Conduct. By contributing to this project, you agree to abide by its terms.
</br>
<br>
<br>
# License
`yuenpycounts` was created by Micheal and Ming. It is licensed under the terms of the MIT license.
<p align="right">(<a href="#readme-top">back to top</a>)</p>
</br>
<br>
<br>
# Credits
`yuenpycounts` was created with [`cookiecutter`](https://cookiecutter.readthedocs.io/en/latest/) and the `py-pkgs-cookiecutter` [template](https://github.com/py-pkgs/py-pkgs-cookiecutter).
<p align="right">(<a href="#readme-top">back to top</a>)</p>
</br>
<br>
<br>
<!-- Acknowledgments -->
# Acknowledgments
The following resources were helpful while building this project:
* [Python Package Reference Document](https://py-pkgs.org/welcome)
* [Twine Reference Document](https://twine.readthedocs.io/en/stable/changelog.html)
<p align="right">(<a href="#readme-top">back to top</a>)</p>
<!-- Markdown Links & Images -->
<!-- https://www.markdownguide.org/basic-syntax/#reference-style-links -->
[Python]: https://img.shields.io/pypi/pyversions/yuenpycounts
[Python-url]: https://www.python.org/ | yuenpycounts | /yuenpycounts-0.1.16.tar.gz/yuenpycounts-0.1.16/README.md | README.md |
# yufuquant SDK
The yufuquant SDK wraps the common methods used to interact with the yufuquant backend.
The yufuquant SDK currently supports Python only. Since most of its APIs are asynchronous APIs built on asyncio, the minimum required Python version is 3.7; Python 3.8 is recommended.
## Installation
```bash
$ pip install yufuquantsdk
```
## REST API Client
The REST API client is used to interact with the RESTful API of the yufuquant backend.
```python
from yufuquantsdk.clients import RESTAPIClient
base_url="https://yufuquant.cc/api/v1" # backend API base URL
auth_token="xxxxx" # authentication token
robot_id = 1 # robot id
rest_api_client = RESTAPIClient(base_url=base_url, auth_token=auth_token)
# Fetch the robot configuration
await rest_api_client.get_robot_config(robot_id)
# Update the robot's asset record
data = {
"total_balance": 5.8
}
await rest_api_client.patch_robot_asset_record(robot_id, data)
# Send a ping
await rest_api_client.post_robot_ping(robot_id)
```
A complete example:
```python
import logging
from pprint import pprint
from yufuquantsdk.clients import RESTAPIClient
# Enable logging
logger = logging.getLogger("yufuquantsdk")
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.DEBUG)
async def main():
http_client = RESTAPIClient(
base_url="https://yufuquant.cc/api/v1",
auth_token="8g2e470579ba14ea69000859eba6c421b69ff95d",
)
result = await http_client.get_robot_config(robot_id=1)
pprint(result)
result = await http_client.post_robot_ping(robot_id=1)
pprint(result)
result = await http_client.patch_robot_asset_record(
robot_id=1, data={"total_balance": 10000}
)
pprint(result)
if __name__ == "__main__":
import asyncio
asyncio.run(main())
```
## Websocket API Client
The Websocket API client is used to interact with the Websocket API of the yufuquant backend.
```python
from yufuquantsdk.clients import WebsocketAPIClient
uri="wss://yufuquant.cc/ws/v1/streams/" # backend websocket URL
auth_token="xxxxx" # authentication token
topics = ["robot#1.ping", "robot#1.log"] # topics to subscribe to
ws_api_client = WebsocketAPIClient(uri=uri)
# Authenticate
await ws_api_client.auth(auth_token)
# Subscribe to topics
await ws_api_client.sub(topics)
# Unsubscribe from topics
await ws_api_client.unsub(topics)
# Send a robot ping
await ws_api_client.robot_ping()
# Send a robot log message
await ws_api_client.robot_log()
```
A complete example:
```python
import logging
from yufuquantsdk.clients import WebsocketAPIClient
# Enable logging
logger = logging.getLogger("yufuquantsdk")
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.DEBUG)
async def main():
ws_api_client = WebsocketAPIClient(uri="wss://yufuquant.cc/ws/v1/streams/")
await ws_api_client.auth("8d2e470575ba04ea69000859eba6c421a69ff95c")
await ws_api_client.sub(topics=["robot#1.log"])
while True:
await ws_api_client.robot_ping()
await ws_api_client.robot_log("Test robot log...", level="INFO")
await asyncio.sleep(1)
if __name__ == "__main__":
import asyncio
asyncio.run(main())
```
| yufuquantsdk | /yufuquantsdk-0.1.1.tar.gz/yufuquantsdk-0.1.1/README.md | README.md |
import os
import re
def quote_for_bash(s):
    """
    Quote the given string so it can be safely embedded in a Bash command line, escaping
    single quotes and wrapping values that contain special characters.
    """
if s == '':
return "''"
if re.search(r"""[ ;'"${}()\\.*]""", s):
return "'" + s.replace("'", r"'\''") + "'"
return s
def safe_path_join(*args):
"""
Like os.path.join, but allows arguments to be None. If all arguments are None, returns None.
A special case: if the first argument is None, always return None. That allows to set a number
of constants as relative paths under a certain path which may itself be None.
>>> safe_path_join()
>>> safe_path_join(None)
>>> safe_path_join(None, None)
>>> safe_path_join('/a', None, 'b')
'/a/b'
>>> safe_path_join(None, '/a', None, 'b') # special case: first arg is None
"""
if not args or args[0] is None:
return None
args = [arg for arg in args if arg is not None]
return os.path.join(*args)
def cmd_line_args_to_str(args):
"""
Converts a list of command-line arguments, including an executable program in the beginning,
to a single command-line string suitable for pasing into Bash.
:param args: an array with a program path and command-line arguments
:return: a string suitable for pasing into Bash
"""
return ' '.join([quote_for_bash(arg) for arg in args])
def trim_long_text(text, max_lines):
"""
Trim a potentially long multi-line message at the given number of lines.
:param text: the input text
:param max_lines: maximum number of lines
:return: the trimmed message
"""
max_lines = max(max_lines, 3)
lines = text.splitlines()
if len(lines) <= max_lines:
return text
# Here is the math involved:
# lines_at_each_end * 2 + 1 <= max_lines
# lines_at_each_end <= (max_lines - 1) / 2
lines_to_keep_at_each_end = int((max_lines - 1) / 2)
num_lines_skipped = len(lines) - lines_to_keep_at_each_end * 2
if num_lines_skipped <= 0:
return text
return "\n".join(
lines[:lines_to_keep_at_each_end] +
['({} lines skipped)'.format(num_lines_skipped)] +
lines[-lines_to_keep_at_each_end:]
)
def decode_utf8(bytes):
if isinstance(bytes, str):
return bytes
return bytes.decode('utf-8')
def get_bool_env_var(env_var_name):
"""
>>> os.environ['YB_TEST_VAR'] = ' 1 '
>>> get_bool_env_var('YB_TEST_VAR')
True
>>> os.environ['YB_TEST_VAR'] = ' 0 '
>>> get_bool_env_var('YB_TEST_VAR')
False
>>> os.environ['YB_TEST_VAR'] = ' TrUe'
>>> get_bool_env_var('YB_TEST_VAR')
True
>>> os.environ['YB_TEST_VAR'] = 'fAlSe '
>>> get_bool_env_var('YB_TEST_VAR')
False
>>> os.environ['YB_TEST_VAR'] = ' YeS '
>>> get_bool_env_var('YB_TEST_VAR')
True
>>> os.environ['YB_TEST_VAR'] = 'No'
>>> get_bool_env_var('YB_TEST_VAR')
False
>>> os.environ['YB_TEST_VAR'] = ''
>>> get_bool_env_var('YB_TEST_VAR')
False
"""
value = os.environ.get(env_var_name, None)
if value is None:
return False
return value.strip().lower() in ['1', 't', 'true', 'y', 'yes'] | yugabyte-pycommon | /yugabyte_pycommon-1.9.15.tar.gz/yugabyte_pycommon-1.9.15/yugabyte_pycommon/text_manipulation.py | text_manipulation.py |
import itertools
def group_by_to_list(arr, key_fn):
"""
Group the given list-like collection by the key computed using the given function. The
collection does not have to be sorted in advance.
:return: a list of (key, list_of_values) tuples where keys are sorted
>>> group_by_to_list([100, 201, 300, 301, 400], lambda x: x % 2)
[(0, [100, 300, 400]), (1, [201, 301])]
>>> group_by_to_list([100, 201, 300, 301, 400, 401, 402], lambda x: x % 3)
[(0, [201, 300, 402]), (1, [100, 301, 400]), (2, [401])]
"""
return [(k, list(v)) for (k, v) in itertools.groupby(sorted(arr, key=key_fn), key_fn)]
def group_by_to_dict(arr, key_fn):
"""
Given a list-like collection and a function that computes a key, returns a map from keys to all
values with that key.
>>> group_by_to_dict([100, 201, 300, 301, 400], lambda x: x % 2)
{0: [100, 300, 400], 1: [201, 301]}
>>> group_by_to_dict([100, 201, 300, 301, 400, 401, 402], lambda x: x % 3)
{0: [201, 300, 402], 1: [100, 301, 400], 2: [401]}
"""
return dict(group_by_to_list(arr, key_fn))
def make_list(obj):
"""
Convert the given object to a list. Strings get converted to a list of one string, not to a
list of their characters. Sets are sorted.
>>> make_list('asdf')
['asdf']
>>> make_list(['a', 'b', 'c'])
['a', 'b', 'c']
>>> make_list(set(['z', 'a', 'b']))
['a', 'b', 'z']
>>> make_list(set(['z', 'a', 10, 20]))
[10, 20, 'a', 'z']
>>> make_list(set([10, 20, None, 'a', 'z']))
[10, 20, None, 'a', 'z']
"""
if isinstance(obj, str):
return [obj]
if isinstance(obj, set):
# Sort by string representation because objects of different types are not comparable in
# Python 3.
return sorted(obj, key=lambda item: str(item))
return list(obj)
def make_set(obj):
"""
Makes a set of a given object. Returns the same object if it is already a set. Otherwise
follows the same logic as :py:meth:`make_list`.
:param obj: a collection object
:return: a set created from the given object
"""
if isinstance(obj, set):
return obj
return set(make_list(obj)) | yugabyte-pycommon | /yugabyte_pycommon-1.9.15.tar.gz/yugabyte_pycommon-1.9.15/yugabyte_pycommon/collection_util.py | collection_util.py |
# Copyright (c) 2019 YugaByte, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and limitations under
# the License.
import subprocess
import os
import sys
import semver
ALLOW_LOCAL_CHANGES = False
if __name__ == '__main__':
local_changes = subprocess.check_output(
['git', 'diff-index', '--name-only', 'HEAD', '--']).strip()
if not ALLOW_LOCAL_CHANGES and local_changes:
raise RuntimeError('Local changes found!')
subprocess.check_output(['git', 'fetch'])
changes_vs_master = subprocess.check_output(
['git', 'diff', '--name-only', 'HEAD', 'origin/master']).strip()
if not ALLOW_LOCAL_CHANGES and changes_vs_master:
raise RuntimeError("Local changes not pushed to origin/master")
tags_str = subprocess.check_output(['git', 'tag']).decode('utf-8')
tags = [tag.strip() for tag in tags_str.split("\n") if tag.strip()]
max_version = None
for tag in tags:
if tag.startswith('v'):
version = tag[1:]
if max_version is None or semver.compare(version, max_version) > 0:
max_version = version
if max_version is None:
max_version = '0.1.0'
diff_vs_max_version_tag = subprocess.check_output(
['git', 'diff', '--name-only', 'v%s' % max_version, 'HEAD']).strip().decode('utf-8')
version_file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'version.py')
if not diff_vs_max_version_tag:
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
from yugabyte_pycommon import version
if version.__version__ == max_version:
print("HEAD is already tagged as %s, no need to create a new tag" % max_version)
sys.exit(0)
else:
print("The version.py file has version %s but the max version from tags is %s" %
(version.__version__, max_version))
else:
print("Found differences between max version from tag %s and HEAD:\n%s" % (
max_version, diff_vs_max_version_tag))
new_version = semver.bump_patch(max_version)
with open(version_file_path, 'w') as version_file:
version_file.write('__version__ = "%s"\n' % new_version)
subprocess.check_call(['git', 'add', version_file_path])
changes_needed = subprocess.check_output(
['git', 'diff', '--name-only', 'HEAD', version_file_path])
if changes_needed:
subprocess.check_call(
['git', 'commit', version_file_path, '-m', "Updating version to " + new_version])
else:
print("Version file is already up-to-date")
subprocess.check_call(['git', 'push', 'origin', 'HEAD:master'])
new_tag = 'v' + new_version
subprocess.check_call(['git', 'tag', new_tag])
subprocess.check_call(['git', 'push', 'origin', new_tag]) | yugabyte-pycommon | /yugabyte_pycommon-1.9.15.tar.gz/yugabyte_pycommon-1.9.15/yugabyte_pycommon/update_version.py | update_version.py |
import os
import sys
import logging
import subprocess
import tempfile
import threading
from yugabyte_pycommon.text_manipulation import cmd_line_args_to_str, decode_utf8, trim_long_text, \
quote_for_bash
from yugabyte_pycommon.logging_util import is_verbose_mode
# Default number of lines to shorten long stdout/stderr to.
DEFAULT_MAX_LINES_TO_SHOW = 1000
DEFAULT_UNIX_SHELL = 'bash'
class ProgramResult:
"""
This represents the result of executing an external program.
"""
def __init__(self, cmd_line, cmd_line_str, returncode, stdout, stdout_path, stderr,
stderr_path, program_path, invocation_details_str, max_lines_to_show,
output_captured):
self.cmd_line = cmd_line
self.cmd_line_str = cmd_line_str
self.returncode = returncode
self.stdout = stdout
self.stderr = stderr
self.stdout_path = stdout_path
self.stderr_path = stderr_path
self.program_path = program_path
self.invocation_details_str = invocation_details_str
self.max_lines_to_show = max_lines_to_show
self.output_captured = output_captured
self._set_error_msg()
def success(self):
"""
:return: whether the external program exited with a success
"""
return self.returncode == 0
def failure(self):
"""
:return: whether the external program exited with a failure
"""
return self.returncode != 0
def get_stdout_and_stderr_together(self):
"""
:return: a string with user-friendly versions of stdout and stderr of the external program,
concatenated together.
"""
stdout_and_stderr = (
self.get_user_friendly_stdout_msg() + self.get_user_friendly_stderr_msg())
if not stdout_and_stderr:
stdout_and_stderr = "No stdout or stderr from command: " + self.invocation_details_str
return stdout_and_stderr
def print_output_to_stdout(self):
"""
Print both stdout and stderr of the external program to the stdout.
"""
sys.stdout.write(self.get_stdout_and_stderr_together())
sys.stdout.flush()
def _set_error_msg(self):
if self.returncode == 0:
self.error_msg = None
return
self.error_msg = "Non-zero exit code {} from {}.".format(
self.returncode,
self.invocation_details_str,
self.returncode, cmd_line_args_to_str)
self.error_msg += self.get_user_friendly_stdout_msg()
self.error_msg += self.get_user_friendly_stderr_msg()
self.error_msg = self.error_msg.rstrip()
def get_stdout(self):
if self.stdout is not None:
return self.stdout
if self.stdout_path is not None:
from yugabyte_pycommon import read_file
return read_file(self.stdout_path)
def get_stderr(self):
if self.stderr is not None:
return self.stderr
if self.stderr_path is not None:
from yugabyte_pycommon import read_file
return read_file(self.stderr_path)
def _wrap_for_error_msg(self, stream_type):
assert stream_type in ['output', 'error']
if stream_type == 'output':
value = self.get_stdout()
else:
value = self.get_stderr()
if value is None or not value.strip():
return ""
value = value.rstrip()
return "\nStandard {} from {}:\n{}\n(end of standard {})\n".format(
stream_type, self.invocation_details_str,
trim_long_text(value, self.max_lines_to_show),
stream_type)
def get_user_friendly_stdout_msg(self):
"""
:return: a user-friendly version of the external program's standard output
"""
return self._wrap_for_error_msg("output")
def get_user_friendly_stderr_msg(self):
"""
:return: a user-friendly version of the external program's standard error
"""
return self._wrap_for_error_msg("error")
def raise_error_if_failed(self):
"""
This is useful for delayed handling of external program errors. Raises an error if the
external program failed. Otherwise does nothing.
"""
if self.failure():
raise ExternalProgramError(self.error_msg, self)
def print_output_and_raise_error_if_failed(self):
if self.failure():
# TODO: maybe print stdout to stdout, stderr to stderr?
# TODO: avoid loading large output into memory.
self.print_output_to_stdout()
self.raise_error_if_failed()
class ExternalProgramError(Exception):
def __init__(self, message, result):
self.message = message
self.result = result
class WorkDirContext:
"""
Allows setting a working directory context for running external programs. The directory will
be changed to the given directory on entering the block, and will be restored to the old
directory on exit.
.. code-block:: python
with WorkDirContext('/tmp'):
run_program('ls')
"""
def __init__(self, work_dir):
self.thread_local = threading.local()
self.work_dir = work_dir
def __enter__(self):
self.thread_local.old_dir = os.getcwd()
os.chdir(self.work_dir)
def __exit__(self, exception_type, exception_value, traceback):
os.chdir(self.thread_local.old_dir)
def run_program(args, error_ok=False, report_errors=None, capture_output=True,
max_lines_to_show=DEFAULT_MAX_LINES_TO_SHOW, cwd=None, shell=None,
stdout_path=None, stderr_path=None, stdout_stderr_prefix=None, **kwargs):
"""
Run the given program identified by its argument list, and return a :py:class:`ProgramResult`
object.
:param args: This could be a single string, or a tuple/list of elements where each element is
either a string or an integer. If a single string is given as ``args``, and the ``shell``
parameter is not specified, it is automatically set to true.
:param report_errors: whether errors during execution (as identified by exit code) should be
reported in the log.
:param capture_output: whether standard output and standard error of the program need to be
captured in variables inside of the resulting :py:class:`ProgramResult` object.
:param error_ok: if this is true, we won't raise an exception in case the external program
fails.
:param stdout_path: instead of trying to capture all standard output in memory, save it
        to this file. Both `stdout_path` and `stderr_path` have to be specified or
unspecified at the same time. Also `shell` has to be true in this mode as we are using
shell redirections to implement this.
    :param stderr_path: similar to ``stdout_path`` but for standard error.
:param stdout_stderr_prefix: allows setting both `stdout_path` and `stderr_path` quickly.
Those variables are set to the value of this parameter with `.out` and `.err` appended.
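
    Example (a minimal sketch; assumes an ``echo`` binary is available on the system):

    .. code-block:: python

        result = run_program(['echo', 'hello'])
        if result.success():
            print(result.get_stdout())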
"""
if isinstance(args, str) and shell is None:
# If we are given a single string, assume it is a command line to be executed in a shell.
shell = True
if isinstance(args, str):
# This is a special case, but very common.
cmd_line_str = args
args = [args]
else:
if isinstance(args, tuple):
args = list(args)
if isinstance(args, str):
args = [args]
def normalize_arg(arg):
if isinstance(arg, int):
return str(arg)
return arg
args = [normalize_arg(arg) for arg in args]
cmd_line_str = cmd_line_args_to_str(args)
if (stdout_path is None) != (stderr_path is None):
raise ValueError(
"stdout_file_path and stderr_file_path have to specified or unspecified at the same "
"time. Got: stdout_file_path={}, stderr_file_path={}", stdout_path,
stderr_path)
output_to_files = stdout_path is not None
if stdout_stderr_prefix is not None:
if output_to_files:
raise ValueError(
"stdout_stderr_prefix cannot be specified at the same time with stdout_path "
"or stderr_path")
stdout_path = stdout_stderr_prefix + '.out'
stderr_path = stdout_stderr_prefix + '.err'
output_to_files = True
if output_to_files and not shell:
        raise ValueError("If stdout_path and stderr_path are specified, shell must be True")
invocation_details_str = "external program {{ %s }} running in '%s'" % (
cmd_line_str, cwd or os.getcwd())
if output_to_files:
cmd_line_str = '( %s ) >%s 2>%s' % (
cmd_line_str,
quote_for_bash(stdout_path),
quote_for_bash(stderr_path)
)
invocation_details_str += ", saving stdout to {{ %s }}, stderr to {{ %s }}" % (
# For the ease of copying and pasting, convert to absolute paths.
os.path.abspath(stdout_path),
os.path.abspath(stderr_path)
)
if is_verbose_mode():
logging.info("Running %s", invocation_details_str)
tmp_script_path = None
try:
output_redirection = subprocess.PIPE if (capture_output and not output_to_files) else None
args_to_run = args
if shell:
# Save the script to a temporary file to avoid anomalies with backslash un-escaping
# described at http://bit.ly/2SFoMpN (on Ubuntu 18.04).
with tempfile.NamedTemporaryFile(suffix='.sh', delete=False) as tmp_script_file:
tmp_script_file.write(cmd_line_str.encode('utf-8'))
tmp_script_path = tmp_script_file.name
args_to_run = os.getenv('SHELL', DEFAULT_UNIX_SHELL) + ' ' + quote_for_bash(
tmp_script_path)
program_subprocess = subprocess.Popen(
args_to_run,
stdout=output_redirection,
stderr=output_redirection,
shell=shell,
cwd=cwd,
**kwargs)
program_stdout, program_stderr = program_subprocess.communicate()
if output_to_files:
def report_unexpected_output(stream_name, output):
if output is not None and output.strip():
logging.warn(
"Unexpected standard %s from %s (should have been redirected):\n%s",
stream_name, invocation_details_str, output)
report_unexpected_output('output', program_stdout)
report_unexpected_output('error', program_stderr)
program_stdout = None
program_stderr = None
except OSError:
logging.error("Failed to run %s", invocation_details_str)
raise
finally:
if tmp_script_path and os.path.exists(tmp_script_path):
os.remove(tmp_script_path)
def cleanup_output(out_str):
if out_str is None:
return None
return decode_utf8(out_str)
clean_stdout = cleanup_output(program_stdout)
clean_stderr = cleanup_output(program_stderr)
result = ProgramResult(
cmd_line=args,
cmd_line_str=cmd_line_str,
program_path=os.path.realpath(args[0]),
returncode=program_subprocess.returncode,
stdout=clean_stdout,
stdout_path=stdout_path,
stderr=clean_stderr,
stderr_path=stderr_path,
invocation_details_str=invocation_details_str,
max_lines_to_show=max_lines_to_show,
output_captured=capture_output)
if program_subprocess.returncode != 0:
if report_errors is None:
report_errors = not error_ok
if report_errors:
logging.error(result.error_msg)
if not error_ok:
result.raise_error_if_failed()
return result
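# A minimal usage sketch for run_program (illustration only; the command below is
# arbitrary and the attribute/method usage mirrors ProgramResult as documented above):
#
#     result = run_program(['ls', '-l'], error_ok=True)
#     if result.success():
#         print(result.stdout)
#     else:
#         result.print_output_and_raise_error_if_failed()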
def check_run_program(*args, **kwargs):
"""
Similar to subprocess.check_call but using our run_program facility.
"""
kwargs['capture_output'] = False
kwargs['report_errors'] = True
run_program(*args, **kwargs)
return 0
def program_fails_no_log(args, **kwargs):
"""
    Run the given program and return whether it failed. Does not log anything in case of success
    or failure.
    :param args: command line arguments or a single string to run as a shell command
    :param kwargs: additional keyword arguments for subprocess.Popen
    :return: ``True`` if the program failed
"""
return run_program(args, error_ok=True, report_errors=False, **kwargs).failure()
def program_succeeds_no_log(args, **kwargs):
"""
    Run the given program and return ``True`` if it succeeded. Does not log anything in case of
    success or failure.
    :param args: command line arguments or a single string to run as a shell command
    :param kwargs: additional keyword arguments for subprocess.Popen
    :return: ``True`` if the program succeeded
"""
return run_program(args, error_ok=True, report_errors=False, **kwargs).success()
def program_succeeds_empty_output(args, **kwargs):
"""
Runs a program that is not expected to produce any output.
:param args: command line arguments or a single string to run as a shell command
:param kwargs: additional keyword arguments for subprocess.Popen
:raises ExternalProgramError: if the program succeeds but produces extra output
:return: ``True`` if the program succeeds and does not produce any output
"""
result = run_program(args, error_ok=True, report_errors=False, **kwargs)
if result.failure():
return False
if result.stdout.strip():
error_msg = "Unexpected output in case of success. " + result.get_user_friendly_stdout_msg()
logging.error(error_msg)
raise ExternalProgramError(error_msg, result)
    return True
# Installing Yugal
Install Python from [here](https://www.python.org/downloads/). Make sure to check the 'Add Python to environment variables' and 'pip' options during installation, then run the following command.
```bash
pip install yugal
```
## Installing PHP Environment
A PHP environment is necessary to test and run Yugal projects. On your local computer you can install any PHP environment you are comfortable with. Here we are installing XAMPP.
Installing XAMPP is very easy: just download the package suitable for your OS from https://www.apachefriends.org/index.html and install it as you would any other software.
# Creating New Project
Run the command below to create a new project in your server directory, e.g. `htdocs` or `public_html`.
```bash
yugal --init awesomeapp
```
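To add a library to an existing project, run `yugal --install` from the project root; it clones the library from its git URL into the project's `lib/` folder. A quick sketch (the repository URL below is only a placeholder):
```bash
cd awesomeapp
yugal --install https://github.com/example/some-library.git
```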
[Read Full Documentation](https://yugalphp.gitbook.io/)
import click
import os
import json
@click.command()
@click.option('--init', help='Creates a new Yugal Project', default='')
@click.option('--install', help='Installs a library in Yugal', default='')
@click.option('--remove', help='Deleted Library and Its files from Yugal Project', default='')
@click.option('--show', help='Show all libraries', default='')
@click.option('--dev', help='Toggles DEV_MODE, enter \'on\' to turn it on and \'off\' to turn production mode on.', default='')
def start(init, install, remove, show, dev):
if init != "":
init = init.lower()
try:
os.system('git clone https://github.com/sinhapaurush/yugal.git')
os.system(f"mv yugal {init}")
os.chdir(init)
os.system("rm -rf .git")
os.system("rm LICENSE")
os.system("rm README.md")
fh = open('string.php', 'r')
nl = []
data = fh.read()
data = data.replace("--Yugal-Project--", init)
fh.close()
fh = open('string.php', 'w')
fh.write(data)
fh.close()
conf = "You Are Good To Go!"
except FileNotFoundError:
print("Yugal Project can not be created due to unknown reason, please check your Internet connection")
else:
print("="*len(conf))
print(conf)
print("="*len(conf))
if install != "":
try:
os.chdir('./lib/')
os.system(f'git clone {install}')
libname = install.split("/")
libname = libname[len(libname) - 1]
libname = libname.split(".")
libname = libname[0]
os.chdir(libname)
os.system('rm *.md')
fh = open("lib.json", "r")
config = fh.read()
fh.close()
os.chdir('../')
fh = open('config.json', 'r')
uni = fh.read()
fh.close()
config = config.strip()
config = json.loads(config)
uni = uni.strip()
uni = json.loads(uni)
uni[libname] = config
json_str = json.dumps(uni)
fh = open('config.json', 'w')
fh.write(json_str)
fh.close()
except:
print("Unable to install Library")
else:
print("Library Installed!")
if remove != "":
try:
os.chdir('./lib/')
os.system(f'rm -rf {remove}')
fh = open('config.json', 'r')
data = fh.read()
fh.close()
data = data.strip()
data = json.loads(data)
del(data[remove])
data = json.dumps(data)
fh = open('config.json', 'w')
fh.write(data)
fh.close()
        except Exception as error:
            print('ERROR: Unable to delete this library.')
            print(error)
else:
print("Library Deleted!")
if show != "":
try:
os.chdir('./lib/')
fh = open('config.json', 'r')
code = fh.read()
fh.close()
code = json.loads(code)
for i in code:
print(i, code[i]['github'], sep="\t")
except:
print("ERROR in fetching installed packages.")
if dev != "":
dev = dev.strip()
try:
fh = open('string.php', 'r')
sphp = fh.read()
fh.close()
fh = open('string.php', 'w')
if(dev == 'off'):
sphp = sphp.replace("define('DEV_MODE', true);", "define('DEV_MODE', false);")
elif(dev == 'on'):
sphp = sphp.replace("define('DEV_MODE', false);", "define('DEV_MODE', true);")
fh.write(sphp)
fh = fh.close()
if dev == 'off':
os.system('chmod 644 *')
os.system('chmod 755 modules')
os.system('chmod 755 lib')
os.system('chmod 755 src')
os.chdir('modules')
os.system('chmod 644 *')
elif dev == 'on':
os.system('chmod 777 *')
os.system('chmod 777 modules')
os.chdir('modules')
os.system('chmod 777 *')
except:
print("ERROR IN TOGGLING MODE")
else:
if (dev == 'on'):
print("DEVELOPER MODE TURNED ON")
elif (dev == 'off'):
print("PRODUCTION MODE TURNED ON")
if __name__ == '__main__':
    start()
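# Example invocations of this CLI, sketched from the click options defined above
# (the project name and repository URL are placeholders):
#
#     yugal --init awesomeapp
#     yugal --install https://github.com/example/some-library.git
#     yugal --remove some-library
#     yugal --dev on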
Yugioh Bot
----------
|Join the chat at https://gitter.im/Yugioh-bot/Lobby| |Discord|
|Software License| |Build Status| |Coverage Status| |Quality Score|
Bot to help with those npc in Yugioh Duel Links.
Features
--------
- Auto-duel npc
- Collect worlds rewards
|Example Install|
Prerequisites
-------------
| Have Nox installed (https://www.bignox.com)
| -- Note: Windows 10 users, make sure to disable Hyper-V in Windows
  services, otherwise BSoD errors will occur.
| Python 3.5 or 3.6 (https://www.python.org/downloads/,
https://www.anaconda.com/download/)
Install
-------
Via git
.. code:: bash
$ git clone https://github.com/will7200/Yugioh-bot
$ cd Yugioh-bot
$ pip install -r requirements.txt (or use conda if using)
$ pip install -r install_requirements.txt
$ python install.py
Via zip file -- Unzip Contents
.. code:: bash
$ cd Yugioh-bot
$ pip install -r requirements.txt (or use conda if using)
$ pip install -r install_requirements.txt
$ python install.py
If you are using conda, here is a powershell script that will help
.. code:: powershell
$ Get-Content requirements.txt | ForEach-Object {
conda install --yes $_
}
Afterwards
----------
Skip to 6 on this list if you used python install.py
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1. Install tesseract (http://3.onj.me/tesseract/)
If the above link is giving issues or is slow: Tesseract at UB
Mannheim (https://github.com/UB-Mannheim/tesseract/wiki)
-- Note: Testings occured on the 3.05.01 version
2. opencv\_python‑3.3.1+contrib and cv2+contrib 3.2.0 tested
(http://www.lfd.uci.edu/~gohlke/pythonlibs/#opencv) -- Note: pypi
package will now install cv2+contrib from requirements.txt
3. Copy the downloaded tesseract folder into the ``bin\tess`` directory
4. Copy ``C:\Users\USER_NAME\AppData\Roaming\Nox\bin\nox_adb.exe`` as
   ``adb.exe`` into the bin directory
5. Copy ``C:\Users\USER_NAME\AppData\Roaming\Nox\bin\AdbWinApi.dll``
   into the bin directory
6. Set Nox as 480x800 phone
7. Download Yugioh app
8. Setup Yugioh app, link, etc... (first time only)
Usage
-----
To Start The Bot
.. code:: bash
$ python main.py bot -s
Generate Config File -- Only Needed if you did not git clone master
.. code:: bash
$ python main.py config --generate-config {optional --file-path path/to/file/config.ini}
The bot creates a file for runtime state; its path is specified by the
``runtimepersistence`` option under the ``bot`` section of the config file.
The following values can be changed at runtime to control the
bot until the ui has been made. ["run\_now", "stop", "next\_run\_at"]
| run\_now: if the bot is currently stopped it will schedule a run
immediately
| stop: if the bot is currently running it will halt execution
| next\_run\_at: will schedule a run at the specified time, if currently
running it will remove the current job in place of the new one
GUI
.. code:: bash
$ pythonw main.py gui -s
| This will start the bot with gui controls.
| So far the following signals have been implemented:

* Stop
* Run Now

|Image of Gui|
Wakatime
--------
Check out what files I'm working on through
`WakaTime <https://wakatime.com/@will2700/projects/fofjloaywu>`__
Change log
----------
Please see `CHANGELOG <CHANGELOG.md>`__ for more information on what has
changed recently.
Security
--------
If you discover any security related issues, please open an issue with
"`Security <#security>`__" as the prefix.
Credits
-------
- `will7200 <https://github.com/will7200>`__
- `All Contributors <../../contributors>`__
- tellomichmich (https://github.com/tellomichmich/PokeNoxBot) for the
  idea and some basic guides for nox usage with python

License
-------

The MIT License (MIT). Please see `License File <LICENSE>`__ for more
information.
.. |Join the chat at https://gitter.im/Yugioh-bot/Lobby| image:: https://badges.gitter.im/Yugioh-bot/Lobby.svg
:target: https://gitter.im/Yugioh-bot/Lobby?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge
.. |Discord| image:: https://img.shields.io/discord/392538066633359360.svg?colorB=0082ff&style=flat
:target: https://discord.gg/PGWedhf
.. |Software License| image:: https://img.shields.io/badge/license-MIT-brightgreen.svg?style=flat-square
:target: LICENSE
.. |Build Status| image:: https://img.shields.io/travis/:vendor/:package_name/master.svg?style=flat-square
:target: https://travis-ci.org/:vendor/:package_name
.. |Coverage Status| image:: https://coveralls.io/repos/github/will7200/Yugioh-bot/badge.svg?branch=master
:target: https://coveralls.io/github/will7200/Yugioh-bot?branch=master
.. |Quality Score| image:: https://img.shields.io/scrutinizer/g/:vendor/:package_name.svg?style=flat-square
:target: https://scrutinizer-ci.com/g/:vendor/:package_name
.. |Example Install| image:: https://media.giphy.com/media/3oFzm8CBfGBdhKRms8/giphy.gif
.. |Image of Gui| image:: https://image.ibb.co/ccQ79b/yugioh_duel_bots_gui.png
## Yugioh Bot
[](https://gitter.im/Yugioh-bot/Lobby?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[](https://discord.gg/PGWedhf)
[![Software License][ico-license]](LICENSE)
[](https://ci.appveyor.com/project/will7200/yugioh-bot/branch/master)
[](https://coveralls.io/github/will7200/Yugioh-bot?branch=master)
[![Quality Score][ico-code-quality]][link-code-quality]
Bot to help with those npc in Yugioh Duel Links.
Currently working on porting the Python version to Go, which will allow better concurrent processing, streamlined data updates, and easier distribution.
Check the progress here [go port](https://github.com/will7200/Yugioh-bot/tree/go-yugioh).
## Features
- Auto-duel npc
- Collect worlds rewards

## Prerequisites
Have Nox installed (https://www.bignox.com)
-- Note: Windows 10 users, make sure to disable Hyper-V in Windows services, otherwise BSoD errors will occur.
Alternatively have Yugioh for steam installed.
Python 3.5 or 3.6 (https://www.python.org/downloads/, https://www.anaconda.com/download/)
## Install
Via git
``` bash
$ git clone https://github.com/will7200/Yugioh-bot
$ cd Yugioh-bot
$ pip install -r requirements.txt
$ python install.py
```
Via zip file -- Unzip Contents
``` bash
$ cd Yugioh-bot
$ pip install -r requirements.txt
$ python install.py
```
If you are using conda, here is a powershell script that will help
``` powershell
$ Get-Content requirements.txt | ForEach-Object {
conda install --yes $_
}
```
## Afterwards Nox
### Skip to 6 on this list if you used python install.py
1. Install tesseract (http://3.onj.me/tesseract/)
If the above link is giving issues or is slow:
Tesseract at UB Mannheim (https://github.com/UB-Mannheim/tesseract/wiki)
-- Note: Testings occured on the 3.05.01 version
2. opencv_python‑3.3.1+contrib and cv2+contrib 3.2.0 tested (http://www.lfd.uci.edu/~gohlke/pythonlibs/#opencv)
-- Note: pypi package will now install cv2+contrib from requirements.txt
3. Copy downloaded tesseract folder into bin\tess\
4. Copy C:\Users\USER_NAME\AppData\Roaming\Nox\bin\nox_adb.exe as adb.exe into bin directory
5. Copy C:\Users\USER_NAME\AppData\Roaming\Nox\bin\AdbWinApi.dll into bin directory
6. Set Nox as 480x800 phone
7. Download Yugioh app
8. Setup Yugioh app, link, etc... (first time only)
## Afterwards Steam
### Skip to 4 on this list if you used python install.py
1. Install tesseract (http://3.onj.me/tesseract/)
If the above link is giving issues or is slow:
Tesseract at UB Mannheim (https://github.com/UB-Mannheim/tesseract/wiki)
-- Note: Testings occured on the 3.05.01 version
2. opencv_python‑3.3.1+contrib and cv2+contrib 3.2.0 tested (http://www.lfd.uci.edu/~gohlke/pythonlibs/#opencv)
-- Note: pypi package will now install cv2+contrib from requirements.txt
3. Copy downloaded tesseract folder into bin\tess\
4. Under the bot section in the config.ini file change the provider to Steam instead of Nox.
5. Make sure to have the steam app installed.
6. Note: the detection system hasn't been optimized for the Steam app yet, so there will be a lot of false
positives and false negatives.
## Usage
To Start The Bot
``` bash
$ python main.py bot -s
```
Generate Config File -- Only Needed if you did not git clone master
``` bash
$ python main.py config --generate-config {optional --file-path path/to/file/config.ini}
```
The bot creates a file for runtime state, whose path is specified by the `runtimepersistence` option under the `bot` section of the config file.
The following values can be changed at runtime to control the bot until the ui has been made:
["run_now", "stop", "next_run_at"]
run_now: if the bot is currently stopped, it will schedule a run immediately
stop: if the bot is currently running, it will halt execution
next_run_at: will schedule a run at the specified time; if currently running, the current job will be replaced with the new one
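For reference, a minimal sketch of the relevant `bot` section of `config.ini` (the option names come from this README; the runtime persistence file name is only an assumed example):
``` ini
[bot]
provider = Nox
runtimepersistence = run_time.json
```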
GUI
``` bash
$ pythonw main.py gui -s
```
This will start the bot with gui controls.
So far the following signals have been implemented:
* Stop
* Run Now

## Wakatime
Check out what files I'm working on through [WakaTime](https://wakatime.com/@will2700/projects/fofjloaywu)
## Change log
Please see [CHANGELOG](CHANGELOG.md) for more information on what has changed recently.
## Security
If you discover any security related issues, please open an issue with "[Security]" as the prefix.
## Credits
- [will7200][link-author]
- [All Contributors][link-contributors]
- tellomichmich (https://github.com/tellomichmich/PokeNoxBot) for the idea and some basic guides for nox usage with python
## License
The MIT License (MIT). Please see [License File](LICENSE) for more information.
[ico-version]: https://img.shields.io/packagist/v/:vendor/:package_name.svg?style=flat-square
[ico-license]: https://img.shields.io/badge/license-MIT-brightgreen.svg?style=flat-square
[ico-travis]: https://img.shields.io/travis/:vendor/:package_name/master.svg?style=flat-square
[ico-scrutinizer]: https://img.shields.io/scrutinizer/coverage/g/:vendor/:package_name.svg?style=flat-square
[ico-code-quality]: https://img.shields.io/scrutinizer/g/:vendor/:package_name.svg?style=flat-square
[ico-downloads]: https://img.shields.io/packagist/dt/:vendor/:package_name.svg?style=flat-square
[link-travis]: https://travis-ci.org/:vendor/:package_name
[link-scrutinizer]: https://scrutinizer-ci.com/g/:vendor/:package_name/code-structure
[link-code-quality]: https://scrutinizer-ci.com/g/:vendor/:package_name
[link-author]: https://github.com/will7200
[link-contributors]: ../../contributors
from __future__ import print_function
try:
import configparser
except ImportError:
import ConfigParser as configparser
import errno
import json
import os
import re
import subprocess
import sys
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
def get_root():
"""Get the project root directory.
We require that all commands are run from the project root, i.e. the
directory that contains setup.py, setup.cfg, and versioneer.py .
"""
root = os.path.realpath(os.path.abspath(os.getcwd()))
setup_py = os.path.join(root, "setup.py")
versioneer_py = os.path.join(root, "versioneer.py")
if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
# allow 'python path/to/setup.py COMMAND'
root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
setup_py = os.path.join(root, "setup.py")
versioneer_py = os.path.join(root, "versioneer.py")
if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
err = ("Versioneer was unable to run the project root directory. "
"Versioneer requires setup.py to be executed from "
"its immediate directory (like 'python setup.py COMMAND'), "
"or in a way that lets it use sys.argv[0] to find the root "
"(like 'python path/to/setup.py COMMAND').")
raise VersioneerBadRootError(err)
try:
# Certain runtime workflows (setup.py install/develop in a setuptools
# tree) execute all dependencies in a single python process, so
# "versioneer" may be imported multiple times, and python's shared
# module-import table will cache the first one. So we can't use
# os.path.dirname(__file__), as that will find whichever
# versioneer.py was first imported, even in later projects.
me = os.path.realpath(os.path.abspath(__file__))
me_dir = os.path.normcase(os.path.splitext(me)[0])
vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
if me_dir != vsr_dir:
print("Warning: build in %s is using versioneer.py from %s"
% (os.path.dirname(me), versioneer_py))
except NameError:
pass
return root
def get_config_from_root(root):
"""Read the project setup.cfg file to determine Versioneer config."""
# This might raise EnvironmentError (if setup.cfg is missing), or
# configparser.NoSectionError (if it lacks a [versioneer] section), or
# configparser.NoOptionError (if it lacks "VCS="). See the docstring at
# the top of versioneer.py for instructions on writing your setup.cfg .
setup_cfg = os.path.join(root, "setup.cfg")
parser = configparser.SafeConfigParser()
with open(setup_cfg, "r") as f:
parser.readfp(f)
VCS = parser.get("versioneer", "VCS") # mandatory
def get(parser, name):
if parser.has_option("versioneer", name):
return parser.get("versioneer", name)
return None
cfg = VersioneerConfig()
cfg.VCS = VCS
cfg.style = get(parser, "style") or ""
cfg.versionfile_source = get(parser, "versionfile_source")
cfg.versionfile_build = get(parser, "versionfile_build")
cfg.tag_prefix = get(parser, "tag_prefix")
if cfg.tag_prefix in ("''", '""'):
cfg.tag_prefix = ""
cfg.parentdir_prefix = get(parser, "parentdir_prefix")
cfg.verbose = get(parser, "verbose")
return cfg
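# For reference, the [versioneer] section of setup.cfg that this function reads looks
# roughly like the following (the values shown are purely illustrative):
#
#     [versioneer]
#     VCS = git
#     style = pep440
#     versionfile_source = mypackage/_version.py
#     versionfile_build = mypackage/_version.py
#     tag_prefix = v
#     parentdir_prefix = mypackage-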
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
# these dictionaries contain VCS-specific tools
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Decorator to mark a method as the handler for a particular VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, env=env,
stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %s" % dispcmd)
print(e)
return None, None
else:
if verbose:
print("unable to find command, tried %s" % (commands,))
return None, None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %s (error)" % dispcmd)
print("stdout was %s" % stdout)
return None, p.returncode
return stdout, p.returncode
LONG_VERSION_PY['git'] = '''
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.18 (https://github.com/warner/python-versioneer)
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
"""Get the keywords needed to look up the version information."""
# these strings will be replaced by git during git-archive.
# setup.py/versioneer.py will grep for the variable names, so they must
# each be defined on a line of their own. _version.py will just call
# get_keywords().
git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s"
git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s"
git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s"
keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
return keywords
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
def get_config():
"""Create, populate and return the VersioneerConfig() object."""
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
cfg = VersioneerConfig()
cfg.VCS = "git"
cfg.style = "%(STYLE)s"
cfg.tag_prefix = "%(TAG_PREFIX)s"
cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s"
cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s"
cfg.verbose = False
return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Decorator to mark a method as the handler for a particular VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, env=env,
stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %%s" %% dispcmd)
print(e)
return None, None
else:
if verbose:
print("unable to find command, tried %%s" %% (commands,))
return None, None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %%s (error)" %% dispcmd)
print("stdout was %%s" %% stdout)
return None, p.returncode
return stdout, p.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes both
the project name and a version string. We will also support searching up
two directory levels for an appropriately named parent directory
"""
rootdirs = []
for i in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
else:
rootdirs.append(root)
root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %%s but none started with prefix %%s" %%
(str(rootdirs), parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
if line.strip().startswith("git_date ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
date = keywords.get("date")
if date is not None:
# git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
# it's been around since git-1.5.3, and it's too difficult to
# discover which version we're using, or to work around using an
# older one.
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %%d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%%s', no digits" %% ",".join(refs - tags))
if verbose:
print("likely tags: %%s" %% ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %%s" %% r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None,
"date": date}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags", "date": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
"""
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %%s not under git control" %% root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long",
"--match", "%%s*" %% tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%%s'"
%% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%%s' doesn't start with prefix '%%s'"
print(fmt %% (full_tag, tag_prefix))
pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'"
%% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"],
cwd=root)[0].strip()
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
Exceptions:
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"],
pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
def render_pep440_pre(pieces):
"""TAG[.post.devDISTANCE] -- No -dirty.
Exceptions:
1: no tags. 0.post.devDISTANCE
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post.dev%%d" %% pieces["distance"]
else:
# exception #1
rendered = "0.post.dev%%d" %% pieces["distance"]
return rendered
def render_pep440_post(pieces):
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear "older" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%%s" %% pieces["short"]
else:
# exception #1
rendered = "0.post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%%s" %% pieces["short"]
return rendered
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
    Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
else:
# exception #1
rendered = "0.post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
"""TAG-DISTANCE-gHEX[-dirty].
Like 'git describe --tags --dirty --always -long'.
The distance/hash is unconditional.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"],
"date": None}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
raise ValueError("unknown style '%%s'" %% style)
return {"version": rendered, "full-revisionid": pieces["long"],
"dirty": pieces["dirty"], "error": None,
"date": pieces.get("date")}
def get_versions():
"""Get version information or return default if unable to do so."""
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
cfg = get_config()
verbose = cfg.verbose
try:
return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
verbose)
except NotThisMethod:
pass
try:
root = os.path.realpath(__file__)
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
for i in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to find root of source tree",
"date": None}
try:
pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
return render(pieces, cfg.style)
except NotThisMethod:
pass
try:
if cfg.parentdir_prefix:
return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
except NotThisMethod:
pass
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to compute version", "date": None}
'''
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
if line.strip().startswith("git_date ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
date = keywords.get("date")
if date is not None:
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
# it's been around since git-1.5.3, and it's too difficult to
# discover which version we're using, or to work around using an
# older one.
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
print("likely tags: %s" % ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %s" % r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None,
"date": date}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags", "date": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
"""
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %s not under git control" % root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long",
"--match", "%s*" % tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%s'"
% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
cwd=root)[0].strip()
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
def do_vcs_install(manifest_in, versionfile_source, ipy):
"""Git-specific installation logic for Versioneer.
For Git, this means creating/changing .gitattributes to mark _version.py
for export-subst keyword substitution.
"""
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
files = [manifest_in, versionfile_source]
if ipy:
files.append(ipy)
try:
me = __file__
if me.endswith(".pyc") or me.endswith(".pyo"):
me = os.path.splitext(me)[0] + ".py"
versioneer_file = os.path.relpath(me)
except NameError:
versioneer_file = "versioneer.py"
files.append(versioneer_file)
present = False
try:
f = open(".gitattributes", "r")
for line in f.readlines():
if line.strip().startswith(versionfile_source):
if "export-subst" in line.strip().split()[1:]:
present = True
f.close()
except EnvironmentError:
pass
if not present:
f = open(".gitattributes", "a+")
f.write("%s export-subst\n" % versionfile_source)
f.close()
files.append(".gitattributes")
run_command(GITS, ["add", "--"] + files)
def versions_from_parentdir(parentdir_prefix, root, verbose):
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes both
the project name and a version string. We will also support searching up
two directory levels for an appropriately named parent directory
"""
rootdirs = []
for i in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
else:
rootdirs.append(root)
root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %s but none started with prefix %s" %
(str(rootdirs), parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
SHORT_VERSION_PY = """
# This file was generated by 'versioneer.py' (0.18) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.
import json
version_json = '''
%s
''' # END VERSION_JSON
def get_versions():
return json.loads(version_json)
"""
def versions_from_file(filename):
"""Try to determine the version from _version.py if present."""
try:
with open(filename) as f:
contents = f.read()
except EnvironmentError:
raise NotThisMethod("unable to read _version.py")
mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON",
contents, re.M | re.S)
if not mo:
mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON",
contents, re.M | re.S)
if not mo:
raise NotThisMethod("no version_json in _version.py")
return json.loads(mo.group(1))
def write_to_version_file(filename, versions):
"""Write the given version number to the given _version.py file."""
os.unlink(filename)
contents = json.dumps(versions, sort_keys=True,
indent=1, separators=(",", ": "))
with open(filename, "w") as f:
f.write(SHORT_VERSION_PY % contents)
print("set %s to '%s'" % (filename, versions["version"]))
def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
Exceptions:
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%d.g%s" % (pieces["distance"],
pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
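# Illustrative examples of render_pep440 output, derived from the logic above:
#   closest-tag "1.2", distance 0, clean -> "1.2"
#   closest-tag "1.2", distance 3, short "abc1234" -> "1.2+3.gabc1234"
#   no tag, distance 7, short "abc1234", dirty -> "0+untagged.7.gabc1234.dirty"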
def render_pep440_pre(pieces):
"""TAG[.post.devDISTANCE] -- No -dirty.
Exceptions:
1: no tags. 0.post.devDISTANCE
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post.dev%d" % pieces["distance"]
else:
# exception #1
rendered = "0.post.dev%d" % pieces["distance"]
return rendered
def render_pep440_post(pieces):
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear "older" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%s" % pieces["short"]
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%s" % pieces["short"]
return rendered
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
    Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
"""TAG-DISTANCE-gHEX[-dirty].
Like 'git describe --tags --dirty --always -long'.
The distance/hash is unconditional.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"],
"date": None}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
raise ValueError("unknown style '%s'" % style)
return {"version": rendered, "full-revisionid": pieces["long"],
"dirty": pieces["dirty"], "error": None,
"date": pieces.get("date")}
class VersioneerBadRootError(Exception):
"""The project root directory is unknown or missing key files."""
def get_versions(verbose=False):
"""Get the project version from whatever source is available.
Returns dict with two keys: 'version' and 'full'.
"""
if "versioneer" in sys.modules:
# see the discussion in cmdclass.py:get_cmdclass()
del sys.modules["versioneer"]
root = get_root()
cfg = get_config_from_root(root)
assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
handlers = HANDLERS.get(cfg.VCS)
assert handlers, "unrecognized VCS '%s'" % cfg.VCS
verbose = verbose or cfg.verbose
assert cfg.versionfile_source is not None, \
"please set versioneer.versionfile_source"
assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"
versionfile_abs = os.path.join(root, cfg.versionfile_source)
# extract version from first of: _version.py, VCS command (e.g. 'git
# describe'), parentdir. This is meant to work for developers using a
# source checkout, for users of a tarball created by 'setup.py sdist',
# and for users of a tarball/zipball created by 'git archive' or github's
# download-from-tag feature or the equivalent in other VCSes.
get_keywords_f = handlers.get("get_keywords")
from_keywords_f = handlers.get("keywords")
if get_keywords_f and from_keywords_f:
try:
keywords = get_keywords_f(versionfile_abs)
ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
if verbose:
print("got version from expanded keyword %s" % ver)
return ver
except NotThisMethod:
pass
try:
ver = versions_from_file(versionfile_abs)
if verbose:
print("got version from file %s %s" % (versionfile_abs, ver))
return ver
except NotThisMethod:
pass
from_vcs_f = handlers.get("pieces_from_vcs")
if from_vcs_f:
try:
pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
ver = render(pieces, cfg.style)
if verbose:
print("got version from VCS %s" % ver)
return ver
except NotThisMethod:
pass
try:
if cfg.parentdir_prefix:
ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
if verbose:
print("got version from parentdir %s" % ver)
return ver
except NotThisMethod:
pass
if verbose:
print("unable to compute version")
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None, "error": "unable to compute version",
"date": None}
def get_version():
"""Get the short version string for this project."""
return get_versions()["version"]
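# Example shape of the dict returned by get_versions() (values illustrative only):
#     {"version": "1.2+3.gabc1234", "full-revisionid": "abc1234...", "dirty": False,
#      "error": None, "date": "2018-01-01T12:00:00-0500"}
# get_version() returns just the "version" entry, e.g. "1.2+3.gabc1234".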
def get_cmdclass():
"""Get the custom setuptools/distutils subclasses used by Versioneer."""
if "versioneer" in sys.modules:
del sys.modules["versioneer"]
# this fixes the "python setup.py develop" case (also 'install' and
# 'easy_install .'), in which subdependencies of the main project are
# built (using setup.py bdist_egg) in the same python process. Assume
# a main project A and a dependency B, which use different versions
# of Versioneer. A's setup.py imports A's Versioneer, leaving it in
# sys.modules by the time B's setup.py is executed, causing B to run
# with the wrong versioneer. Setuptools wraps the sub-dep builds in a
# sandbox that restores sys.modules to it's pre-build state, so the
# parent is protected against the child's "import versioneer". By
# removing ourselves from sys.modules here, before the child build
# happens, we protect the child from the parent's versioneer too.
# Also see https://github.com/warner/python-versioneer/issues/52
cmds = {}
# we add "version" to both distutils and setuptools
from distutils.core import Command
class cmd_version(Command):
description = "report generated version string"
user_options = []
boolean_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
vers = get_versions(verbose=True)
print("Version: %s" % vers["version"])
print(" full-revisionid: %s" % vers.get("full-revisionid"))
print(" dirty: %s" % vers.get("dirty"))
print(" date: %s" % vers.get("date"))
if vers["error"]:
print(" error: %s" % vers["error"])
cmds["version"] = cmd_version
# we override "build_py" in both distutils and setuptools
#
# most invocation pathways end up running build_py:
# distutils/build -> build_py
# distutils/install -> distutils/build ->..
# setuptools/bdist_wheel -> distutils/install ->..
# setuptools/bdist_egg -> distutils/install_lib -> build_py
# setuptools/install -> bdist_egg ->..
# setuptools/develop -> ?
# pip install:
# copies source tree to a tempdir before running egg_info/etc
# if .git isn't copied too, 'git describe' will fail
# then does setup.py bdist_wheel, or sometimes setup.py install
# setup.py egg_info -> ?
# we override different "build_py" commands for both environments
if "setuptools" in sys.modules:
from setuptools.command.build_py import build_py as _build_py
else:
from distutils.command.build_py import build_py as _build_py
class cmd_build_py(_build_py):
def run(self):
root = get_root()
cfg = get_config_from_root(root)
versions = get_versions()
_build_py.run(self)
# now locate _version.py in the new build/ directory and replace
# it with an updated value
if cfg.versionfile_build:
target_versionfile = os.path.join(self.build_lib,
cfg.versionfile_build)
print("UPDATING %s" % target_versionfile)
write_to_version_file(target_versionfile, versions)
cmds["build_py"] = cmd_build_py
if "cx_Freeze" in sys.modules: # cx_freeze enabled?
from cx_Freeze.dist import build_exe as _build_exe
# nczeczulin reports that py2exe won't like the pep440-style string
# as FILEVERSION, but it can be used for PRODUCTVERSION, e.g.
# setup(console=[{
# "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION
# "product_version": versioneer.get_version(),
# ...
class cmd_build_exe(_build_exe):
def run(self):
root = get_root()
cfg = get_config_from_root(root)
versions = get_versions()
target_versionfile = cfg.versionfile_source
print("UPDATING %s" % target_versionfile)
write_to_version_file(target_versionfile, versions)
_build_exe.run(self)
os.unlink(target_versionfile)
with open(cfg.versionfile_source, "w") as f:
LONG = LONG_VERSION_PY[cfg.VCS]
f.write(LONG %
{"DOLLAR": "$",
"STYLE": cfg.style,
"TAG_PREFIX": cfg.tag_prefix,
"PARENTDIR_PREFIX": cfg.parentdir_prefix,
"VERSIONFILE_SOURCE": cfg.versionfile_source,
})
cmds["build_exe"] = cmd_build_exe
del cmds["build_py"]
if 'py2exe' in sys.modules: # py2exe enabled?
try:
from py2exe.distutils_buildexe import py2exe as _py2exe # py3
except ImportError:
from py2exe.build_exe import py2exe as _py2exe # py2
class cmd_py2exe(_py2exe):
def run(self):
root = get_root()
cfg = get_config_from_root(root)
versions = get_versions()
target_versionfile = cfg.versionfile_source
print("UPDATING %s" % target_versionfile)
write_to_version_file(target_versionfile, versions)
_py2exe.run(self)
os.unlink(target_versionfile)
with open(cfg.versionfile_source, "w") as f:
LONG = LONG_VERSION_PY[cfg.VCS]
f.write(LONG %
{"DOLLAR": "$",
"STYLE": cfg.style,
"TAG_PREFIX": cfg.tag_prefix,
"PARENTDIR_PREFIX": cfg.parentdir_prefix,
"VERSIONFILE_SOURCE": cfg.versionfile_source,
})
cmds["py2exe"] = cmd_py2exe
# we override different "sdist" commands for both environments
if "setuptools" in sys.modules:
from setuptools.command.sdist import sdist as _sdist
else:
from distutils.command.sdist import sdist as _sdist
class cmd_sdist(_sdist):
def run(self):
versions = get_versions()
self._versioneer_generated_versions = versions
# unless we update this, the command will keep using the old
# version
self.distribution.metadata.version = versions["version"]
return _sdist.run(self)
def make_release_tree(self, base_dir, files):
root = get_root()
cfg = get_config_from_root(root)
_sdist.make_release_tree(self, base_dir, files)
# now locate _version.py in the new base_dir directory
# (remembering that it may be a hardlink) and replace it with an
# updated value
target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
print("UPDATING %s" % target_versionfile)
write_to_version_file(target_versionfile,
self._versioneer_generated_versions)
cmds["sdist"] = cmd_sdist
return cmds
CONFIG_ERROR = """
setup.cfg is missing the necessary Versioneer configuration. You need
a section like:
[versioneer]
VCS = git
style = pep440
versionfile_source = src/myproject/_version.py
versionfile_build = myproject/_version.py
tag_prefix =
parentdir_prefix = myproject-
You will also need to edit your setup.py to use the results:
import versioneer
setup(version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(), ...)
Please read the docstring in ./versioneer.py for configuration instructions,
edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
"""
SAMPLE_CONFIG = """
# See the docstring in versioneer.py for instructions. Note that you must
# re-run 'versioneer.py setup' after changing this section, and commit the
# resulting files.
[versioneer]
#VCS = git
#style = pep440
#versionfile_source =
#versionfile_build =
#tag_prefix =
#parentdir_prefix =
"""
INIT_PY_SNIPPET = """
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
"""
def do_setup():
"""Main VCS-independent setup function for installing Versioneer."""
root = get_root()
try:
cfg = get_config_from_root(root)
except (EnvironmentError, configparser.NoSectionError,
configparser.NoOptionError) as e:
if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
print("Adding sample versioneer config to setup.cfg",
file=sys.stderr)
with open(os.path.join(root, "setup.cfg"), "a") as f:
f.write(SAMPLE_CONFIG)
print(CONFIG_ERROR, file=sys.stderr)
return 1
print(" creating %s" % cfg.versionfile_source)
with open(cfg.versionfile_source, "w") as f:
LONG = LONG_VERSION_PY[cfg.VCS]
f.write(LONG % {"DOLLAR": "$",
"STYLE": cfg.style,
"TAG_PREFIX": cfg.tag_prefix,
"PARENTDIR_PREFIX": cfg.parentdir_prefix,
"VERSIONFILE_SOURCE": cfg.versionfile_source,
})
ipy = os.path.join(os.path.dirname(cfg.versionfile_source),
"__init__.py")
if os.path.exists(ipy):
try:
with open(ipy, "r") as f:
old = f.read()
except EnvironmentError:
old = ""
if INIT_PY_SNIPPET not in old:
print(" appending to %s" % ipy)
with open(ipy, "a") as f:
f.write(INIT_PY_SNIPPET)
else:
print(" %s unmodified" % ipy)
else:
print(" %s doesn't exist, ok" % ipy)
ipy = None
# Make sure both the top-level "versioneer.py" and versionfile_source
# (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
# they'll be copied into source distributions. Pip won't be able to
# install the package without this.
manifest_in = os.path.join(root, "MANIFEST.in")
simple_includes = set()
try:
with open(manifest_in, "r") as f:
for line in f:
if line.startswith("include "):
for include in line.split()[1:]:
simple_includes.add(include)
except EnvironmentError:
pass
# That doesn't cover everything MANIFEST.in can do
# (http://docs.python.org/2/distutils/sourcedist.html#commands), so
# it might give some false negatives. Appending redundant 'include'
# lines is safe, though.
if "versioneer.py" not in simple_includes:
print(" appending 'versioneer.py' to MANIFEST.in")
with open(manifest_in, "a") as f:
f.write("include versioneer.py\n")
else:
print(" 'versioneer.py' already in MANIFEST.in")
if cfg.versionfile_source not in simple_includes:
print(" appending versionfile_source ('%s') to MANIFEST.in" %
cfg.versionfile_source)
with open(manifest_in, "a") as f:
f.write("include %s\n" % cfg.versionfile_source)
else:
print(" versionfile_source already in MANIFEST.in")
# Make VCS-specific changes. For git, this means creating/changing
# .gitattributes to mark _version.py for export-subst keyword
# substitution.
do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
return 0
def scan_setup_py():
"""Validate the contents of setup.py against Versioneer's expectations."""
found = set()
setters = False
errors = 0
with open("setup.py", "r") as f:
for line in f.readlines():
if "import versioneer" in line:
found.add("import")
if "versioneer.get_cmdclass()" in line:
found.add("cmdclass")
if "versioneer.get_version()" in line:
found.add("get_version")
if "versioneer.VCS" in line:
setters = True
if "versioneer.versionfile_source" in line:
setters = True
if len(found) != 3:
print("")
print("Your setup.py appears to be missing some important items")
print("(but I might be wrong). Please make sure it has something")
print("roughly like the following:")
print("")
print(" import versioneer")
print(" setup( version=versioneer.get_version(),")
print(" cmdclass=versioneer.get_cmdclass(), ...)")
print("")
errors += 1
if setters:
print("You should remove lines like 'versioneer.VCS = ' and")
print("'versioneer.versionfile_source = ' . This configuration")
print("now lives in setup.cfg, and should be removed from setup.py")
print("")
errors += 1
return errors
if __name__ == "__main__":
cmd = sys.argv[1]
if cmd == "setup":
errors = do_setup()
errors += scan_setup_py()
if errors:
            sys.exit(1)
# --- end of versioneer.py (/yugioh-bot-0.6.3.tar.gz/yugioh-bot-0.6.3/versioneer.py) ---
import time
import cv2
import numpy as np
def loop_scan(fn, **kwargs):
limit = 3
doit = True
l_times = 0
while doit and l_times <= limit:
l_times += 1
doit = fn(**kwargs)
time.sleep(1)
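# Illustrative sketch (not part of the original module): loop_scan() keeps
# calling the supplied callback with the given keyword arguments until the
# callback returns a falsy value or the retry limit is exhausted. The
# fake_scan callback and the "ok-button" marker below are made-up stand-ins.
def _example_loop_scan():
    attempts = []
    def fake_scan(marker=None):
        attempts.append(marker)
        return len(attempts) < 3  # truthy means "not found yet, try again"
    loop_scan(fake_scan, marker="ok-button")
    return attempts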
def mask_image(lower_mask, upper_mask, img, apply_mask=False):
    """ Masks an image according to the upper and lower bounds
Parameters
----------
lower_mask : ndarray
lower mask to apply to image, length must match image channels
upper_mask : ndarray
upper mask to apply to image, length must match image channels
img : ndarray
image to apply mask to
apply_mask : bool
returns the masked image instead of the mask itself
"""
shape = np.array(img.shape).flatten()
if len(np.array(img.shape).flatten()) == 3:
shape_size = shape[-1]
else:
shape_size = 1
assert (len(lower_mask) == shape_size)
assert (len(upper_mask) == shape_size)
color_min = np.array(lower_mask, np.uint8)
color_max = np.array(upper_mask, np.uint8)
new_img = cv2.inRange(img, color_min, color_max)
if apply_mask:
return cv2.bitwise_and(img, img, mask=new_img)
return new_img
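# Illustrative sketch (not part of the original module): isolates near-white
# pixels in a 3-channel BGR screenshot with mask_image(). The bounds below are
# assumptions for a "mostly white" region, not values used elsewhere in the bot.
def _example_mask_white(img):
    lower = [200, 200, 200]  # one bound per BGR channel
    upper = [255, 255, 255]
    mask = mask_image(lower, upper, img)              # binary mask
    white_only = mask_image(lower, upper, img, True)  # original pixels under the mask
    return mask, white_only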
def mse(image_a, image_b):
# the 'Mean Squared Error' between the two images is the
# sum of the squared difference between the two images;
# NOTE: the two images must have the same dimension
err = np.sum((image_a.astype("float") - image_b.astype("float")) ** 2)
err /= float(image_a.shape[0] * image_a.shape[1])
# return the MSE, the lower the error, the more "similar"
# the two images are
return err
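# Illustrative sketch (not part of the original module): uses mse() as a cheap
# similarity test between two equally-sized frames. The threshold of 100 is an
# assumption to be tuned per use case, not a value taken from the bot.
def _example_frames_similar(frame_a, frame_b, threshold=100.0):
    return mse(frame_a, frame_b) < threshold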
def crop_image(img, left=0, top=0, width=0, height=0):
left, top, width, height = tuple(np.asanyarray([left, top, width, height], np.uint64).tolist())
crop_img = img[top:(top + height), left:(left + width)].copy()
return crop_img
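# Illustrative sketch (not part of the original module): crops a rectangle out
# of a screenshot with crop_image() and scores it against a reference crop of
# the same size using mse(). The coordinates are placeholders, not real UI
# locations from the game.
def _example_compare_region(screenshot, reference, left=10, top=20, width=100, height=40):
    region = crop_image(screenshot, left=left, top=top, width=width, height=height)
    return mse(region, reference)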
def bot_assertion(condition, error_type, message):
if not condition:
raise error_type(message)
def first_true(iterable, default=False, pred=None):
"""Returns the first true value in the iterable.
If no true value is found, returns *default*
If *pred* is not None, returns the first item
for which pred(item) is true.
"""
# first_true([a,b,c], x) --> a or b or c or x
# first_true([a,b], x, f) --> a if f(a) else b if f(b) else x
    return next(filter(pred, iterable), default)
# --- end of bot/common.py (/yugioh-bot-0.6.3.tar.gz/yugioh-bot-0.6.3/bot/common.py) ---
import asyncio
import datetime
import inspect
import json
import os
import pathlib
import sys
import threading
import time
import traceback
from abc import abstractmethod
from concurrent.futures import ThreadPoolExecutor
import apscheduler
from apscheduler.jobstores.base import JobLookupError
from apscheduler.schedulers import SchedulerNotRunningError
from bot import logger, default_timestamp
from bot.utils.data import read_json_file, write_data_file
from bot.utils.watcher import SyncWithFile
try:
from bot.debug_helpers.helpers_decorators import async_calling_function
except ImportError:
from bot import fake_decorator as async_calling_function
class DuelLinkRunTimeOptions(object):
"""Class defines options used at runtime"""
_active = False
@property
def active(self):
return self._active
@active.setter
def active(self, value):
        self._active = value
_last_run_at = datetime.datetime.fromtimestamp(default_timestamp)
@property
def last_run_at(self):
return self._last_run_at
@last_run_at.setter
def last_run_at(self, value):
if not isinstance(value, datetime.datetime):
self.runtime_error_options("last_run_at", datetime.datetime, type(value))
return
if self._last_run_at == value:
return
self._last_run_at = value
frame = inspect.currentframe()
logger.debug("Value {} modified to {}".format(inspect.getframeinfo(frame).function, value))
self.timeout_dump()
_next_run_at = datetime.datetime.fromtimestamp(default_timestamp)
@property
def next_run_at(self):
return self._next_run_at
@next_run_at.setter
def next_run_at(self, value):
if not isinstance(value, datetime.datetime):
self.runtime_error_options("next_run_at", datetime.datetime, type(value))
return
if self._next_run_at == value:
return
self._next_run_at = value
frame = inspect.currentframe()
logger.debug("Value {} modified to {}".format(inspect.getframeinfo(frame).function, value))
self.timeout_dump()
self.handle_option_change('next_run_at')
_run_now = False
@property
def run_now(self):
return self._run_now
@run_now.setter
def run_now(self, value):
if not isinstance(value, bool):
self.runtime_error_options("run_now", bool, type(value))
return
if self._run_now == value:
return
self._run_now = value
frame = inspect.currentframe()
logger.debug("Value {} modified".format(inspect.getframeinfo(frame).function))
self.timeout_dump()
self.handle_option_change('run_now')
_stop = threading.Event()
@property
def stop(self):
return self._stop.is_set()
@stop.setter
def stop(self, stop):
if not isinstance(stop, bool):
self.runtime_error_options("stop", bool, type(stop))
return
        if self._stop.is_set() == stop:
return
if stop:
self._stop.set()
else:
self._stop.clear()
frame = inspect.currentframe()
logger.debug("Value {} modified".format(inspect.getframeinfo(frame).function))
self.timeout_dump()
self.handle_option_change('stop')
_playmode = "autoplay"
_available_modes = ['autoplay', 'guided']
@property
def playmode(self):
return self._playmode
@playmode.setter
def playmode(self, playmode):
if not isinstance(playmode, str):
self.runtime_error_options("playmode", str, type(playmode))
return
if playmode not in self._available_modes:
return
if self._playmode == playmode:
return
self._playmode = playmode
frame = inspect.currentframe()
logger.debug("Value {} modified".format(inspect.getframeinfo(frame).function))
self.timeout_dump()
self.handle_option_change('playmode')
_battle_calls = {
"beforeStart": [],
"afterStart" : [],
"beforeEnd" : [],
"afterEnd" : []
}
@property
def battle_calls(self):
return self._battle_calls
@battle_calls.setter
def battle_calls(self, value):
if not isinstance(value, dict):
self.runtime_error_options("battle_calls", dict, type(value))
return
if self._battle_calls == value:
return
self._battle_calls = value
frame = inspect.currentframe()
logger.debug("Value {} modified".format(inspect.getframeinfo(frame).function))
self.timeout_dump()
self.handle_option_change('battle_calls')
@abstractmethod
def runtime_error_options(self, option, expecting_type, got_type):
raise NotImplementedError("runtime_error_options not implemented")
@abstractmethod
def timeout_dump(self):
raise NotImplementedError("timeout_dump not implemented")
@abstractmethod
def handle_option_change(self, value):
raise NotImplementedError("handle_option_change not implemented")
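# Illustrative sketch (not part of the original module): the smallest concrete
# options holder. It only logs instead of persisting or rescheduling anything,
# which is enough to exercise the property setters above; DuelLinkRunTime below
# is the real implementation.
class _ExampleRunTimeOptions(DuelLinkRunTimeOptions):
    def runtime_error_options(self, option, expecting_type, got_type):
        logger.error("option %s expected %s but got %s", option, expecting_type, got_type)
    def timeout_dump(self):
        logger.debug("would persist the runtime options here")
    def handle_option_change(self, value):
        logger.debug("option %r changed", value)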
class DuelLinkTasks(object):
def __init__(self, dlrto):
self.dlRunTime = dlrto # type: DuelLinkRunTime
async def check_next_run(self):
while True:
time_diff = self.dlRunTime.next_run_at - datetime.datetime.now()
if time_diff.total_seconds() < -50 and self.dlRunTime.active is False:
logger.info("APScheduler failed to schedule run, forcing run now")
self.dlRunTime.run_now = True
await asyncio.sleep(60)
def start(self):
loop = asyncio.get_event_loop() # self.dlRunTime.get_loop()
assert (loop.is_running())
asyncio.run_coroutine_threadsafe(self.check_next_run(), loop)
def shutdown(self):
loop = asyncio.get_event_loop() # self.dlRunTime.get_loop()
async def st():
tasks = [task for task in asyncio.Task.all_tasks() if task is not
asyncio.tasks.Task.current_task()]
list(map(lambda task: task.cancel(), tasks))
results = await asyncio.gather(*tasks, return_exceptions=True)
for r in results:
if isinstance(r, str):
logger.warning("Task {}".format(r))
future = asyncio.run_coroutine_threadsafe(st(), loop)
future.result()
class DuelLinkRunTime(DuelLinkRunTimeOptions):
_file = None
_unknown_options = []
_scheduler = None
_config = None
_watcher = None
_timeout_dump = None
_executor = None
_provider = None
_loop = None
_run_main = None
_job = None
_allow_event_change = True
_disable_dump = False
_disable_persistence = False
_loop_thread = None
def __init__(self, config, scheduler, auto_start=True):
self._config = config
self._file = config.get('bot', 'runTimePersistence')
self._disable_persistence = config.get('bot', 'persist')
self._scheduler = scheduler
self._task_runner = DuelLinkTasks(self)
if auto_start:
self.start()
def start(self):
self.setUp()
if not self._disable_persistence:
logger.debug("Watching {} for runTime Options".format(self._file))
self._watcher = SyncWithFile(self._file)
self._watcher.settings_modified = self.settings_modified
self._task_runner.start()
def setUp(self):
self._loop = asyncio.get_event_loop()
self._loop.set_default_executor(ThreadPoolExecutor())
if os.path.dirname(self._file) == "":
self._file = os.path.join(os.getcwd(), self._file)
pathlib.Path(os.path.dirname(self._file)).mkdir(parents=True, exist_ok=True)
if os.path.exists(self._file):
self.update()
if not self._loop.is_running():
def run_loop():
self._loop.run_forever()
self._loop_thread = threading.Thread(target=run_loop)
self._loop_thread.start()
def handle_option_change(self, value):
if self._provider is None:
return
if value == 'stop':
if self.stop and self._provider.current_thread is not None:
for x in threading.enumerate():
if x == self._provider.current_thread:
logger.info("Stopping Bot Execution")
elif self._provider.current_thread is not None:
logger.info("Resuming Bot Execution")
if value == 'run_now' and self.run_now:
logger.info("Forcing run now")
if self._provider.current_thread is None:
try:
self._scheduler.remove_job(self._job)
except JobLookupError:
pass
self._scheduler.add_job(self._run_main, id='cron_main_force')
else:
logger.debug("Thread is currently running")
self.run_now = False
if value == 'next_run_at' and self._allow_event_change:
try:
self._scheduler.remove_job(self._job)
except JobLookupError:
pass
self.schedule_next_run()
next_run_at = self.next_run_at
self._job = 'cron_main_at_{}'.format(next_run_at.isoformat())
self._scheduler.add_job(self._run_main, trigger='date', id=self._job,
run_date=next_run_at)
def get_provider(self):
return self._provider
def set_provider(self, provider):
self._provider = provider
def settings_modified(self, events):
self.update()
def update(self):
self._unknown_options = []
try:
tmp_data = read_json_file(self._file)
except json.decoder.JSONDecodeError:
logger.error("runtime file error reading")
return
if tmp_data is None:
self.dump()
return
for key, value, in tmp_data.items():
if key.startswith('_'):
continue
if hasattr(self, key):
setattr(self, key, value)
else:
self._unknown_options.append(key)
if len(self._unknown_options) > 0:
logger.debug("Unknown options were passed in [{}]".format(','.join(self._unknown_options)))
def dump_options(self):
tmpdict = {}
for attribute in [a for a in dir(self) if not a.startswith('__') \
and not a.startswith('_') \
and not inspect.ismethod(getattr(self, a))
and not inspect.isfunction(getattr(self, a))]:
# print(attribute, type(getattr(self,attribute)))
tmpdict[attribute] = getattr(self, attribute)
return tmpdict
def dump(self):
if not self._disable_dump:
self._watcher.stop_observer()
tmpdict = self.dump_options()
logger.debug("Dump Getting Called {}".format(tmpdict))
write_data_file(tmpdict, self._file)
self._watcher.start_observer()
# self._timeout_dump = None
def timeout_dump(self):
if self._disable_persistence:
return
if self._timeout_dump is not None:
try:
self._timeout_dump.remove()
except apscheduler.jobstores.base.JobLookupError:
pass
time = datetime.datetime.now() + datetime.timedelta(seconds=5)
self._timeout_dump = self._scheduler.add_job(self.dump, trigger='date',
run_date=time)
logger.debug("Timeout dump Scheduled")
@staticmethod
def runtime_error(message):
logger.error(
"Runtime error: {}".format(message)
)
def runtime_error_options(self, option, expecting_type, got_type):
mess = "option {} has wrong type associated with it. Fix it, no events will be notified.".format(option)
self.runtime_error(mess)
mess = "option {} expecting {} but got {}".format(option, expecting_type, got_type)
self.runtime_error(mess)
def schedule_next_run(self):
if not self._disable_persistence and self._watcher.observer:
self._watcher.stop_observer()
if self.next_run_at == datetime.datetime.fromtimestamp(default_timestamp):
self.next_run_at = datetime.datetime.now() + datetime.timedelta(seconds=5)
elif datetime.datetime.now() > self.next_run_at:
self.next_run_at = datetime.datetime.now() + datetime.timedelta(seconds=5)
else:
next_at = self.next_run_at - datetime.datetime.now()
self.next_run_at = datetime.datetime.now(
) + datetime.timedelta(seconds=next_at.total_seconds())
if not self._disable_persistence:
self._watcher.start_observer()
def determine_playthrough(self, provider):
"""
Determines the mode to run
:param provider: Provider
:return:
"""
if self.playmode == 'autoplay':
logger.info("starting auto play through")
provider.auto()
logger.info("completed auto play through")
elif self.playmode == 'guided':
logger.info("starting guided play through")
provider.guided_mode()
logger.info("guided play through interrupted")
else:
logger.critical("Unknown play through mode")
def main(self):
self._allow_event_change = False
self._run_main = self.in_main
if self._config.getboolean("bot", "startBotOnStartUp"):
self.next_run_at = datetime.datetime.now() + datetime.timedelta(seconds=1)
else:
self.schedule_next_run()
next_run_at = self.next_run_at
self._job = 'cron_main_at_{}'.format(next_run_at.isoformat())
self._scheduler.add_job(self._run_main, trigger='date', id=self._job,
run_date=next_run_at)
if not self._disable_persistence:
self._watcher.start_observer()
logger.info("Tracking {}".format(self._file))
self._allow_event_change = True
logger.info('Next run at {}'.format(self.next_run_at.isoformat()))
def in_main(self):
def thread_shutdown(_self):
_self.shutdown()
def handle_exception(_self, e):
exc_type, exc_obj, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
logger.error(e)
logger.debug("{} {} {}".format(exc_type, fname, exc_tb.tb_lineno))
logger.debug(traceback.format_exc())
logger.critical("Provider does not have method correctly implemented cannot continue")
            tt = threading.Thread(target=thread_shutdown, args=(_self,))
tt.start() # (schedule_shutdown, args=(), id='shutdown')
self.active = True
self.last_run_at = datetime.datetime.now()
provider = self.get_provider()
try:
if not provider.is_process_running():
provider.start_process()
provider.wait_for_ui(30)
provider.pass_through_initial_screen(False)
else:
provider.pass_through_initial_screen(True)
provider.compare_with_back_button()
self.determine_playthrough(provider)
except NotImplementedError as ee:
handle_exception(self, ee)
return
except AttributeError as ee:
handle_exception(self, ee)
return
except TypeError as ee:
handle_exception(self, ee)
return
except Exception:
exc_type, exc_obj, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
logger.debug("{} {} {}".format(exc_type, fname, exc_tb.tb_lineno))
logger.debug(traceback.format_exc())
return
if not self._disable_persistence:
self._watcher.stop_observer()
self._allow_event_change = False
self.next_run_at = datetime.datetime.now() + datetime.timedelta(hours=4)
next_run_at = self.next_run_at
self._allow_event_change = True
self._job = 'cron_main_at_{}'.format(next_run_at.isoformat())
        self._scheduler.add_job(self._run_main, trigger='date', id=self._job,
run_date=next_run_at)
self.active = False
self.stop = False
if not self._disable_persistence:
self._watcher.start_observer()
_shutdown = False
    def shutdown(self):
        """Wait for the provider's current thread to become None before shutting down; otherwise the shutdown cannot complete cleanly."""
self._disable_dump = True # will not write to run time options
self.stop = True # signals all long_running operations to not execute, os calls will not occur either
while self._provider.current_thread is not None:
logger.warning('Waiting for bot thread to stop')
time.sleep(5)
logger.info("Shutdown started")
self._task_runner.shutdown()
self._loop.call_soon_threadsafe(self._loop.stop)
try:
self._scheduler.shutdown()
except SchedulerNotRunningError:
pass
if self._loop_thread:
self._loop_thread.join()
while self._loop.is_running():
pass
self._loop.close()
logger.info("Shutdown complete")
self._shutdown = True
def get_loop(self):
return self._loop
def __exit__(self):
        self.dump()
# --- end of bot/duel_links_runtime.py (/yugioh-bot-0.6.3.tar.gz/yugioh-bot-0.6.3/bot/duel_links_runtime.py) ---
#############################################################################
##
# Copyright (C) 2013 Riverbank Computing Limited.
# Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
# All rights reserved.
##
# This file is part of the examples of PyQt.
##
# $QT_BEGIN_LICENSE:BSD$
# You may use this file under the terms of the BSD license as follows:
##
# "Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
##
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
# $QT_END_LICENSE$
##
#############################################################################
import logging
import sys
import time
from enum import Enum
from PyQt5 import QtCore, QtGui
from PyQt5.QtGui import QIcon
from PyQt5.QtWidgets import (QAction, QApplication, QComboBox,
QGridLayout, QGroupBox, QHBoxLayout, QLabel, QMessageBox, QMenu, QPushButton,
QSystemTrayIcon,
QTextEdit, QVBoxLayout, QDesktopWidget, QWidget, QFrame, qApp, QTabWidget, QMainWindow)
from bot.duel_links_runtime import DuelLinkRunTime
from bot import images_qr
class WINDOWS_TASKBAR_LOCATION(Enum):
LEFT = 1
TOP = 2
RIGHT = 3
BOTTOM = 4
app_name = "Yugioh-DuelLinks Bot"
default_open_offset = 7
def mock_data(): return False
update_intervals = {
'next_run_at' : 10,
'nox_status' : 10,
'current_time': 1
}
class QtHandler(logging.Handler):
def __init__(self):
logging.Handler.__init__(self)
def emit(self, record):
record = self.format(record)
if record:
XStream.stdout().write('%s\n' % record)
class XStream(QtCore.QObject):
_stdout = None
_stderr = None
messageWritten = QtCore.pyqtSignal(str)
def flush(self):
pass
def fileno(self):
return -1
def write(self, msg):
if not self.signalsBlocked():
self.messageWritten.emit(msg)
@staticmethod
def stdout():
if (not XStream._stdout):
XStream._stdout = XStream()
sys.stdout = XStream._stdout
return XStream._stdout
@staticmethod
def stderr():
if (not XStream._stderr):
XStream._stderr = XStream()
sys.stderr = XStream._stderr
return XStream._stderr
class DuelLinksGui(QFrame, QMainWindow):
_shouldShowSystrayBox = mock_data
dlRunTime = None
def __init__(self, duelLinksRunTime=None, assets=None):
super(DuelLinksGui, self).__init__()
self.assets = assets
assert (type(duelLinksRunTime) is DuelLinkRunTime)
self.dlRunTime = duelLinksRunTime # type: DuelLinkRunTime
self.createRunTimeFields()
self.createBotControls()
self.setObjectName("BotFrame")
self.setStyleSheet("#BotFrame {border: 2px solid #9e3939;}")
self.createActions()
self.createBotActions()
self.createTrayIcon()
self.setShouldShowSystrayBox(mock_data)
self.hideButton.clicked.connect(self.close)
self.exitButton.clicked.connect(self.__quit__)
self.trayIcon.messageClicked.connect(self.messageClicked)
self.trayIcon.activated.connect(self.iconActivated)
# bot actions connected
self.pauseButton.clicked.connect(self.pause_bot)
self.runButton.clicked.connect(self.start_bot)
# log creation
textViewLog = QtHandler()
# You can format what is printed to text box
textViewLog.setFormatter(
logging.Formatter('%(asctime)s | %(levelname)s | %(name)s | %(message)s', datefmt='%Y-%m-%d %H:%M:%S'))
logging.getLogger('bot').addHandler(textViewLog)
# self.textViewLog.signal.connect(self.add_to_log)
self.tabs = QTabWidget(self)
self.tab1 = QWidget(self)
mainLayout = QVBoxLayout(self.tab1)
mainLayout.addWidget(self.runTimeGroupBox)
mainLayout.addWidget(self.botControls)
self.tab1.setLayout(mainLayout)
self.tabs.addTab(self.tab1, "General")
self.clear_log = QPushButton("Clear log")
self.tab2 = QWidget(self)
logLayout = QVBoxLayout(self.tab2)
self.textViewLog = QTextEdit(self.tab2)
self.textViewLog.setReadOnly(True)
self.clear_log.clicked.connect(self.textViewLog.clear)
XStream.stdout().messageWritten.connect(self.add_to_log)
XStream.stderr().messageWritten.connect(self.add_to_log)
logLayout.addWidget(self.textViewLog)
logLayout.addWidget(self.clear_log)
self.tab2.setLayout(logLayout)
self.tabs.addTab(self.tab2, "Log")
viewlayout = QVBoxLayout(self)
viewlayout.addWidget(self.tabs)
self.setLayout(viewlayout)
self.setIcon()
self.trayIcon.show()
self.setWindowTitle(app_name)
self.setFixedSize(400, 300)
self.setWindowFlags(QtCore.Qt.FramelessWindowHint | QtCore.Qt.Popup)
self.location_on_the_screen()
self.update_values(True)
def add_to_log(self, msg):
try:
cursor = self.textViewLog.textCursor()
src = msg.split('|')
if len(src) != 4:
self.textViewLog.append(msg)
else:
text = ""
text += "<span>"
text += "<b>{}</b>".format(src[0])
text += "<span style=\"color:blue;\">{}</span>".format(src[1])
text += src[2]
text += src[3]
text += "</span>"
cursor.insertHtml(text + "<br>")
self.textViewLog.moveCursor(QtGui.QTextCursor.End)
except Exception as e:
print('Error on updating log: ', end='')
print(e)
def location_on_the_screen(self):
ag = QDesktopWidget().availableGeometry()
sg = QDesktopWidget().screenGeometry()
widget = self.geometry()
position = self.get_task_bar_position()
if position == WINDOWS_TASKBAR_LOCATION.BOTTOM:
x = ag.width() - widget.width()
y = 2 * ag.height() - sg.height() - widget.height()
elif position == WINDOWS_TASKBAR_LOCATION.LEFT:
x = sg.width() - ag.width() + default_open_offset
y = 2 * ag.height() - sg.height() - widget.height() - default_open_offset
elif position == WINDOWS_TASKBAR_LOCATION.TOP:
x = ag.width() - widget.width() - default_open_offset
y = sg.height() - ag.height() + default_open_offset
elif position == WINDOWS_TASKBAR_LOCATION.RIGHT:
x = ag.width() - widget.width() - default_open_offset
y = 2 * ag.height() - sg.height() - widget.height() - default_open_offset
self.move(x, y)
def get_task_bar_position(self):
desktop = QDesktopWidget()
displayRect = desktop.screenGeometry()
desktopRect = desktop.availableGeometry()
if desktopRect.height() < displayRect.height():
if desktopRect.y() > displayRect.y():
return WINDOWS_TASKBAR_LOCATION.TOP
else:
return WINDOWS_TASKBAR_LOCATION.BOTTOM
else:
if desktopRect.x() > displayRect.x():
return WINDOWS_TASKBAR_LOCATION.LEFT
else:
return WINDOWS_TASKBAR_LOCATION.RIGHT
def setVisible(self, visible):
self.minimizeAction.setEnabled(visible)
self.maximizeAction.setEnabled(not self.isMaximized())
self.restoreAction.setEnabled(self.isMaximized() or not visible)
super(DuelLinksGui, self).setVisible(visible)
def closeEvent(self, event):
if self.trayIcon.isVisible():
if self.shouldShowSystrayBox():
QMessageBox.information(self, app_name,
"The program will keep running in the system tray. To "
"terminate the program, choose <b>Quit</b> in the "
"context menu of the system tray entry.")
self.hide()
event.ignore()
def setShouldShowSystrayBox(self, callback):
self._shouldShowSystrayBox = callback
def shouldShowSystrayBox(self):
        return self._shouldShowSystrayBox()
def setIcon(self):
icon = QIcon(QIcon(':/assets/yugioh.ico'))
self.trayIcon.setIcon(icon)
self.setWindowIcon(icon)
self.trayIcon.setToolTip('Duel-Links Bot')
def iconActivated(self, reason):
if reason in (QSystemTrayIcon.Trigger, QSystemTrayIcon.DoubleClick):
self.showNormal()
elif reason == QSystemTrayIcon.MiddleClick:
self.showNotifcation(
"In Development", "You pressed the middle mouse button.\n Hidden Feature!!!!")
def showMessage(self):
icon = QSystemTrayIcon.MessageIcon(
self.typeComboBox.itemData(self.typeComboBox.currentIndex()))
self.trayIcon.showMessage(self.titleEdit.text(),
self.bodyEdit.toPlainText(), icon,
self.durationSpinBox.value() * 1000)
def showNotifcation(self, title, message):
icon = QSystemTrayIcon.MessageIcon(
self.typeComboBox.itemData(self.typeComboBox.currentIndex()))
self.trayIcon.showMessage(title,
message, icon,
self.durationSpinBox.value() * 1000)
def messageClicked(self):
QMessageBox.information(None, "Systray",
"Sorry, I already gave what help I could.\nMaybe you should "
"try asking a human?")
def modeChange(self, index):
self.dlRunTime.playmode = self.available_modes.currentData()
def createBotControls(self):
self.botControls = QGroupBox("Controls")
controlLayout = QGridLayout()
self.runLabel = QLabel("Run the bot:")
self.modeLabel = QLabel("Current Mode:")
self.available_modes = QComboBox()
for index, mode in enumerate(self.dlRunTime._available_modes):
self.available_modes.addItem(mode.title(), mode)
self.available_modes.setStyleSheet("QComboBox {text-align: center;}")
self.available_modes.setEditable(True)
self.available_modes.lineEdit().setReadOnly(True)
self.available_modes.lineEdit().setAlignment(QtCore.Qt.AlignCenter)
self.available_modes.setCurrentIndex(self.dlRunTime._available_modes.index(self.dlRunTime.playmode))
self.available_modes.currentIndexChanged.connect(self.modeChange)
# self.available_modes.lineEdit().setAlignment(QtCore.Qt.AlignCenter)
self.runButton = QPushButton("Run")
self.showLabel = QLabel("Pause the bot:")
self.pauseButton = QPushButton("Pause")
self.exitButton = QPushButton("Exit")
self.hideButton = QPushButton("Hide")
controlLayout.addWidget(self.modeLabel, 0, 0, 1, 2)
controlLayout.addWidget(self.available_modes, 0, 2, 1, 2)
controlLayout.addWidget(self.runLabel, 1, 0)
controlLayout.addWidget(self.runButton, 1, 2, 1, 2)
controlLayout.addWidget(self.showLabel, 2, 0)
controlLayout.addWidget(self.pauseButton, 2, 2, 1, 2)
controlLayout.addWidget(self.hideButton, 3, 0, 1, 2)
controlLayout.addWidget(self.exitButton, 3, 2, 1, 2)
self.botControls.setLayout(controlLayout)
def createRunTimeFields(self):
self.runTimeGroupBox = QGroupBox("RunTime Fields")
self.current_time = QLabel("Current Time: ")
self.current_time_value = QLabel("")
self.nox_status_label = QLabel("{} status: ".format(self.dlRunTime.get_provider().__str__()))
self.nox_status_value = QLabel("")
self.next_run_at_label = QLabel("Next Run At:")
self.next_run_at_value = QLabel("")
self.in_timer = QtCore.QTimer(self)
self.in_timer.setInterval(1000)
self.in_timer.timeout.connect(self.update_values)
self.in_timer.start()
layout = QVBoxLayout()
top = QHBoxLayout()
top.addWidget(self.current_time)
top.addWidget(self.current_time_value)
top.addStretch()
runTimeLayout = QHBoxLayout()
runTimeLayout.addWidget(self.nox_status_label)
runTimeLayout.addWidget(self.nox_status_value)
runTimeLayout.addStretch()
runTimeLayout.addWidget(self.next_run_at_label)
runTimeLayout.addWidget(self.next_run_at_value)
layout.addLayout(top)
layout.addLayout(runTimeLayout)
self.runTimeGroupBox.setLayout(layout)
_counter = 0
def update_values(self, force=False):
self._counter += 1
if self._counter % update_intervals.get('current_time', 1) == 0 or force:
self.current_time_value.setText(QtCore.QDateTime.currentDateTime().toString())
if self._counter % update_intervals.get('nox_status', 1) == 0 or force:
self.nox_status_value.setText(
(lambda: "Running" if self.dlRunTime.get_provider().is_process_running() else "Off")())
if self._counter % update_intervals.get('next_run_at', 1) == 0 or force:
self.next_run_at_value.setText(self.dlRunTime.next_run_at.strftime("%Y-%m-%dT%H:%M:%S"))
if self.dlRunTime.get_provider().current_thread is not None:
            self.runButton.setEnabled(False)
            self.pauseButton.setEnabled(True)
        else:
            self.runButton.setEnabled(True)
            self.pauseButton.setEnabled(False)
if self.dlRunTime._shutdown:
self.__quit__()
def createActions(self):
self.minimizeAction = QAction("Mi&nimize", self, triggered=self.hide)
self.maximizeAction = QAction("Ma&ximize", self,
triggered=self.showMaximized)
self.restoreAction = QAction("&Restore", self,
triggered=self.showNormal)
self.quitAction = QAction("&Quit", self,
triggered=self.__quit__)
def __quit__(self):
QApplication.instance().closingDown()
self.hide()
if not self.dlRunTime._shutdown:
self.dlRunTime.shutdown()
self.in_timer.stop()
self.in_timer.deleteLater()
self.close()
qApp.closeAllWindows()
time.sleep(1)
del self.dlRunTime
QApplication.instance().quit()
def createBotActions(self):
self.startAction = QAction('Start', self, triggered=self.start_bot)
self.pauseAction = QAction('Pause', self, triggered=self.pause_bot)
def start_bot(self):
self.dlRunTime.stop = False
self.dlRunTime.run_now = True
def pause_bot(self):
self.dlRunTime.stop = True
self.dlRunTime.run_now = False
def createTrayIcon(self):
self.trayIconMenu = QMenu(self)
self.trayIconMenu.addAction(self.minimizeAction)
self.trayIconMenu.addAction(self.maximizeAction)
self.trayIconMenu.addAction(self.restoreAction)
self.trayIconMenu.addSeparator()
self.trayIconMenu.addAction(self.quitAction)
self.trayIcon = QSystemTrayIcon(self)
        self.trayIcon.setContextMenu(self.trayIconMenu)
# --- end of bot/dl_gui.py (/yugioh-bot-0.6.3.tar.gz/yugioh-bot-0.6.3/bot/dl_gui.py) ---
import datetime
import json
import h5py
import numpy as np
from bot.utils.common import DotDict
data_object = {
'next_run_at': None,
'last_run_at': None,
'runnow': False,
'stop': False
}
data_object = DotDict(data_object)
data_file_name = "run_at.json"
data_file = data_file_name
def set_data_file(file_path):
    # NOTE: this only changes the module-level default that read_data_file()
    # looks up at call time; read_json_file() and write_data_file() bound the
    # original default when they were defined, so pass file= explicitly there.
    global data_file
    data_file = file_path
def read_data_file(key=None):
try:
with open(data_file) as f:
data = json.load(f, object_hook=date_hook)
data = DotDict(data)
    except (EnvironmentError, json.JSONDecodeError):
        data = DotDict(data_object)
    if key is None:
        return data
    if key in data:
        return data[key]
return None
def read_json_file(file=data_file):
try:
with open(file) as f:
data = json.load(f, object_hook=date_hook)
return data
except FileNotFoundError:
return None
def datetime_handler(x):
if isinstance(x, datetime.datetime):
return x.isoformat()
raise TypeError("Unknown type")
try_formats = ["%Y-%m-%dT%H:%M:%S.%f", "%Y-%m-%dT%H:%M:%S"]
def date_hook(json_dict):
for (key, value) in json_dict.items():
for try_format in try_formats:
try:
json_dict[key] = datetime.datetime.strptime(
value, try_format)
            except (ValueError, TypeError):
pass
return json_dict
def write_data_file(data, file=data_file):
with open(file, 'w') as f:
json.dump(data, f, sort_keys=True,
indent=4, separators=(',', ': '), default=datetime_handler)
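# Illustrative sketch (not part of the original module): the round trip that
# write_data_file()/read_json_file() are built for. datetime values are
# serialised by datetime_handler and parsed back by date_hook; the file name
# below is a throwaway example.
def _example_round_trip(path="example_run_at.json"):
    payload = {
        'next_run_at': datetime.datetime.now() + datetime.timedelta(hours=4),
        'last_run_at': datetime.datetime.now(),
        'runnow': False,
        'stop': False,
    }
    write_data_file(payload, path)
    return read_json_file(path)  # datetimes come back as datetime objects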
"""
Pulled from:
https://codereview.stackexchange.com/questions/120802/recursively-save-python-dictionaries-to-hdf5-files-using-h5py
"""
def save_dict_to_hdf5(dic, filename, mode='w'):
"""
....
"""
with h5py.File(filename, mode) as h5file:
recursively_save_dict_contents_to_group(h5file, '/', dic)
def recursively_save_dict_contents_to_group(h5file, path, dic):
"""
....
"""
for key, item in dic.items():
if isinstance(item, (np.ndarray, np.int64, np.float64, str, bytes)):
h5file[path + key] = item
elif isinstance(item, dict):
recursively_save_dict_contents_to_group(h5file, path + key + '/', item)
else:
raise ValueError('Cannot save %s type' % type(item))
def load_dict_from_hdf5(filename):
"""
....
"""
with h5py.File(filename, 'r') as h5file:
return recursively_load_dict_contents_from_group(h5file, '/')
def recursively_load_dict_contents_from_group(h5file, path):
"""
....
"""
ans = {}
for key, item in h5file[path].items():
if isinstance(item, h5py._hl.dataset.Dataset):
ans[key] = item.value
elif isinstance(item, h5py._hl.group.Group):
ans[key] = recursively_load_dict_contents_from_group(h5file, path + key + '/')
return ans
if __name__ == '__main__':
data = {'x': 'astring',
'y': np.arange(10),
'd': {'z': np.ones((2, 3)),
'b': b'bytestring'}}
print(data)
filename = 'test.h5'
save_dict_to_hdf5(data, filename)
dd = load_dict_from_hdf5(filename)
    print(dd)
# --- end of bot/utils/data.py (/yugioh-bot-0.6.3.tar.gz/yugioh-bot-0.6.3/bot/utils/data.py) ---
import logging
import os
import time
from abc import abstractmethod
import watchdog
from watchdog.events import LoggingEventHandler
from watchdog.events import PatternMatchingEventHandler
from watchdog.observers import Observer
class WatchFile(PatternMatchingEventHandler):
root = logging.getLogger("bot.watcher")
def on_moved(self, event):
super(WatchFile, self).on_moved(event)
self.root.debug("File %s was just moved" % event.src_path)
def on_created(self, event):
super(WatchFile, self).on_created(event)
self.root.debug("File %s was just created" % event.src_path)
def on_deleted(self, event):
super(WatchFile, self).on_deleted(event)
self.root.debug("File %s was just deleted" % event.src_path)
def on_modified(self, event):
super(WatchFile, self).on_modified(event)
self.root.debug("File %s was just modified" % event.src_path)
def on_any_event(self, event):
super(WatchFile, self).on_modified(event)
self.notify_event(event)
def notify_event(self, event):
self.root.debug(str(event))
self.event_notification(event)
@abstractmethod
def event_notification(self, event):
raise NotImplementedError("event notification not ready")
class SyncWithFile(WatchFile):
_observer = None
def __init__(self, file, auto_start=False):
self.watcher = WatchFile(patterns=[file])
self.watcher.event_notification = self.event_notification
self.file_observing = file
if auto_start:
self.start_observer()
@property
def observer(self):
return self._observer
@observer.setter
def observer(self, observer):
self._observer = observer
_file_observing = None
@property
def file_observing(self):
return self._file_observing
@file_observing.setter
def file_observing(self, value):
self._file_observing = value
def event_notification(self, event):
""" HANDLES ROUTING OF WATCHDOG EVENT TYPES, SOME EDITORS MOVE TO TEMP FILES TO WRITE"""
# TODO LP Investigate other possible file modifications
if isinstance(event, watchdog.events.FileModifiedEvent):
self.settings_modified(event)
elif isinstance(event, watchdog.events.FileMovedEvent):
if event.dest_path == self.file_observing:
self.settings_modified(event)
else:
self.root.debug("Event type {} is not handled".format(type(event)))
@abstractmethod
def settings_modified(self, events):
raise NotImplementedError("settings_modified not implemented")
def start_observer(self):
self.observer = Observer()
self.observer.schedule(self.watcher, os.path.dirname(self.file_observing), recursive=False)
self.observer.start()
def stop_observer(self):
if 'stop' in dir(self.observer):
self.observer.stop()
if __name__ == "__main__":
data_file = r"D:\Sync\OneDrive\Yu-gi-oh_bot\run_at.json"
syncer = SyncWithFile(data_file, auto_start=True)
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
syncer.stop_observer()
        syncer.observer.join()
# --- end of bot/utils/watcher.py (/yugioh-bot-0.6.3.tar.gz/yugioh-bot-0.6.3/bot/utils/watcher.py) ---
import asyncio
import datetime
import logging
import threading
import subprocess
import apscheduler
import cv2
from bot.duel_links_runtime import DuelLinkRunTime
from bot.providers import trainer_matches as tm
from bot.providers.duellinks import DuelLinks, LOW_CORR, DuelError, alpha_numeric, DuelLinksInfo
from bot.providers.misc import Misc
from bot.providers.actions import Actions
from bot.common import crop_image, mask_image
from bot.modes import battle_modes
from bot.modes.SpecialEvents import StreetReplay, RankedDuelsQuickStart
class Provider(DuelLinks, Misc, Actions):
scheduler = None
current_job = None # indicates the current job running
lock = None
# indicates that this provider is currently battling
current_battle = None
# logger
root = logging.getLogger("bot.provider")
assets = None
predefined = None
def __init__(self, scheduler, config, run_time):
self.scheduler = scheduler
self._config = config
self.assets = config.get('locations', 'assets')
self.lock = None
self.run_time = run_time # type: DuelLinkRunTime
self.battle_modes = [x(self) for x in battle_modes]
self.check_events = [
StreetReplay(self),
RankedDuelsQuickStart(self)
]
def auto(self):
t = threading.currentThread()
self.register_thread(t)
self.root.info("starting auto run through")
for x in range(0, 8):
if self.run_time.stop:
# Leaves a checkpoint when stopped
self.current_run = x
break
self.root.debug("Run through {}".format(x + 1))
self.compare_with_back_button()
self.wait_for_ui(1)
self.swipe_right()
try:
self.scan()
except Exception as e:
self.register_thread(None)
raise e
self.register_thread(None)
def battle_mode(self, battle, version, info):
img = self.get_img_from_screen_shot(True)
for mode in self.battle_modes:
if mode.check_battle(info, img):
mode.start(battle, version, info)
break
def debug_battle(self):
self.battle(check_battle=self.check_battle)
# self.CheckBattle()
def __check_battle_is_running__(self):
self.root.info("CHECKING AUTO DUEL STATUS")
img = self.get_img_from_screen_shot()
status = self.determine_autoduel_status(img)
self.root.info("AUTO_DUEL STATUS: {}".format(status))
if not status and self.current_battle:
self.click_auto_duel()
self.check_battle()
def check_battle(self, signal_done=False, delay=5):
self.lock.acquire()
try:
self.scheduler.remove_job(self.current_job)
except apscheduler.jobstores.base.JobLookupError:
if signal_done:
self.current_battle = False
self.lock.release()
return
when = datetime.datetime.now() + datetime.timedelta(seconds=delay)
job_id = 'cron_check_battle_at_%s' % (when.isoformat())
self.current_job = job_id
self.scheduler.add_job(self.__check_battle_is_running__, trigger='date',
id=job_id, run_date=when)
self.lock.release()
self.root.debug(job_id)
def is_street_replay(self):
img = self.get_img_from_screen_shot()
street_replay = self.predefined.street_replay
img = crop_image(img, **street_replay)
word = self.img_to_string(img, "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789")
if 'street' in word or 'replay' in word.lower():
return True
return False
def get_current_page(self, img):
img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
area = crop_image(img, **self.predefined.page_area)
area = mask_image([254], [255], area)
height, width = area.shape
current_page = 0
for x in range(4):
            box = crop_image(area, (x * width / 4), 0, (width / 4), height)  # each page marker occupies one quarter of the strip
if cv2.countNonZero(box) > 0:
current_page = x
break
return current_page + 1
def guided_mode(self):
t = threading.currentThread()
self.register_thread(t)
while True:
if self.run_time.stop:
break
try:
battle, version = self.verify_battle(log=False)
if battle:
dl_info = DuelLinksInfo(None, None, None, "Starting Battle")
self.current_battle = True
self.root.info("Guided mode on")
self.scan_for_ok(LOW_CORR)
self.battle_mode(battle, version, dl_info)
self.current_battle = False
except DuelError:
self.wait_for_ui(1)
except Exception as e:
self.register_thread(None)
raise e
self.register_thread(None)
def possible_battle_points(self):
if self.run_time.stop:
self.root.info("Received Stopping signal")
return
img = self.get_img_from_screen_shot()
t = tm.Trainer(img)
t.capture_white_circles()
current_page = self.get_current_page(img)
logging.debug("Current-Page {}".format(current_page))
for x, y in t.circlePoints:
if self.run_time.stop:
self.root.info("Received Stopping signal")
break
yield x, y, current_page
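    # Illustrative sketch (not part of the original Provider): shows how a
    # concrete scan() implementation typically consumes possible_battle_points().
    # The tap()/special_events() sequencing is an assumption based on the
    # generic auto() loop above, not the definitive scan logic.
    def _example_scan_sketch(self):
        for x, y, current_page in self.possible_battle_points():
            self.compare_with_back_button()
            self.wait_for_ui(1)
            self.tap(x, y)
            self.special_events(DuelLinksInfo(x, y, current_page, "Bot Play"))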
def special_events(self, dl_info: DuelLinksInfo):
img = self.get_img_from_screen_shot(True)
for event in self.check_events:
if event.event_condition(dl_info, img):
event.event_occurred(dl_info, img)
def scan(self):
raise NotImplementedError("scan not implemented")
def verify_battle(self, ori_img=None, log=True):
if log:
self.root.info("Verifying battle")
if ori_img is None:
ori_img = self.get_img_from_screen_shot()
img = crop_image(ori_img, **self.predefined.auto_duel_location_pre)
img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
word = self.img_to_string(img, alpha_numeric).lower()
if word.startswith("auto") or 'auto' in word:
pointer = self.predefined.duel_variant_version('v2-autoduel')
return pointer, 2
img = crop_image(ori_img, **self.predefined.duel_location_pre)
word = self.img_to_string(img, alpha_numeric).lower()
if word.startswith("due") or word == "duel":
pointer = self.predefined.duel_variant_version('v1')
return pointer, 1
if log:
self.root.debug("No Auto-Duel button or Button Found")
self.root.critical("Cannot find the auto-duel button")
raise DuelError("Auto Duel Button failed comparison test")
def wait_for_auto_duel(self):
self.root.debug("WAITING FOR AUTO-DUEL TO APPEAR")
word = ''
while 'Auto-Duel' not in word and 'AutoDuel' not in word and not self.run_time.stop:
img = self.get_img_from_screen_shot()
area = img.crop(self.auto_duel_box)
try:
word = Provider.img_to_string(area, "Auto-Duel")
except:
self.wait_for_ui(1)
continue
self.wait_for_ui(.5)
self.click_auto_duel()
def wait_for_white_bottom(self, tryScanning=False):
self.root.debug("WAITING FOR WHITE BOTTOM TO APPEAR")
img = self.get_img_from_screen_shot()
b = self.check_if_battle(img)
while not b and not self.run_time.stop:
if tryScanning:
found = self.scan_for_ok(LOW_CORR)
if found:
self.wait_for_ui(.5)
img = self.get_img_from_screen_shot()
b = self.check_if_battle(img)
if b:
break
self.wait_for_ui(1)
self.root.debug("White Bottom Found")
def wait_for_ui(self, amount):
if not self.run_time.stop:
super(Provider, self).wait_for_ui(amount)
def do_system_call(self, command):
if not self.run_time.stop:
CREATE_NO_WINDOW = 0x08000000
subprocess.call(command, shell=True, creationflags=CREATE_NO_WINDOW)
@staticmethod
def img_to_string(img, char_set=None, mask_area=None):
if mask_area is not None:
img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
lower, upper = mask_area[0], mask_area[1]
img = mask_image(lower, upper, img)
cv2.imwrite("tmp\\ocr.png", img)
command = "bin\\tess\\tesseract.exe --tessdata-dir bin\\tess\\tessdata tmp\\ocr.png tmp\\ocr "
if char_set is not None:
command += "-c tessedit_char_whitelist=" + char_set + " "
command += "-psm 7 "
command += "> nul 2>&1"
CREATE_NO_WINDOW = 0x08000000
subprocess.call(command, shell=True, creationflags=CREATE_NO_WINDOW)
# Get the largest line in txt
with open("tmp\\ocr.txt") as f:
content = f.read().splitlines()
output_line = ""
for line in content:
line = line.strip()
if len(line) > len(output_line):
output_line = line
return output_line
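    # Illustrative sketch (not part of the original Provider): OCRs a cropped
    # button region in the same spirit as verify_battle() above. predefined_crop
    # is a placeholder for one of the self.predefined location dicts (for
    # example auto_duel_location_pre).
    def _example_read_button_text(self, predefined_crop):
        img = self.get_img_from_screen_shot()
        img = crop_image(img, **predefined_crop)
        return self.img_to_string(img, alpha_numeric).lower()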
def __generic_wait_for__(self, message, condition_check, fn, *args, **kwargs):
self.root.info("Waiting for {}".format(message))
timeout = kwargs.get('timeout', 10)
throwException = kwargs.get('throw', True)
async def wait_for(self):
exceptions_occurred = 0
while not self.run_time.stop:
try:
condition = fn(*args, **kwargs)
except Exception:
if exceptions_occurred > 5:
if throwException:
raise Exception("Maximum exception count occurred waiting for {}".format(message))
return False
exceptions_occurred += 1
await self.async_wait_for_ui(1)
continue
if condition_check(condition):
break
await self.async_wait_for_ui(2)
return True
async def main(self):
return await wait_for(self)
loop = self.run_time.get_loop()
future = asyncio.run_coroutine_threadsafe(main(self), loop)
return future.result(timeout)
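    # Illustrative sketch (not part of the original Provider): shows the calling
    # convention of __generic_wait_for__, polling is_process_running() until it
    # reports True or the default 10 second result timeout trips. Note that any
    # extra keyword arguments would be forwarded to the polled function as well.
    def _example_wait_for_process(self):
        return self.__generic_wait_for__(
            "DuelLinks process",
            lambda running: running is True,
            self.is_process_running)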
def __wrapper_kmeans_result__(self, trainer, location, corr, info=None):
if trainer.get_matches(location, corr):
x, y = trainer.kmeans.cluster_centers_[0]
if info:
self.root.info("NPC Battle Mode,Points: ({},{}) at location: ({}), message: {}".format(
info.x, info.y, info.page, info.status
))
self.tap(x, y)
return True
        return False
# --- end of bot/providers/provider.py (/yugioh-bot-0.6.3.tar.gz/yugioh-bot-0.6.3/bot/providers/provider.py) ---
import time
from abc import abstractmethod
import asyncio
from bot.shared import *
class Event(object):
_name = None
_args = []
_kwargs = {}
def __init__(self, func, *args, **kwargs):
self._name = func
self._args = args
self._kwargs = kwargs
@property
def func(self):
return self._name
@property
def args(self):
return self._args
@property
def kwargs(self):
return self._kwargs
class DuelLinksInfo(object):
_x = None
_y = None
_page = None
_status = None
_name = None
def __init__(self, x, y, page, status):
self._x = x
self._y = y
self._page = page
self._status = status
@property
def x(self):
return self._x
@property
def y(self):
return self._y
@property
def page(self):
return self._page
@property
def status(self):
return self._status
@property
def name(self):
return self._name
@x.setter
def x(self, value):
self._x = value
@y.setter
def y(self, value):
self._y = value
@page.setter
def page(self, value):
self._page = value
@status.setter
def status(self, value):
self._status = value
@name.setter
def name(self, value):
self._name = value
class EventExecutor(object):
def __init__(self):
pass
def do_event(self, _event):
        exists = getattr(self, _event.func, False)
        if exists:
            func = getattr(self, _event.func)
            if not callable(func):
                return False
            func(*_event.args, **_event.kwargs)
return True
return False
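# Illustrative sketch (not part of the original module): a tiny executor whose
# method is dispatched by name through an Event. The greet handler and its
# arguments are made up; do_event() returns True once the handler has been
# found and invoked.
class _ExampleExecutor(EventExecutor):
    def greet(self, name="duelist"):
        return "hello {}".format(name)
def _example_dispatch():
    executor = _ExampleExecutor()
    return executor.do_event(Event("greet", name="Yami"))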
class DuelLinks(object):
_thread = None
@property
def current_thread(self):
return self._thread
@current_thread.setter
def current_thread(self, thread):
self.register_thread(thread)
def register_thread(self, thread):
self._thread = thread
_auto_duel_box = None
@property
    def auto_duel_box(self):
        "Determines the location of the auto duel button."
return self._auto_duel_box
_current_run = 0
@property
def current_run(self):
return self._current_run
@current_run.setter
def current_run(self, run):
self._current_run = run
_sleep_factor = 1
@property
def sleep_factor(self):
return self._sleep_factor
@sleep_factor.setter
def sleep_factor(self, value):
self._sleep_factor = value
def wait_for_ui(self, amount):
time.sleep(amount * self.sleep_factor)
async def async_wait_for_ui(self, amount):
await asyncio.sleep(amount * self.sleep_factor)
@abstractmethod
def auto(self):
raise NotImplementedError("auto not implemented")
@abstractmethod
    def debug_battle(self):
        "Helps to debug when a battle is not going right."
raise NotImplementedError("debug_battle not implemented")
@abstractmethod
def check_battle_is_running(self):
raise NotImplementedError("check_battle_is_running not implemented")
@abstractmethod
def check_battle(self):
raise NotImplementedError("check_battle not implemented")
@abstractmethod
def scan(self):
raise NotImplementedError("scan not implemented")
@abstractmethod
def method_name(self):
raise NotImplementedError("method_name not implemented")
@abstractmethod
def compare_with_cancel_button(self, corr=HIGH_CORR, info=None, img=None):
raise NotImplementedError("compare_with_cancel_button not implemented")
@abstractmethod
def compare_with_back_button(self, corr=HIGH_CORR, info=None, img=None):
raise NotImplementedError("compare_with_back_button not implemented")
@abstractmethod
def scan_for_ok(self, corr=HIGH_CORR, info=None, img=None):
raise NotImplementedError("scan_for_ok not implemented")
@abstractmethod
def scan_for_close(self, corr=HIGH_CORR, info=None, img=None):
raise NotImplementedError("scan_for_close not implemented")
@abstractmethod
def get_current_page(self, img):
raise NotImplementedError("get_current_image not implemented")
@abstractmethod
def click_auto_duel(self):
raise NotImplementedError("click_auto_duel not implemented")
@abstractmethod
def determine_autoduel_status(self, img):
raise NotImplementedError("determine_autoduel_status not implemented")
@abstractmethod
def battle(self, info=None, check_battle=None):
raise NotImplementedError("battle not implemented")
@abstractmethod
def check_if_battle(self, img):
raise NotImplementedError("check_if_battle not implemented")
@abstractmethod
def verify_battle(self, img=None, log=True):
raise NotImplementedError("verify_battle not implemented")
@abstractmethod
def pass_through_initial_screen(self):
raise NotImplementedError("pass_through_initial_screen not implemented")
@abstractmethod
def wait_for(self, word, try_scanning=False):
raise NotImplementedError("wait_for not implemented")
@abstractmethod
def wait_for_auto_duel(self):
raise NotImplementedError("wait_for_auto_duel not implemented")
@abstractmethod
def wait_for_white_bottom(self):
raise NotImplementedError("wait for white bottom not implemented")
class DuelError(Exception):
"""Exception raised for errors in the in duel Links"""
def __init__(self, value):
self.value = value
def __str__(self):
return self.value
| yugioh-bot | /yugioh-bot-0.6.3.tar.gz/yugioh-bot-0.6.3/bot/providers/duellinks.py | duellinks.py
import numpy as np
import cv2
from matplotlib import pyplot as plt
import os
from bot.common import mask_image
os.environ['FOR_DISABLE_CONSOLE_CTRL_HANDLER'] = 'T'
from sklearn.cluster import KMeans
from sklearn.metrics.pairwise import euclidean_distances
class Trainer(object):
_debug = False
def __init__(self, query, x=0, y=0):
self.query = query
self.xThreshold = x
self.yThreshold = y
if type(query) is np.ndarray:
self.query = query
else:
self.query = cv2.imread(query, 0)
self.goodMatches = []
self.images = []
self.circlePoints = []
self.kmeans = None
self.white_query = None
def get_matches(self, train, corr):
train_img = cv2.imread(train, 0)
query_img = self.query
# Initiate SIFT detector
sift = cv2.xfeatures2d.SIFT_create()
# find the keypoints and descriptors with SIFT
kp1, des1 = sift.detectAndCompute(train_img, None)
kp2, des2 = sift.detectAndCompute(query_img, None)
if des1 is None or des2 is None:
return False
# create BFMatcher object
bf = cv2.BFMatcher()
try:
matches = bf.knnMatch(des1, des2, k=2)
except cv2.error:
return False
good_matches = []
cluster = []
for m, n in matches:
img2_idx = m.trainIdx
img1_idx = m.queryIdx
(x1, y1) = kp1[img1_idx].pt
(x2, y2) = kp2[img2_idx].pt
# print("Comare %d to %d and %d to %d" % (x1,x2,y1,y2))
if m.distance < 0.8 * n.distance and y2 > self.yThreshold and x2 < self.xThreshold:
good_matches.append([m])
cluster.append([int(x2), int(y2)])
if len(cluster) <= corr:
return False
self.kmeans = KMeans(n_clusters=1, random_state=0).fit(cluster)
new_cluster, new_matches = self.compare_distances(train_img, cluster, good_matches)
if len(new_cluster) == 0 or len(new_cluster) / len(cluster) < .5:
return False
img3 = cv2.drawMatchesKnn(
train_img, kp1, query_img, kp2, new_matches, None, flags=2)
if self._debug:
self.images.append(img3)
self.debug_matcher(img3)
return True
def compare_distances(self, train_img, cluster, good_matches):
# Sometimes the SIFT algorithm matches random points on screen, so it is
# necessary to check the euclidean distance of each point from the cluster centre
distances = euclidean_distances([self.kmeans.cluster_centers_[0]], cluster)
height, width = train_img.shape
new_cluster = []
new_matches = []
# Points whose distance from the cluster centre exceeds
# np.sqrt((width / 2) ** 2 + (height / 2) ** 2) are assumed to be incorrect matches.
# This only works on images that share the same dimensions as the query image.
for index, distance in enumerate(distances[0]):
if distance <= np.sqrt((width / 2) ** 2 + (height / 2) ** 2):
new_cluster.append(cluster[index])
new_matches.append(good_matches[index])
return new_cluster, new_matches
def debug_matcher(self, img):
# plt.scatter(*zip(*cluster)),plt.axis([0,480,0,800]),plt.gca().invert_yaxis(),plt.show()
# plt.imshow(img)
# plt.show()
cv2.imwrite('debug_pic.png', img)
def read_captured_circles(self):
img = cv2.cvtColor(self.query, cv2.COLOR_BGR2GRAY)
img = cv2.medianBlur(img, 7)
cimg = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR)
circles = cv2.HoughCircles(img, cv2.HOUGH_GRADIENT, 1, 30,
param1=50, param2=30, minRadius=20, maxRadius=50)
if circles is None:
return
circles = np.uint16(np.around(circles))
for i in circles[0, :]:
if i[1] < 400:
continue
self.circlePoints.append((i[0], i[1]))
if self._debug:
self.draw_circles(circles, cimg)
def capture_white_circles(self, x_limit=480, y_limit=670):
self.prep_for_white_circles()
img = cv2.cvtColor(self.white_query, cv2.COLOR_BGR2GRAY)
cimg = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR)
circles = cv2.HoughCircles(img, cv2.HOUGH_GRADIENT, 1, 40,
param1=50, param2=30, minRadius=5, maxRadius=60)
if circles is None:
return
circles = np.uint16(np.around(circles))
for i in circles[0, :]:
if i[0] <= x_limit and i[1] <= y_limit:
self.circlePoints.append((i[0], i[1]))
if self._debug:
self.draw_circles(circles, cimg)
def draw_circles(self, circles, cimg):
for i in circles[0, :]:
# draw the outer circle
cv2.circle(cimg, (i[0], i[1]), i[2], (0, 255, 0), 2)
# draw the center of the circle
cv2.circle(cimg, (i[0], i[1]), 2, (0, 0, 255), 3)
self.images.append(cimg)
def prep_for_white_circles(self):
lower, upper = ([215, 215, 215], [255, 255, 255])
self.white_query = mask_image(lower, upper, self.query, apply_mask=True)
def compare(self):
if len(self.images) > 0:
plot_image = self.images[0]
for x in range(1, len(self.images)):
plot_image = np.concatenate((plot_image, self.images[x]), axis=1)
plt.imshow(plot_image), plt.show()
@staticmethod
def show_area(x, y, image):
if type(image) is np.ndarray:
pass
else:
image = cv2.imread(image)
h, w, d = image.shape
image = image[0:max(0, (h - y) - 1), 0:max(x, 0)]
plt.imshow(cv2.cvtColor(image, cv2.COLOR_BGR2RGB)), plt.show()
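# Illustrative usage (not part of the original module; the file names below are
# examples only): `query` may be an image array or a path, x/y are the keypoint
# thresholds, and get_matches() returns True when more than `corr` SIFT matches
# fall inside those thresholds and cluster close to each other.
#
# t = Trainer('screenshot.png', x=480, y=50)
# if t.get_matches('assets/ok_box.png', corr=5):
# print('ok box found on screen')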
class BoundingTrainer(Trainer):
def __init__(self, query, x=0, y=0, w=0, h=0, bounding_area=None, blacklist=None):
if bounding_area is not None:
x, y, w, h = bounding_area.get('left'), bounding_area.get('top'), \
bounding_area.get('width'), bounding_area.get('height')
super(BoundingTrainer, self).__init__(query, x, y)
self.xThreshold_lower = self.xThreshold
self.yThreshold_lower = self.yThreshold
self.blacklist = blacklist
if w is None:
self.xThreshold_upper = self.query.shape[1]
else:
self.xThreshold_upper = x + w
if h is None:
self.yThreshold_upper = self.query.shape[0]
else:
self.yThreshold_upper = y + h
def in_box(self, x, y):
if self.xThreshold_lower <= x <= self.xThreshold_upper:
if self.yThreshold_lower <= y <= self.yThreshold_upper:
return True
return False
def in_blacklist(self, x, y):
if self.blacklist:
for exclude in self.blacklist:
cx, cy, cw, ch = exclude.get('left'), exclude.get('top'), \
exclude.get('width'), exclude.get('height')
if cx <= x <= cx + cw:
if cy <= y <= cy + ch:
return True
return False
def get_matches(self, train, corr):
train_img = cv2.imread(train, 0)
query_img = self.query
# Initiate SIFT detector
sift = cv2.xfeatures2d.SIFT_create()
# find the keypoints and descriptors with SIFT
kp1, des1 = sift.detectAndCompute(train_img, None)
kp2, des2 = sift.detectAndCompute(query_img, None)
if des1 is None or des2 is None:
return False
# create BFMatcher object
bf = cv2.BFMatcher()
try:
matches = bf.knnMatch(des1, des2, k=2)
except cv2.error:
return False
good_matches = []
cluster = []
for m, n in matches:
img2_idx = m.trainIdx
img1_idx = m.queryIdx
(x1, y1) = kp1[img1_idx].pt
(x2, y2) = kp2[img2_idx].pt
# print("Comare %d to %d and %d to %d" % (x1,x2,y1,y2))
if m.distance < 0.8 * n.distance and self.in_box(x2, y2):
good_matches.append([m])
cluster.append([int(x2), int(y2)])
if len(cluster) <= corr:
return False
self.kmeans = KMeans(n_clusters=1, random_state=0).fit(cluster)
new_cluster, new_matches = self.compare_distances(train_img, cluster, good_matches)
if len(new_cluster) == 0 or len(new_cluster) / len(cluster) < .5:
return False
img3 = cv2.drawMatchesKnn(
train_img, kp1, query_img, kp2, new_matches, None, flags=2)
if self._debug:
self.images.append(img3)
self.debug_matcher(img3)
return True
def capture_white_circles(self):
self.prep_for_white_circles()
img = cv2.cvtColor(self.white_query, cv2.COLOR_BGR2GRAY)
img = cv2.medianBlur(img, 1)
cimg = cv2.cvtColor(self.query, cv2.COLOR_BGR2RGB)
circles = cv2.HoughCircles(img, cv2.HOUGH_GRADIENT, 1, img.shape[0] / 15,
param1=50, param2=22, minRadius=5, maxRadius=60)
if circles is None:
return
circles = np.uint16(np.around(circles))
new_circles = []
for i in circles[0, :]:
if self.in_box(i[0], i[1]) and not self.in_blacklist(i[0], i[1]):
self.circlePoints.append((i[0], i[1]))
new_circles.append(i)
if self._debug:
# self.draw_circles(circles, cimg)
if len(new_circles) > 0:
self.draw_circles(np.array([new_circles]), cimg)
@staticmethod
def show_area(x, y, w, h, image):
if type(image) is np.ndarray:
pass
else:
image = cv2.imread(image)
if h is None:
h = image.shape[0]
if w is None:
w = image.shape[1]
image = image[y:y + h, x:x + w]
plt.imshow(cv2.cvtColor(image, cv2.COLOR_BGR2RGB)), plt.show()
@staticmethod
def show_area_bounded(bounding_area, image):
return BoundingTrainer.show_area(bounding_area.get('left'),
bounding_area.get('top'),
bounding_area.get('width'),
bounding_area.get('height'),
image)
| yugioh-bot | /yugioh-bot-0.6.3.tar.gz/yugioh-bot-0.6.3/bot/providers/trainer_matches.py | trainer_matches.py
import os
from abc import abstractmethod
from datetime import datetime
import cv2
import numpy as np
from bot import default_timestamp
from bot.utils.data import load_dict_from_hdf5, save_dict_to_hdf5
class Predefined(object):
_config = None
dataset = None
version = None
assets = None
def __init__(self, config, version):
self._config = config
self.cache_file = config.get('locations', 'cache_file')
self.dataset = self.dataset or self.__class__.__name__
self.assets = config.get('locations', 'assets')
self.version = version
self.get_cache()
self.check_cache()
_cache = None
_last_read = datetime.fromtimestamp(default_timestamp)
@property
def cache(self):
return self._cache
@cache.setter
def cache(self, value):
self._last_read = datetime.now()
self._cache = value
def check_cache(self):
pass
def get_cache(self):
if not os.path.exists(self.cache_file):
self.generate()
if self.cache is None:
self.cache = load_dict_from_hdf5(self.cache_file)
if self.dataset in self.cache.keys():
return
self.generate()
_duel_variant = None
@property
def duel_variant(self):
raise NotImplementedError("Class {} did not implement duel variant property".format(self.__class__.__name__))
_auto_duel = None
@property
def autoduel(self):
raise NotImplementedError("Class {} did not implement auto duel property".format(self.__class__.__name__))
# TODO: IMPLEMENT METHOD TO DETERMINE THE ACCURACY OR THE LIKELIHOOD THAT THIS IS AN AUTODUEL BUTTON
def determine_autoduel_status(self, img):
vals = self.cache.get(self.dataset)
autodueloff = vals['auto_duel_off']
autoduelon = vals['auto_duel_on']
current = self.get_image_stats(img, **self.autoduel)
dist1 = np.linalg.norm(current - autoduelon)
dist2 = np.linalg.norm(current - autodueloff)
if dist1 < dist2:
return True
return False
def determine_duel_variant(self, img):
vals = self.cache.get(self.dataset)
ver_duel_variant = vals['duel_variant']
edges = cv2.Canny(img, 240, 255)
current = Predefined.get_image_stats(edges, **self.duel_variant)
dist1 = np.linalg.norm(current - ver_duel_variant)
if dist1 <= 5:
return True
return False
@staticmethod
def get_image_stats(img, left=0, top=0, width=0, height=0):
crop_img = img[top:(top + height), left:(left + width)]
(means, stds) = cv2.meanStdDev(crop_img)
stats = np.concatenate([means, stds]).flatten()
return stats
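# Note (added for clarity): for a 3-channel BGR crop, get_image_stats returns a
# 6-element vector (the per-channel means followed by the per-channel standard
# deviations). determine_autoduel_status and determine_duel_variant compare such
# vectors against the cached reference values using the euclidean norm.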
def write_hdf5(self, data, dataset):
data = {dataset: data}
save_dict_to_hdf5(data, self.cache_file, mode='a')
@abstractmethod
def generate(self):
raise NotImplementedError("Class {} did not implement generate".format(self.__class__.__name__))
@property
def street_replay_location(self):
return 4
@property
def quick_rankduel_location(self):
return 2
| yugioh-bot | /yugioh-bot-0.6.3.tar.gz/yugioh-bot-0.6.3/bot/providers/predefined.py | predefined.py
import base64
import concurrent.futures
import logging
import os
import subprocess
import sys
import win32gui
import cv2
import deprecation
import numpy as np
from skimage.measure import compare_ssim
from bot import clean_version
from bot.common import loop_scan, mask_image
from bot.providers import trainer_matches as tm
from bot.providers.duellinks import DuelLinksInfo, DuelError
from bot.providers.nox.predefined import NoxPredefined
from bot.providers.provider import Provider
from bot.shared import *
class Nox(Provider):
NoxPath = None
_debug = False
def __init__(self, scheduler, config, run_time):
super(Nox, self).__init__(scheduler, config, run_time)
self.predefined = NoxPredefined(self._config, nox_current_version)
self.NoxPath = os.path.join(self._config.get('nox', 'location'), 'Nox.exe')
def swipe_time(self, x1, y1, x2, y2, time_amount):
command = "bin\\adb.exe shell input swipe %d %d %d %d %d" % (
x1, y1, x2, y2, time_amount)
self.do_system_call(command)
def swipe_right(self, time_sleep=0):
self.swipe(0, 500, 100, 500)
self.wait_for_ui(2)
def swipe(self, x1, y1, x2, y2):
command = "bin\\adb.exe shell input swipe %d %d %d %d " % (x1, y1, x2, y2)
self.do_system_call(command)
def take_png_screenshot(self):
while True:
try:
command = "bin\\adb.exe shell \"screencap -p | busybox base64\""
pcommand = os.popen(command)
png_screenshot_data = pcommand.read()
png_screenshot_data = base64.b64decode(png_screenshot_data)
pcommand.close()
break
except KeyboardInterrupt:
sys.exit(0)
except:
print("[!] Failed to get screen")
return png_screenshot_data
def tap(self, x, y):
self.root.debug("Tapping at location ({},{})".format(int(x), int(y)))
command = "bin\\adb.exe shell input tap %d %d" % (int(x), int(y))
if self._debug:
# Helper to debug taps
input("waiting for confirmation press enter")
self.do_system_call(command)
def key_escape(self):
command = "bin\\adb.exe shell input keyevent 4"
self.do_system_call(command)
root = logging.getLogger("bot.provider.Nox")
@staticmethod
def __str__():
return "Nox"
def wait_for(self, word, try_scanning=False):
self.root.info("WAITING FOR {} BUTTON TO APPEAR".format(word))
ok = ''
while ok != word and not self.run_time.stop:
# root.debug("waiting for {}".format(word))
img = self.get_img_from_screen_shot()
img = img[745:770, 210:270]
try:
if try_scanning:
self.scan_for_ok(LOW_CORR)
ok = self.img_to_string(img, alphabet)
except:
self.wait_for_ui(1)
continue
if ok == word:
break
self.wait_for_ui(2)
def __is_initial_screen__(self, *args, **kwargs):
original = cv2.imread(os.path.join(self.assets, "start_screen.png"))
against = self.get_img_from_screen_shot()
# convert the images to grayscale
original = mask_image([127], [255], cv2.cvtColor(original, cv2.COLOR_BGR2GRAY), True)
against = mask_image([127], [255], cv2.cvtColor(against, cv2.COLOR_BGR2GRAY), True)
(score, diff) = compare_ssim(original, against, full=True)
if score > .9:
return True
return False
def __start_app__(self):
command = "bin\\adb.exe shell monkey -p jp.konami.duellinks -c android.intent.category.LAUNCHER 1"
self.do_system_call(command)
def pass_through_initial_screen(self, already_started=False):
self.__start_app__()
if not already_started:
self.root.info("Passing Through Start Screen")
else:
self.root.info("Checking for Start Screen")
try:
is_home_screen = self.__generic_wait_for__('DuelLinks Landing Page', lambda x: x is True,
self.__is_initial_screen__, timeout=4, throw=False)
except concurrent.futures.TimeoutError:
is_home_screen = False
if not is_home_screen:
return
self.__generic_wait_for__('DuelLinks Landing Page', lambda x: x is True,
self.__is_initial_screen__, timeout=20)
self.tapnsleep(self.predefined.yugioh_initiate_link, 2)
timeout = 45
if self.scan_for_download():
timeout = 480
self.__generic_wait_for__('Notifications Page', lambda x: x is True, self.wait_for_notifications,
timeout=timeout)
self.wait_for_notifications()
def wait_for_notifications(self, *args, **kwargs):
self.scan_for_close()
self.wait_for_ui(1)
self.scan_for_ok()
self.wait_for_ui(3)
t = self.compare_with_back_button(corr=5)
return t
def scan_for_download(self, corr=HIGH_CORR, info=None):
corrword = 'HIGH' if corr == HIGH_CORR else 'LOW'
self.root.debug("Looking for Download Button, {} CORRERLATION".format(corrword))
img = self.get_img_from_screen_shot()
t = tm.Trainer(img, 480, 0)
location = os.path.join(self.assets, "download_button.png")
return self.__wrapper_kmeans_result__(t, location, corr, info)
def scan_for_close(self, corr=HIGH_CORR, info=None, img=None):
corrword = 'HIGH' if corr == HIGH_CORR else 'LOW'
self.root.debug("LOOKING FOR CLOSE BUTTON, {} CORRERLATION".format(corrword))
if img is None:
img = self.get_img_from_screen_shot()
t = tm.Trainer(img, 400, 500)
location = os.path.join(self.assets, "close.png")
return self.__wrapper_kmeans_result__(t, location, corr, info)
def method_name(self):
super(Nox, self).method_name()
def start_process(self):
try:
self.root.info("Starting Nox...")
process = subprocess.Popen([self.NoxPath], stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
except FileNotFoundError as e:
self.root.critical("Nox executable not found")
raise e
except:
self.root.error("The program can't run Nox")
raise NotImplementedError
def is_process_running(self):
try:
if win32gui.FindWindow(None, "Nox") or win32gui.FindWindow(None, "NoxPlayer"):
return True
except:
return False
def compare_with_cancel_button(self, corr=HIGH_CORR, info=None, img=None):
corrword = 'HIGH' if corr == HIGH_CORR else 'LOW'
self.root.debug("LOOKING FOR CANCEL BUTTON, {} CORRERLATION".format(corrword))
img = self.get_img_from_screen_shot()
t = tm.Trainer(img, 240, 0)
location = os.path.join(self.assets, "cancel_button.png")
return self.__wrapper_kmeans_result__(t, location, corr, info)
def compare_with_back_button(self, corr=HIGH_CORR, info=None, img=None):
corrword = 'HIGH' if corr == HIGH_CORR else 'LOW'
self.root.debug("LOOKING FOR BACK BUTTON, {} CORRERLATION".format(corrword))
if img is None:
img = self.get_img_from_screen_shot()
t = tm.Trainer(img, 150, 720)
location = os.path.join(self.assets, "back__.png")
return self.__wrapper_kmeans_result__(t, location, corr, info)
def click_auto_duel(self):
self.root.debug("AUTO-DUEL FOUND CLICKING")
self.wait_for_ui(.1)
self.tap(356, 85)
@deprecation.deprecated(deprecated_in="0.5.0", removed_in="0.6.0", current_version=clean_version,
details="Battle Modes are now defined separate from the provider")
def battle(self, info=None, check_battle=False):
"The main battle mode"
if check_battle:
self.wait_for_auto_duel()
self.click_auto_duel()
self.wait_for('OK')
if info:
info.status = "Battle Ended"
self.root.info("NPC Battle Mode,Points: ({},{}) at location: ({}), message: {}".format(
info.x, info.y, info.page, info.status
))
self.wait_for_ui(.5)
self.tap(*self.predefined.button_duel)
self.wait_for('NEXT', True)
self.tapnsleep(self.predefined.button_duel, .5)
self.wait_for('NEXT', True)
self.wait_for_ui(.3)
self.tap(*self.predefined.button_duel)
self.wait_for_white_bottom(True)
self.wait_for_ui(.5)
self.tapnsleep(self.predefined.button_duel, .1)
dialog = True
while dialog:
dialog = self.check_if_battle(self.get_img_from_screen_shot())
if dialog:
self.tap(*self.predefined.button_duel)
self.wait_for_ui(.5)
self.scan_for_ok(LOW_CORR)
self.wait_for_ui(.1)
self.scan_for_ok(LOW_CORR)
# battle_calls = self.run_time.battle_calls
# for section in ["beforeStart", "afterStart", "beforeEnd", "afterEnd"]:
# for value in battle_calls.get(section):
# pass
# self.root.debug(value)
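# Note (added for clarity): check_if_battle looks at the 50x400 strip at the
# bottom of the screen (rows 750-800) and counts near-white pixels (BGR values
# 250-255); if more than half of the strip (50 * 200 pixels) is white, the
# battle prompt is assumed to be on screen.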
def check_if_battle(self, img):
img = np.array(img)
img = img[750:800, 0:400]
blue_min = np.array([250, 250, 250], np.uint8)
blue_max = np.array([255, 255, 255], np.uint8)
amount = cv2.inRange(img, blue_min, blue_max)
if cv2.countNonZero(amount) > (50 * 200):
return True
return False
def determine_autoduel_status(self):
super(Nox, self).determine_autoduel_status()
def check_battle_is_running(self):
super(Nox, self).check_battle_is_running()
def kill_process(self):
try:
if self.is_process_running():
command = "taskkill /im Nox.exe /f"
CREATE_NO_WINDOW = 0x08000000
subprocess.call(command, shell=True, creationflags=CREATE_NO_WINDOW)
except:
self.root.error("The program could not be killed")
def scan_for_ok(self, corr=HIGH_CORR, info=None, img=None):
corrword = look_up_translation_correlation(corr)
self.root.debug("LOOK FOR WORD '{}', {} CORRERLATION".format('OK', corrword))
if img is None:
img = self.get_img_from_screen_shot()
t = tm.Trainer(img, 480, 50)
location = os.path.join(self.assets, "ok_box.png")
return self.__wrapper_kmeans_result__(t, location, corr, info)
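# Note (added for clarity): scan() walks the possible battle points on the
# current page, taps each one and checks whether the battle prompt appears.
# If it does, the duel is verified and handed off to battle_mode(); otherwise
# special events are handled and any leftover dialogs are dismissed via the
# back, close and ok buttons before moving on to the next point.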
def scan(self):
for x, y, current_page in self.possible_battle_points():
self.compare_with_back_button(info=None)
self.wait_for_ui(1)
self.tapnsleep((x, y), .5)
img1 = self.get_img_from_screen_shot()
battle = self.check_if_battle(img1)
self.wait_for_ui(2.5)
dl_info = DuelLinksInfo(x, y, current_page, "Starting Battle")
version = 0
if battle:
self.tapnsleep(self.predefined.dialog_ok, 2.5)
try:
battle, version = self.verify_battle()
except DuelError:
self.tapnsleep(self.predefined.dialog_ok, 2.5)
battle, version = self.verify_battle()
if battle:
self.current_battle = True
self.root.info(battlemode % (x, y, current_page, "Starting Battle"))
self.scan_for_ok(LOW_CORR)
self.battle_mode(battle, version, dl_info)
self.current_battle = False
else:
self.wait_for_ui(2)
self.special_events(dl_info)
dl_info.status = "failure/Back-Button"
loop_scan(self.compare_with_back_button, **{'info': dl_info})
dl_info.status = "failure/Close-Button"
loop_scan(self.scan_for_close, **{'info': dl_info})
dl_info.status = "success/Gift"
loop_scan(self.scan_for_ok, **{'info': dl_info})
self.wait_for_ui(2)
| yugioh-bot | /yugioh-bot-0.6.3.tar.gz/yugioh-bot-0.6.3/bot/providers/nox/nox.py | nox.py
import os
import cv2
import deprecation
import numpy as _np
from bot import clean_version
from bot.providers.predefined import Predefined
from bot.shared import nox_current_version, tupletodict
left = 319
top = 79
width = 80
height = 23
auto_duel_box = (left, top, left + width, top + height)
duel_variant_v = {
'v1' : (230, 690),
'v2-duel' : (120, 690),
'v2-autoduel': (290, 690)
}
class NoxPredefined(Predefined):
files_need = [
"auto_duel_on.png",
"auto_duel_off.png",
"new_duel_variant.png"
]
files_needed_for_comparision = [
"download_button.png"
]
def __init__(self, *args, **kwargs):
super(NoxPredefined, self).__init__(*args, **kwargs)
def run_prechecks(self):
for file in self.files_need:
assert (os.path.exists(os.path.join(self.assets,
file))), "Missing File for stats generations: if you git cloned this repo you probably have a miss configured home!!!"
def generate(self):
self.run_prechecks()
save = {}
temp_dict = self.generate_autoduel_stats()
save = {**save, **temp_dict}
temp_dict = self.generate_duel_button_stats()
save = {**save, **temp_dict}
save['version'] = nox_current_version
self.write_hdf5(save, self.dataset)
@deprecation.deprecated(deprecated_in="0.3.1", removed_in="0.5.0", current_version=clean_version,
details="App is not opened through ADB Call instead of ui touch event")
@property
def yugioh_app_location(self):
""" Note that if you don't place the app icon on the bottom left corner of a 800x480 nox emulator,
you will need to change this """
return 25, 550
@property
def yugioh_initiate_link(self):
return 240, 530
@property
def autoduel(self):
return {
'left' : 319,
'top' : 79,
'width' : 80,
'height': 23,
}
@property
def button_duel(self):
"""Specifies the location of the button to click"""
return 230, 750
@property
def duel_variant(self):
return {
'left' : 210,
'top' : 680,
'width' : 265 - 210,
'height': 710 - 680
}
@property
def auto_duel_location_pre(self):
"""This location points to the autoduel button before the battle starts"""
return tupletodict(680, 300, 30, 120)
@property
def duel_location_pre(self):
"""This location points to the duel button before the battle starts"""
return tupletodict(680, 210, 30, 55)
@property
def page_area(self):
return {
'left' : 0,
'top' : 775,
'width' : 480,
'height': 25,
}
@property
def dialog_ok(self):
return 150, 400
@property
def street_replay(self):
return {
'left' : 181,
'top' : 283,
'width' : 311 - 181,
'height': 307 - 285
}
@property
def quick_rankduel_area(self):
return tupletodict(307, 110, 25, 260)
@property
def duelist_name_area(self):
return tupletodict(590, 0, 25, 150)
@staticmethod
def duel_variant_version(value):
return duel_variant_v.get(value, None)
def generate_autoduel_stats(self):
location = self.assets
autoduelon = os.path.join(location, "auto_duel_on.png")
autodueloff = os.path.join(location, "auto_duel_off.png")
a = self.get_image_stats(cv2.imread(autodueloff), **self.autoduel)
b = self.get_image_stats(cv2.imread(autoduelon), **self.autoduel)
save = {
'auto_duel_off': a,
'auto_duel_on' : b
}
return save
def generate_duel_button_stats(self):
location = self.assets
new_duel_variant = os.path.join(location, "new_duel_variant.png")
im = cv2.imread(new_duel_variant, cv2.COLOR_BGR2GRAY)
edges = cv2.Canny(im, 240, 255)
a = self.get_image_stats(_np.array(edges), **self.duel_variant)
save = {
'duel_variant': a
}
return save
| yugioh-bot | /yugioh-bot-0.6.3.tar.gz/yugioh-bot-0.6.3/bot/providers/nox/predefined.py | predefined.py
BOT_NAME = "yugioh_scraper"
SPIDER_MODULES = ["yugioh_scraper.spiders"]
NEWSPIDER_MODULE = "yugioh_scraper.spiders"
# Crawl responsibly by identifying yourself (and your website) on the user-agent
# USER_AGENT = 'db_yugioh_card (+http://www.yourdomain.com)'
# Obey robots.txt rules
ROBOTSTXT_OBEY = True
# Configure maximum concurrent requests performed by Scrapy (default: 16)
# CONCURRENT_REQUESTS = 32
# Configure a delay for requests for the same website (default: 0)
# See https://docs.scrapy.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
# DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
# CONCURRENT_REQUESTS_PER_DOMAIN = 16
# CONCURRENT_REQUESTS_PER_IP = 16
# Disable cookies (enabled by default)
# COOKIES_ENABLED = False
# Disable Telnet Console (enabled by default)
# TELNETCONSOLE_ENABLED = False
# Override the default request headers:
# DEFAULT_REQUEST_HEADERS = {
# 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
# 'Accept-Language': 'en',
# }
# Enable or disable spider middlewares
# See https://docs.scrapy.org/en/latest/topics/spider-middleware.html
# SPIDER_MIDDLEWARES = {
# 'db_yugioh_card.middlewares.DbYugiohCardSpiderMiddleware': 543,
# }
# Enable or disable downloader middlewares
# See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
# DOWNLOADER_MIDDLEWARES = {
# 'db_yugioh_card.middlewares.DbYugiohCardDownloaderMiddleware': 543,
# }
# Enable or disable extensions
# See https://docs.scrapy.org/en/latest/topics/extensions.html
# EXTENSIONS = {
# 'scrapy.extensions.telnet.TelnetConsole': None,
# }
# Configure item pipelines
# See https://docs.scrapy.org/en/latest/topics/item-pipeline.html
ITEM_PIPELINES = {
"yugioh_scraper.pipelines.DuplicatesPipeline": 500,
"yugioh_scraper.pipelines.CardDetailsPipeline": 600,
}
# Enable and configure the AutoThrottle extension (disabled by default)
# See https://docs.scrapy.org/en/latest/topics/autothrottle.html
# AUTOTHROTTLE_ENABLED = True
# The initial download delay
# AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
# AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
# AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
# AUTOTHROTTLE_DEBUG = False
# Enable and configure HTTP caching (disabled by default)
# See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
HTTPCACHE_ENABLED = True
HTTPCACHE_EXPIRATION_SECS = 60 * 60 * 24 * 7 # 1 week
HTTPCACHE_DIR = "httpcache"
HTTPCACHE_IGNORE_HTTP_CODES = []
HTTPCACHE_STORAGE = "scrapy.extensions.httpcache.FilesystemCacheStorage"
# HTTPCACHE_POLICY = "scrapy.extensions.httpcache.RFC2616Policy"
# Set settings whose default value is deprecated to a future-proof value
REQUEST_FINGERPRINTER_IMPLEMENTATION = "2.7"
TWISTED_REACTOR = "twisted.internet.asyncioreactor.AsyncioSelectorReactor"
DEPTH_PRIORITY = 1
SCHEDULER_DISK_QUEUE = "scrapy.squeues.PickleFifoDiskQueue"
SCHEDULER_MEMORY_QUEUE = "scrapy.squeues.FifoMemoryQueue"
| yugioh-scraper | /yugioh_scraper-0.2.0-py3-none-any.whl/yugioh_scraper/settings.py | settings.py
from scrapy import signals
# useful for handling different item types with a single interface
from itemadapter import is_item, ItemAdapter
class DbYugiohCardSpiderMiddleware:
# Not all methods need to be defined. If a method is not defined,
# scrapy acts as if the spider middleware does not modify the
# passed objects.
@classmethod
def from_crawler(cls, crawler):
# This method is used by Scrapy to create your spiders.
s = cls()
crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
return s
def process_spider_input(self, response, spider):
# Called for each response that goes through the spider
# middleware and into the spider.
# Should return None or raise an exception.
return None
def process_spider_output(self, response, result, spider):
# Called with the results returned from the Spider, after
# it has processed the response.
# Must return an iterable of Request, or item objects.
for i in result:
yield i
def process_spider_exception(self, response, exception, spider):
# Called when a spider or process_spider_input() method
# (from other spider middleware) raises an exception.
# Should return either None or an iterable of Request or item objects.
pass
def process_start_requests(self, start_requests, spider):
# Called with the start requests of the spider, and works
# similarly to the process_spider_output() method, except
# that it doesn’t have a response associated.
# Must return only requests (not items).
for r in start_requests:
yield r
def spider_opened(self, spider):
spider.logger.info("Spider opened: %s" % spider.name)
class DbYugiohCardDownloaderMiddleware:
# Not all methods need to be defined. If a method is not defined,
# scrapy acts as if the downloader middleware does not modify the
# passed objects.
@classmethod
def from_crawler(cls, crawler):
# This method is used by Scrapy to create your spiders.
s = cls()
crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
return s
def process_request(self, request, spider):
# Called for each request that goes through the downloader
# middleware.
# Must either:
# - return None: continue processing this request
# - or return a Response object
# - or return a Request object
# - or raise IgnoreRequest: process_exception() methods of
# installed downloader middleware will be called
return None
def process_response(self, request, response, spider):
# Called with the response returned from the downloader.
# Must either;
# - return a Response object
# - return a Request object
# - or raise IgnoreRequest
return response
def process_exception(self, request, exception, spider):
# Called when a download handler or a process_request()
# (from other downloader middleware) raises an exception.
# Must either:
# - return None: continue processing this exception
# - return a Response object: stops process_exception() chain
# - return a Request object: stops process_exception() chain
pass
def spider_opened(self, spider):
spider.logger.info("Spider opened: %s" % spider.name)
| yugioh-scraper | /yugioh_scraper-0.2.0-py3-none-any.whl/yugioh_scraper/middlewares.py | middlewares.py
import scrapy
class DBYugiohCardSpider(scrapy.Spider):
name = "db_yugioghcard"
allowed_domains = ["yugioh-card.com"]
start_urls = ["https://www.db.yugioh-card.com/yugiohdb/card_list.action"]
def parse(self, response):
product_lists = response.xpath(
'//div[contains(@id, "card_list")]//div[contains(@class, "pac_set")]'
)
for product_list in product_lists:
product_type = product_list.xpath(
'div[contains(@class, "list_title")]//span//text()'
).extract_first()
products = product_list.xpath(
'div[contains(@class, "list_body")]//div[contains(@class, "toggle")]//div[contains(@class, "pack")]'
)
for product in products:
product_name = product.xpath("p//strong//text()").extract_first()
product_link = product.xpath("input//@value").extract_first()
yield {
"product_type": product_type,
"product_name": product_name,
"data_type": "product",
}
yield response.follow(product_link, self.parse_product)
def parse_product(self, response):
cards = response.xpath(
'//div[contains(@id, "card_list")]//div[contains(@class, "t_row")]'
)
for card in cards:
card_name = card.xpath(
'input[contains(@class, "cnm")]//@value'
).extract_first()
card_link = card.xpath(
'input[contains(@class, "link_value")]//@value'
).extract_first()
yield {
"card_name": card_name,
"data_type": "product_card",
}
yield response.follow(card_link, self.parse_card)
def parse_card(self, response):
card_name = (
response.xpath(
'//div[contains(@id, "CardSet")]//div[contains(@id, "cardname")]//h1//text()'
)
.extract_first()
.strip()
)
card_details = response.xpath(
'//div[contains(@id, "CardSet")]//div[contains(@class, "top")]//div[contains(@id, "CardTextSet")]//div[contains(@class, "CardText")]//text()'
).extract()
set_list = response.xpath(
'//div[contains(@id, "update_list")]//div[contains(@class, "t_body")]//div[contains(@class, "t_row")]'
)
for set_item in set_list:
product_release_date = (
set_item.xpath(
'div[contains(@class, "inside")]//div[contains(@class, "time")]//text()'
)
.extract_first()
.strip()
)
product_name = (
set_item.xpath(
'div[contains(@class, "inside")]//div[contains(@class, "contents")]//div[contains(@class, "pack_name")]//text()'
)
.extract_first()
.strip()
)
card_rarity = (
set_item.xpath(
'div[contains(@class, "inside")]//div[contains(@class, "icon")]//div//span//text()'
)
.extract_first()
.strip()
)
yield {
"card_name": card_name,
"card_rarity": card_rarity,
"card_details": card_details,
"product_name": product_name,
"product_release_date": product_release_date,
"data_type": "card",
}
| yugioh-scraper | /yugioh_scraper-0.2.0-py3-none-any.whl/yugioh_scraper/spiders/db_yugioh_card.py | db_yugioh_card.py
The MIT License (MIT)
Copyright (c) 2022 Albert Marrero
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
| yugioh-scraper | /yugioh_scraper-0.2.0-py3-none-any.whl/yugioh_scraper-0.2.0.dist-info/LICENSE.md | LICENSE.md |
**YGOPRODECK API Wrapper**
## Installation
You can install it with pip3:
pip3 install yugioh
## Upgrading
pip3 install --upgrade yugioh
## Usage
### Get card by name
```python3
import yugioh
card = yugioh.get_card(card_name = "The Wicked Dreadroot")
print(card.name) #Returns "The Wicked Dreadroot"
print(card.archetype) #Returns "Wicked God"
print(card.attack) #Returns "4000"
```
### Get card by ID
```python3
import yugioh
card = yugioh.get_card_by_id(card_id = "62180201")
print(card.attack) #Returns "4000"
print(card.name) #Returns "The Wicked Dreadroot"
```
### All cards with `keyword` in their name
```python3
import yugioh
cards = yugioh.get_cards_by_name(keyword = 'magician')
for card in cards.list:
print(card)
```
#### Monster Attributes
Attribute | Description
------------ | -------------
name | The card's name
archetype | The card's archetype
attack | The card's attack points
attribute | The card's attribute
defense | The card's defense points
description | The card's description
id | The card's ID
level | The card's level
race | The card's "race" (still thinking about this one - it's documented in the API as "race", but is formally called "type")
type | Monster/Normal card
cardmarket_price | The card's Cardmarket price
tcgplayer_price | The card's Tcgplayer price
ebay_price | The card's eBay price
amazon_price | The card's Amazon price
coolstuffinc_price | The card's CoolStuffInc price
#### Spell/Trap/Skill Attributes
Attribute | Description
------------ | -------------
description | The card's description
id | The card's ID
name | The card's name
type | The card's type
race | The card's race
cardmarket_price | The card's Cardmarket price
tcgplayer_price | The card's Tcgplayer price
ebay_price | The card's eBay price
amazon_price | The card's Amazon price
coolstuffinc_price | The card's CoolStuffInc price
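For example (a minimal sketch; the card name is only an illustration), the price fields can be read like any other attribute:
```python3
import yugioh

card = yugioh.get_card(card_name = "Dark Magician")
print(card.type)
print(card.tcgplayer_price)
print(card.cardmarket_price)
```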
**Please report all issues [here](https://github.com/ilikepyt/yugioh/issues)**
| yugioh | /yugioh-0.0.25.tar.gz/yugioh-0.0.25/README.md | README.md |
# Yui - personal task manager
Git based personal task manager.
100% command line.
(also, Japanese girl name)
> According to TDD as described in "Extreme Programming: Test Driven Development" by Kent Beck,
we need to start every day with a blank sheet of paper, write down small tasks
and strike out what is done. This is a digital replacement for those paper sheets.
## Install
### Pip 3
```
pip3 install yui
```
## 4 dummies
0. `yui` without arguments will display help
Workflow:
1. Add task to the heap `yui create task name or short descriprion`
1. Show tasks in heap `yui list heap`
2. Pick the task to current day schedule `yui pick %taskId%`
1. Show tasks for current day `yui list cur`
3. Open task in text editor `yui open %taskId%`
1. Task file is just md file with yaml header
2. Adjust status in header
3. Write any notes below
4. Save
4. When you done working, or before next day
1. Move unfinished tasks back to heap `yui reset all`
2. Archive tasks with status *done* `yui archive today`
5. View archive for specific date `yui list 2023-01-11`
6. Run manual git command on task list `yui git %git_command% %git_command_args%`
7. Adjust visible scope with `yui scope`
Optional configuration:
`.yui/config.yaml`
`.yui/projects/%projectName%.yaml`
## How it works
1. There is git repository behind the scenes. So you have history and you can sync tasks using any git server. History, branches, etc.
1. Create/pick/reset/archive commands will invoke `git add` + `git commit` commands automatically.
2. Task data stored in plain text files.
1. Format markdown(.md) with yaml header.
2. You work with single task using plain text editor, like kate
3. yui tool is used to organize and navigate .md files
## The method
### Step 1: Write it down and forget.
Once you spot new task or idea - you just add it to the heap, and continue with your current work. So you stay focused.
```
yui create
```
*Heap* is the most chaotic, unorganised, unsorted, *backlog* you can imagine.
Do not waste your time on details, just stock pile it in the heap as is.
Imagine that you are working with paper stickers and you have a big box of chaotic notes written on stickers - that's the heap.
```
yui list heap
```
Will show you tasks in the heap in form of table
```
id | context | project | name | status
----|----------|------------|------------------------------------------------------------------------|-------
7 | personal | yui | show creation date column in task list | new
15 | personal | yui | sanitize slashes in task filename | new
```
First column is *taskId* you will need it to manipulate the task
### Step 2: pick the task into daily plan
Pick all tasks you are planning to work with today.
```
yui pick %taskId%
```
Will pick the task
```
yui list cur
```
Will display tasks for current day.
If you made a mistake, you can return the task back to heap
```
yui reset %taskId%
```
### Step 3: open the task in text editor
```
yui open %taskId%
```
### Step 4: change status to *work*
Adjust the yaml header: simply replace `status: new` with `status: work`.
You can use any custom statuses, but the built-in ones - *new*, *work*, *done* - will be highlighted in `yui list` output.
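A task file might look roughly like this (only the `status` field is documented here; the other field names are illustrative):
```
---
status: work
project: yui
context: personal
---
Free-form notes go below the header.
```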
### Step 5: Make notes while you are working on the task
As for it's just .md file, you can make any notes behind yaml header.
### Note: you can work with as many tasks at once as you want
That's just plain text files, after all.
### Step 6: change task status to *done*
In yaml header, replace `status: work` with `status: done`.
### Step 7: check your progress
```
yui list cur
```
Will get more green lines while you complete the tasks.
### Step 8: Cleanup workspace
```
yui reset all
```
Will return unfinished tasks back to the heap
```
yui archive today
```
Will move only *done* tasks to the archive folder for the current day. You can also use "yesterday", specify a date as "YYYY-MM-DD", or anything else recognized by the php `strtotime()` function.
To view archived tasks use:
```
yui list YYYY-MM-DD
```
### Step 9: apply git
All git commands are mapped with `yui git`.
Most used:
- `yui git log` history of changes in tasks
- `yui git remote add origin %link%` link your task list with remote repository
- `yui git push` save local changes to remote repository
- `yui git pull` load fresh changes from remote repository
## Apllication working folder
Defaut:
- Linux: `~/.yui`
- Windows: `c:\Users\%username%\AppData\Local\yui`
Can be overwriten with `YUI_HOME` environment variable, like `YUI_HOME=mypath yui list heap`
## config.yaml
> Location: %application-working-folder%/config.yaml
Example:
```yaml
---
# % will be replaced with filename
# Examples:
# To run gui editor: nohup kate % > /dev/null 2>&1 &
# To run in terminal: mcedit %
# You can use nano, vi, mcedit etc
# If not specified, yui will try the EDITOR environment variable and then mcedit, nano, vim, vi, ee in that order
editor: nohup kate % > /dev/null 2>&1 &
---
```
## Project configuration
> Location: %application-working-folder%/projects/%projectName%.yaml
Example file:projects/myProject.yaml:
```yaml
---
# this will make YUI_PROJECT=myProject yui create test
# work the same way as YUI_PROJECT=myProject YUI_CONTEXT=myContext yui create test
defaultContext: myContext
---
```
## View archives
```
yui list_archives
```
Will display list of existing archives, from application working folder
To view tasks in single archive use
```
yui list %archiveName%
```
## Advanced options
You can pick range of items like so `pick 141,142,143..154`
| yui | /yui-1.0.8.tar.gz/yui-1.0.8/README.md | README.md |
# 羽箭 Yujian
An asynchronous RabbitMQ HTTP API client built on asyncio.

The original motivation for this library was the Jiama console command, which needs to display RPC servers and clients; after looking around, I could not find an asynchronous library with Python 3 support, so I wrote this one.
RabbitMQ can be installed with Docker; see the [official site](https://www.rabbitmq.com/download.html) for details.
```shell
sudo docker run -d --name rabbitmq -p 5672:5672 -p 15672:15672 rabbitmq:3.10-management
```
Once it is running with the management plugin enabled, the RabbitMQ HTTP API documentation is available at http://server-name:15672/api/.
### Install
``` shell
pip install yujian
```
### API
#### yujian.api.config
The RabbitMQ HTTP API configuration dictionary. Keys define the client method names, and values define the API path, HTTP method, required fields, default values and so on, for example:
```python
'overview': {
'uri': '/api/overview',
'method': 'get',
'option': {
'columns': [
'rabbitmq_version',
'cluster_name',
'queue_totals.messages',
'object_totals.queues',
]
},
},
'whoami': {'uri': '/api/whoami', 'method': 'get'},
```
You can extend this configuration as needed.
#### client = await Client().init('http://localhost:15672')
Initializes the client.
#### await client.close()
Closes the client.
#### await client.declare_queue(name, vhost=None, **kwargs)
- `name` str - queue name
- `vhost` str - virtual host the queue belongs to, defaults to `/`
- `kwargs` Any - other supported parameters, see the [Rabbitmq HTTP API DOC](https://rawcdn.githack.com/rabbitmq/rabbitmq-server/v3.11.2/deps/rabbitmq_management/priv/www/api/index.html)
#### await client.list_queue(vhost, columns, **kwargs)
- `vhost` str - virtual host the queues belong to, defaults to `/`
- `columns` list[str] - columns to include in the result
- `kwargs` Any - other supported parameters
#### await client.delete_queue(name, vhost)
- `name` str - queue name
- `vhost` str - virtual host the queue belongs to
#### await client.declare_exchange(name, type, vhost, **kwargs)
- `name` str - exchange name
- `type` str - exchange type
- `vhost` str - virtual host the exchange belongs to, defaults to `/`
- `kwargs` Any - other supported parameters, see the [Rabbitmq HTTP API DOC](https://rawcdn.githack.com/rabbitmq/rabbitmq-server/v3.11.2/deps/rabbitmq_management/priv/www/api/index.html)
#### await client.declare_binding(source, routing_key, destination, destination_type, vhost, **kwargs)
- `source` str - binding source, the exchange name
- `routing_key` str - routing key of the binding
- `destination` str - binding destination, the name of an exchange or queue
- `destination_type` str - destination type, exchange or queue
- `vhost` str - virtual host the binding belongs to, defaults to `/`
- `kwargs` Any - other supported parameters, see the [Rabbitmq HTTP API DOC](https://rawcdn.githack.com/rabbitmq/rabbitmq-server/v3.11.2/deps/rabbitmq_management/priv/www/api/index.html)
#### await client.publish_message(payload, routing_key, properties, exchange, vhost, **kwargs)
- `payload` str - message body
- `routing_key` - routing key
- `properties` - message properties
- `exchange` - exchange
- `vhost` - virtual host
- `kwargs` - other supported parameters, see the [Rabbitmq HTTP API DOC](https://rawcdn.githack.com/rabbitmq/rabbitmq-server/v3.11.2/deps/rabbitmq_management/priv/www/api/index.html)
#### await client.invoke(act, **kwargs)
- `act` str - the action to execute, corresponding to a key in yujian.api.config
- `kwargs` Any - parameters to pass to the action
#### client.__getattr__(method)
- `method` str - method name, corresponding to a key in yujian.api.config
With the help of `__getattr__`, you can extend `yujian.api.config` freely according to the [Rabbitmq HTTP API DOC](https://rawcdn.githack.com/rabbitmq/rabbitmq-server/v3.11.2/deps/rabbitmq_management/priv/www/api/index.html) and call any key directly as a method on the `client` object.
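For example (a minimal sketch: `list_node` and the `/api/nodes` entry are added purely for illustration, assuming `config` is importable from `yujian.api` as documented above):
```python
import asyncio

from yujian.api import Client, config

# expose the /api/nodes endpoint as client.list_node()
config['list_node'] = {'uri': '/api/nodes', 'method': 'get'}


async def main():
    c = await Client().init('http://localhost:15672')
    nodes = await c.list_node()
    print(nodes)
    await c.close()


if __name__ == '__main__':
    asyncio.run(main())
```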
### Example
``` python
from loguru import logger
from yujian.api import Client
async def main():
c = await Client().init('http://192.168.56.109:15672/')
r20 = await c.whoami()
r21 = await c.list_exchange(columns=['name'])
r22 = await c.list_queue(
columns=['vhost', 'name', 'node', 'messages'], sort='name', sort_reverse='true'
)
r23 = await c.list_user()
r24 = await c.get_user(name='guest')
r25 = await c.get_vhost(name='%2F')
r26 = await c.get_permission('guest')
r27 = await c.get_queue('test_queue_2')
r30 = await c.invoke('declare_queue', name='test_queue')
r31 = await c.invoke(
'list_queue',
columns=['vhost', 'name', 'node', 'messages'],
sort='name',
sort_reverse='true',
)
r32 = await c.invoke('delete_queue', name='test_queue')
r33 = await c.invoke('declare_exchange', name='test_exchange', type='direct')
r34 = await c.invoke(
'get_message',
queue='test_queue',
count=5,
ackmode='ack_requeue_true',
encoding='auto',
)
r35 = await c.invoke(
'publish_message',
exchange='test_exchange',
payload='ko ko ko',
routing_key='test_queue',
payload_encoding='string',
properties={},
)
r36 = await c.invoke(
'declare_binding',
source='test_exchange',
destination='test_queue',
destination_type='queue',
routing_key='test_queue',
)
r37 = await c.invoke('whoami')
await c.close()
if __name__ == '__main__':
config = {
'handlers': [
{
'sink': sys.stdout,
'format': '<green>{time:YYYY-MM-DD at HH:mm:ss}</green> {level} <level>{message}</level>',
},
],
}
logger.configure(**config)
asyncio.run(main())
```
### License
[MIT](LICENSE) © Li zhigang
| yujian | /yujian-0.8.4.tar.gz/yujian-0.8.4/README.md | README.md
Yujin Tools
=========
Utilities for yujin development. These are system tools (i.e. not deployed within a
ros environment).
## Tools
* **Workspace Tools**
* `yujin_tools_settings` : configure the rosdistro and rosinstall database the yujin tools should work with.
* `yujin_init_workspace` : easy rosinstaller, drop in a listed rosinstall from the yujin tools rosinstall database.
* `yujin_init_build` : configure a build dir and populate with environment startup scripts (esp. useful for parallel builds)
* `yujin_make` : catkin_make on drugs
* **Rocon**
* `avahi-browse-concerts`
* `avahi-browse-multimaster`
* `masteruri` : quickly switch between ROS_MASTER_URI's (e.g. `. masteruri 2` -> 11312)
* **General Purpose**
* `yujin_list_git_branches` : list branches of all rosinstalls in a ros source workspace.
## Usage
### Installation
> sudo apt-get install python-pip
> sudo pip install --upgrade yujin_tools
### Usage
**yujin_tools_settings**, **yujin_init_workspace**, **yujin_init_build**, **yujin_make**
* [Yujin Init Tools](https://github.com/yujinrobot/yujin_tools/wiki/yujin-init)
**masteruri**
Allows checking and switching of `ROS_MASTER_URI`'s on localhost.
# To check the current value:
> masteruri
http://localhost:11311
# To switch
> . masteruri 2
> masteruri
http://localhost:11312
**yujin_list_git_branches**
Use in the src directory of a catkin_make/yujin_make installed source workspace populated with git
clones. This will list the branches in each and highlight the currently used branch.
**git-big-picture**
This is a nice tool from https://github.com/esc/git-big-picture. It will let you easily visualise your
bitbucket repos (doesn't have a network visualiser like github). To view the current working directory's
git repo in chrome, firefox:
```
> git-big-picture -v google-chrome
> git-big-picture -v firefox
```
You can also make a git alias for it:
```
> git config --global alias.bp "big-picture -v google-chrome"
> git bp
```
## Developing & Packaging
For the yujin guys:
> git clone https://github.com/yujinrobot/yujin_tools.git
> cd yujin_tools
* make some changes
* bump the version in `setup.py`
* add a note to the `Changelog`
Finally, upload
> make release
See Daniel if you need permissions for uploading.
| yujin_tools | /yujin_tools-0.4.54.tar.gz/yujin_tools-0.4.54/README.md | README.md |
from __future__ import print_function
##############################################################################
# Imports
##############################################################################
import io
import os
import subprocess
import sys
import stat
try:
from catkin_pkg.cmake import configure_file, get_metapackage_cmake_template_path
except ImportError as e:
sys.exit(
'ImportError: "from catkin_pkg.topological_order import '
'topological_order" failed: %s\nMake sure that you have installed '
'"catkin_pkg", it is up to date and on the PYTHONPATH.' % e
)
##############################################################################
# Local Imports
##############################################################################
from .terminal_color import ansi, fmt, sanitize
##############################################################################
# Methods
##############################################################################
def split_arguments(args, splitter_name, default=None):
if splitter_name not in args:
return args, default
index = args.index(splitter_name)
return args[0:index], args[index + 1:]
def extract_cmake_and_make_arguments(args):
cmake_args = []
make_args = []
if '--cmake-args' in args and '--make-args' in args:
cmake_index = args.index('--cmake-args')
make_index = args.index('--make-args')
# split off last argument group first
if cmake_index < make_index:
args, make_args = split_arguments(args, '--make-args')
args, cmake_args = split_arguments(args, '--cmake-args')
else:
args, cmake_args = split_arguments(args, '--cmake-args')
args, make_args = split_arguments(args, '--make-args')
elif '--cmake-args' in args:
args, cmake_args = split_arguments(args, '--cmake-args')
elif '--make-args' in args:
args, make_args = split_arguments(args, '--make-args')
# classify -D* and -G* arguments as cmake specific arguments
implicit_cmake_args = [a for a in args if a.startswith('-D') or a.startswith('-G')]
args = [a for a in args if a not in implicit_cmake_args]
return args, implicit_cmake_args + cmake_args, make_args
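# Note (added for clarity): given
# ['--force-cmake', '-DCMAKE_BUILD_TYPE=Release', '--make-args', '-j4'],
# extract_cmake_and_make_arguments returns
# (['--force-cmake'], ['-DCMAKE_BUILD_TYPE=Release'], ['-j4']); -D*/-G* flags are
# treated as implicit cmake arguments even when they appear outside --cmake-args.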
def cprint(msg, end=None):
print(fmt(msg), end=end)
def colorize_line(line):
cline = sanitize(line)
cline = cline.replace(
'-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
'-- @{pf}~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~@|'
)
if line.startswith('-- ~~'):
# -- ~~ -
cline = cline.replace('~~ ', '@{pf}~~ @|')
cline = cline.replace(' - ', ' - @!@{bf}')
cline = cline.replace('(', '@|(')
cline = cline.replace('(plain cmake)', '@|(@{rf}plain cmake@|)')
cline = cline.replace('(unknown)', '@|(@{yf}unknown@|)')
if line.startswith('-- +++'):
# -- +++ add_subdirectory(package)
cline = cline.replace('+++', '@!@{gf}+++@|')
cline = cline.replace('kin package: \'', 'kin package: \'@!@{bf}')
cline = cline.replace(')', '@|)')
cline = cline.replace('\'\n', '@|\'\n')
cline = cline.replace('cmake package: \'', 'cmake package: \'@!@{bf}')
cline = cline.replace('\'\n', '@|\'\n')
if line.startswith('-- ==>'):
cline = cline.replace('-- ==>', '-- @!@{bf}==>@|')
if line.lower().startswith('warning'):
# WARNING
cline = ansi('yf') + cline
if line.startswith('CMake Warning'):
# CMake Warning...
cline = cline.replace('CMake Warning', '@{yf}@!CMake Warning@|')
if line.startswith('ERROR:'):
# ERROR:
cline = cline.replace('ERROR:', '@!@{rf}ERROR:@|')
if line.startswith('CMake Error'):
# CMake Error...
cline = cline.replace('CMake Error', '@{rf}@!CMake Error@|')
if line.startswith('Call Stack (most recent call first):'):
# CMake Call Stack
cline = cline.replace('Call Stack (most recent call first):',
'@{cf}@_Call Stack (most recent call first):@|')
return fmt(cline)
def print_command_banner(cmd, cwd, color):
if color:
# Prepare for printing
cmd_str = sanitize(' '.join(cmd))
cwd_str = sanitize(cwd)
# Print command notice
cprint('@{bf}####')
cprint('@{bf}#### Running command: @!"%s"@|@{bf} in @!"%s"' % (cmd_str, cwd_str))
cprint('@{bf}####')
else:
print('####')
print('#### Running command: "%s" in "%s"' % (' '.join(cmd), cwd))
print('####')
def run_command_colorized(cmd, cwd, quiet=False, env=None):
run_command(cmd, cwd, quiet=quiet, colorize=True, env=env)
def run_command(cmd, cwd, quiet=False, colorize=False, env=None):
capture = (quiet or colorize)
stdout_pipe = subprocess.PIPE if capture else None
stderr_pipe = subprocess.STDOUT if capture else None
try:
proc = subprocess.Popen(
cmd, cwd=cwd, shell=False,
stdout=stdout_pipe, stderr=stderr_pipe, env=env
)
except OSError as e:
raise OSError("Failed command '%s': %s" % (cmd, e))
out = io.StringIO() if quiet else sys.stdout
if capture:
while True:
line = proc.stdout.readline().decode('utf8', 'replace')
line = unicode(line)
if proc.returncode is not None or not line:
break
try:
line = colorize_line(line) if colorize else line
except Exception as e:
import traceback
traceback.print_exc()
print('<yujin_make> color formatting problem: ' + str(e),
file=sys.stderr)
out.write(line)
proc.wait()
if proc.returncode:
if quiet:
print(out.getvalue())
raise subprocess.CalledProcessError(proc.returncode, ' '.join(cmd))
return out.getvalue() if quiet else ''
blue_arrow = '@!@{bf}==>@|@!'
def _check_build_dir(name, workspace, buildspace):
package_build_dir = os.path.join(buildspace, name)
if not os.path.exists(package_build_dir):
cprint(
blue_arrow + ' Creating build directory: \'' +
os.path.relpath(package_build_dir, workspace) + '\'@|'
)
os.mkdir(package_build_dir)
return package_build_dir
def isolation_print_command(cmd, path=None):
cprint(
blue_arrow + " " + sanitize(cmd) + "@|" +
(" @!@{kf}in@| '@!" + sanitize(path) + "@|'" if path else '')
)
def get_python_path(path):
python_path = []
lib_path = os.path.join(path, 'lib')
if os.path.exists(lib_path):
items = os.listdir(lib_path)
for item in items:
if os.path.isdir(os.path.join(lib_path, item)) and item.startswith('python'):
python_items = os.listdir(os.path.join(lib_path, item))
for py_item in python_items:
if py_item in ['dist-packages', 'site-packages']:
py_path = os.path.join(lib_path, item, py_item)
if os.path.isdir(py_path):
python_path.append(py_path)
return python_path
def build_catkin_package(
path, package,
workspace, buildspace, develspace, installspace,
install, jobs, force_cmake, quiet, cmake_args, make_args,
catkin_python_path
):
cprint(
"Processing @{cf}catkin@| package: '@!@{bf}" +
package.name + "@|'"
)
# Make the build dir
build_dir = _check_build_dir(package.name, workspace, buildspace)
# Help find catkin cmake and python
env = os.environ.copy()
try:
env['PYTHONPATH'] = env['PYTHONPATH'] + os.pathsep + catkin_python_path
except KeyError:
env['PYTHONPATH'] = catkin_python_path
# Check for Makefile and maybe call cmake
makefile = os.path.join(build_dir, 'Makefile')
# check if toolchain.cmake, config.cmake exist
toolchain_cmd = "-DCMAKE_TOOLCHAIN_FILE=%s" % os.path.join(workspace, 'toolchain.cmake') if os.path.isfile(os.path.join(workspace, 'toolchain.cmake')) else None
config_cmd = "-C%s" % os.path.join(workspace, 'config.cmake') if os.path.isfile(os.path.join(workspace, 'config.cmake')) else None
if not os.path.exists(makefile) or force_cmake:
package_dir = os.path.dirname(package.filename)
if not os.path.exists(os.path.join(package_dir, 'CMakeLists.txt')):
export_tags = [e.tagname for e in package.exports]
if 'metapackage' not in export_tags:
print(colorize_line('Error: Package "%s" does not have a CMakeLists.txt file' % package.name))
raise RuntimeError('Can not build catkin package without CMakeLists.txt file')
# generate CMakeLists.txt for metapackages without one
print(colorize_line('Warning: metapackage "%s" should have a CMakeLists.txt file' % package.name))
cmake_code = configure_file(
get_metapackage_cmake_template_path(),
{'name': package.name, 'metapackage_arguments': 'DIRECTORY "%s"' % package_dir})
cmakelists_txt = os.path.join(build_dir, 'CMakeLists.txt')
with open(cmakelists_txt, 'w') as f:
f.write(cmake_code)
package_dir = build_dir
# Run cmake
cmake_cmd = [
'cmake',
package_dir,
]
if toolchain_cmd:
cmake_cmd.append(toolchain_cmd)
if config_cmd:
cmake_cmd.append(config_cmd)
cmake_cmd.extend(cmake_args)
isolation_print_command(' '.join(cmake_cmd))
#if last_env is not None:
# cmake_cmd = [last_env] + cmake_cmd
try:
run_command_colorized(cmake_cmd, build_dir, quiet, env=env)
except subprocess.CalledProcessError as e:
# remove Makefile to force CMake invocation next time
os.remove(makefile)
raise
else:
print('Makefile exists, skipping explicit cmake invocation...')
# Check to see if cmake needs to be run via make
make_check_cmake_cmd = ['make', 'cmake_check_build_system']
isolation_print_command(' '.join(make_check_cmake_cmd), build_dir)
#if last_env is not None:
# make_check_cmake_cmd = [last_env] + make_check_cmake_cmd
run_command_colorized(
make_check_cmake_cmd, build_dir, quiet, env=env
)
# Run make
make_cmd = ['make', '-j' + str(jobs), '-l' + str(jobs)]
make_cmd.extend(make_args)
isolation_print_command(' '.join(make_cmd), build_dir)
#if last_env is not None:
# make_cmd = [last_env] + make_cmd
run_command(make_cmd, build_dir, quiet, env=env)
# Make install
if install:
make_install_cmd = ['make', 'install']
isolation_print_command(' '.join(make_install_cmd), build_dir)
#if last_env is not None:
# make_install_cmd = [last_env] + make_install_cmd
run_command(make_install_cmd, build_dir, quiet, env=env)
def build_cmake_package(
path, package,
workspace, buildspace, develspace, installspace,
install, jobs, force_cmake, quiet, cmake_args, make_args,
catkin_cmake_path
):
# ROS typically puts the package devel space as devel/<pkg_name>.
# Undesirable here since we want it to do normal cmake installs
# of everything into devel/ OR have environment chaining
# everywhere. ugh. Changing this for now - DJS.
develspace = os.path.abspath(os.path.join(develspace, os.pardir))
# Notify the user that we are processing a plain cmake package
cprint(
"Processing @{cf}plain cmake@| package: '@!@{bf}" + package.name +
"@|'"
)
# Make the build dir
build_dir = _check_build_dir(package.name, workspace, buildspace)
# Check for Makefile and maybe call cmake
makefile = os.path.join(build_dir, 'Makefile')
install_target = installspace if install else develspace
if not os.path.exists(makefile) or force_cmake:
# Call cmake
cmake_cmd = [
'cmake',
os.path.dirname(package.filename),
'-DCMAKE_INSTALL_PREFIX=' + install_target
]
cmake_cmd.extend(cmake_args)
isolation_print_command(' '.join(cmake_cmd))
run_command_colorized(cmake_cmd, build_dir, quiet)
else:
print('Makefile exists, skipping explicit cmake invocation...')
# Check to see if cmake needs to be run via make
make_check_cmake_cmd = ['make', 'cmake_check_build_system']
isolation_print_command(' '.join(make_check_cmake_cmd), build_dir)
run_command_colorized(
make_check_cmake_cmd, build_dir, quiet
)
# Run make
make_cmd = ['make', '-j' + str(jobs), '-l' + str(jobs)]
make_cmd.extend(make_args)
isolation_print_command(' '.join(make_cmd), build_dir)
run_command(make_cmd, build_dir, quiet)
# Make install
make_install_cmd = ['make', 'install']
isolation_print_command(' '.join(make_install_cmd), build_dir)
run_command(make_install_cmd, build_dir, quiet)
# If we are installing, and a env.sh exists, don't overwrite it
# if install and os.path.exists(os.path.join(installspace, 'env.sh')):
# return
# cprint(blue_arrow + " Generating an env.sh")
# Generate env.sh for chaining to catkin packages
# new_env_path = os.path.join(install_target, 'env.sh')
# variables = {
# 'SETUP_DIR': install_target,
# 'SETUP_FILENAME': 'setup'
# }
# with open(os.path.join(new_env_path), 'w') as f:
# f.write(configure_file(os.path.join(catkin_cmake_path, 'templates', 'env.sh.in'), variables))
# os.chmod(new_env_path, stat.S_IXUSR | stat.S_IWUSR | stat.S_IRUSR)
#
# # Generate setup.sh for chaining to catkin packages
# new_setup_path = os.path.join(install_target, 'setup.sh')
# subs = {}
# subs['cmake_prefix_path'] = install_target + ":"
# subs['ld_path'] = os.path.join(install_target, 'lib') + ":"
# pythonpath = ":".join(get_python_path(install_target))
# if pythonpath:
# pythonpath += ":"
# subs['pythonpath'] = pythonpath
# subs['pkgcfg_path'] = os.path.join(install_target, 'lib', 'pkgconfig')
# subs['pkgcfg_path'] += ":"
# subs['path'] = os.path.join(install_target, 'bin') + ":"
# if not os.path.exists(install_target):
# os.mkdir(install_target)
# with open(new_setup_path, 'w+') as file_handle:
# file_handle.write("""\
##!/usr/bin/env sh
## generated from catkin.builder module
#
#""")
# if last_env is not None:
# last_setup_env = os.path.join(os.path.dirname(last_env), 'setup.sh')
# file_handle.write('. %s\n\n' % last_setup_env)
# file_handle.write("""\
## detect if running on Darwin platform
#UNAME=`which uname`
#UNAME=`$UNAME`
#IS_DARWIN=0
#if [ "$UNAME" = "Darwin" ]; then
# IS_DARWIN=1
#fi
#
## Prepend to the environment
#export CMAKE_PREFIX_PATH="{cmake_prefix_path}$CMAKE_PREFIX_PATH"
#if [ $IS_DARWIN -eq 0 ]; then
# export LD_LIBRARY_PATH="{ld_path}$LD_LIBRARY_PATH"
#else
# export DYLD_LIBRARY_PATH="{ld_path}$DYLD_LIBRARY_PATH"
#fi
#export PATH="{path}$PATH"
#export PKG_CONFIG_PATH="{pkgcfg_path}$PKG_CONFIG_PATH"
#export PYTHONPATH="{pythonpath}$PYTHONPATH"
#
#exec "$@"
#""".format(**subs)
# )
def build_package(
path, package,
workspace, buildspace, develspace, installspace,
install, jobs, force_cmake, quiet, cmake_args, make_args,
number=None, of=None,
catkin_cmake_path=None,
catkin_python_path=None
):
cprint('@!@{gf}==>@| ', end='')
#new_last_env = get_new_env(package, develspace, installspace, install, last_env)
build_type = _get_build_type(package)
if build_type == 'catkin':
build_catkin_package(
path, package,
workspace, buildspace, develspace, installspace,
install, jobs, force_cmake, quiet, cmake_args, make_args,
catkin_python_path
)
#if not os.path.exists(new_last_env):
# raise RuntimeError(
# "No env.sh file generated at: '" + new_last_env +
# "'\n This sometimes occurs when a non-catkin package is "
# "interpreted as a catkin package.\n This can also occur "
# "when the cmake cache is stale, try --force-cmake."
# )
elif build_type == 'cmake':
build_cmake_package(
path, package,
workspace, buildspace, develspace, installspace,
install, jobs, force_cmake, quiet, cmake_args, make_args,
catkin_cmake_path
)
else:
raise RuntimeError('Can not build package with unknown build_type')
if number is not None and of is not None:
msg = ' [@{gf}@!' + str(number) + '@| of @!@{gf}' + str(of) + '@|]'
else:
msg = ''
cprint('@{gf}<==@| Finished processing package' + msg + ': \'@{bf}@!' +
package.name + '@|\'')
def get_new_env(package, develspace, installspace, install, last_env):
new_env = None
build_type = _get_build_type(package)
if build_type in ['catkin', 'cmake']:
new_env = os.path.join(
installspace if install else develspace,
'env.sh'
)
return new_env
def _get_build_type(package):
build_type = 'catkin'
if 'build_type' in [e.tagname for e in package.exports]:
build_type = [e.content for e in package.exports if e.tagname == 'build_type'][0]
return build_type
def cmake_input_changed(packages, build_path, install=None, cmake_args=None, filename='catkin_make'):
# get current input
package_paths = os.pathsep.join(sorted(packages.keys()))
cmake_args = ' '.join(cmake_args) if cmake_args else ''
# file to store current input
changed = False
install_toggled = False
input_filename = os.path.join(build_path, '%s.cache' % filename)
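# The cache file holds three lines: the sorted package paths, the cmake args string and the install flag.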
if not os.path.exists(input_filename):
changed = True
else:
# compare with previously stored input
with open(input_filename, 'r') as f:
previous_package_paths = f.readline().rstrip()
previous_cmake_args = f.readline().rstrip()
previous_install = f.readline().rstrip() == str(True)
if package_paths != previous_package_paths:
changed = True
if cmake_args != previous_cmake_args:
changed = True
if install is not None and install != previous_install:
install_toggled = True
# store current input for next invocation
with open(input_filename, 'w') as f:
f.write('%s\n%s\n%s' % (package_paths, cmake_args, install))
return changed, install_toggled
| yujin_tools | /yujin_tools-0.4.54.tar.gz/yujin_tools-0.4.54/src/catkin_make/builder.py | builder.py
from __future__ import print_function
import string
import os
_ansi = {}
def ansi(key):
"""Returns the escape sequence for a given ansi color key"""
global _ansi
return _ansi[key]
def enable_ANSI_colors():
"""
Populates the module-level dictionary `_ansi` with ANSI escape sequences.
"""
global _ansi
color_order = [
'black', 'red', 'green', 'yellow', 'blue', 'purple', 'cyan', 'white'
]
short_colors = {
'black': 'k', 'red': 'r', 'green': 'g', 'yellow': 'y', 'blue': 'b',
'purple': 'p', 'cyan': 'c', 'white': 'w'
}
_ansi = {
'escape': '\033', 'reset': 0, '|': 0,
'boldon': 1, '!': 1, 'italicson': 3, '/': 3, 'ulon': 4, '_': 4,
'invon': 7, 'boldoff': 22, 'italicsoff': 23,
'uloff': 24, 'invoff': 27
}
# Convert plain numbers to escapes
for key in _ansi:
if key != 'escape':
_ansi[key] = '{0}[{1}m'.format(_ansi['escape'], _ansi[key])
# Foreground
for index, color in enumerate(color_order):
_ansi[color] = '{0}[{1}m'.format(_ansi['escape'], 30 + index)
_ansi[color + 'f'] = _ansi[color]
_ansi[short_colors[color] + 'f'] = _ansi[color + 'f']
# Background
for index, color in enumerate(color_order):
_ansi[color + 'b'] = '{0}[{1}m'.format(_ansi['escape'], 40 + index)
_ansi[short_colors[color] + 'b'] = _ansi[color + 'b']
# Fmt sanitizers
_ansi['atexclimation'] = '@!'
_ansi['atfwdslash'] = '@/'
_ansi['atunderscore'] = '@_'
_ansi['atbar'] = '@|'
def disable_ANSI_colors():
"""
Sets all the ANSI escape sequences to empty strings, effectively disabling
console colors.
"""
global _ansi
for key in _ansi:
_ansi[key] = ''
# Default to ansi colors on
enable_ANSI_colors()
if os.name in ['nt']:
disable_ANSI_colors()
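# string.Template subclass using '@' as the delimiter so that annotations such as @{boldon} get substituted.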
class ColorTemplate(string.Template):
delimiter = '@'
def sanitize(msg):
"""Sanitizes the existing msg, use before adding color annotations"""
msg = msg.replace('@', '@@')
msg = msg.replace('{', '{{')
msg = msg.replace('}', '}}')
msg = msg.replace('@@!', '@{atexclimation}')
msg = msg.replace('@@/', '@{atfwdslash}')
msg = msg.replace('@@_', '@{atunderscore}')
msg = msg.replace('@@|', '@{atbar}')
return msg
def fmt(msg):
"""Replaces color annotations with ansi escape sequences"""
global _ansi
msg = msg.replace('@!', '@{boldon}')
msg = msg.replace('@/', '@{italicson}')
msg = msg.replace('@_', '@{ulon}')
msg = msg.replace('@|', '@{reset}')
t = ColorTemplate(msg)
return t.substitute(_ansi) + ansi('reset')
| yujin_tools | /yujin_tools-0.4.54.tar.gz/yujin_tools-0.4.54/src/catkin_make/terminal_color.py | terminal_color.py
import os
import uuid
import xml.sax
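# SAX handler that splits a steering XML document into the content of its <execute>
# section (accumulated in executeXml) and everything else (accumulated in configXml).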
class XmlHandler(xml.sax.ContentHandler):
def __init__(self):
self.inExecute = False
self.executeXml = ""
self.configXml = ""
def init(self):
self.inExecute = False
self.executeXml = ""
self.configXml = ""
def startElement(self, tag, attributes):
if tag == "execute":
self.inExecute = True
self.executeXml += " <execute>\n"
if tag == "processor" and self.inExecute:
proc_type = attributes["type"]
self.executeXml += "<processor name=\"std{0}\"/>\n".format(proc_type)
self.configXml += " <processor name=\"std{0}\" type=\"{0}\"/>\n".format(proc_type)
if tag == "processor" and (not self.inExecute):
# assumed intent: record the processor by its "name" attribute when it appears outside <execute>
self.executeXml += "<processor name=\"{0}\"/>\n".format(attributes["name"])
def endElement(self, tag):
if tag == "execute":
self.inExecute = False
self.executeXml += "</execute>\n"
def characters(self, content):
if content.endswith("\n"):
content = content[:-1]
if self.inExecute:
self.executeXml += content
else:
self.configXml += content
class ConfigDatabase(object):
""" Configuration database
"""
ins = None
def __local_dir(self):
local_dir = os.environ["HOME"] + "/.yuki/local/configurations/"
if not os.path.exists(local_dir):
os.makedirs(local_dir)
return local_dir
def __official_dir(self):
local_dir = os.environ["HOME"] + "/.yuki/official/configurations/"
if not os.path.exists(local_dir):
os.makedirs(local_dir)
return local_dir
@classmethod
def instance(cls):
""" Return the singleton database instance
"""
if cls.ins is None:
cls.ins = ConfigDatabase()
return cls.ins
def __init__(self):
self.handler = XmlHandler()
def listXmls(self):
path = self.__local_dir()
return os.listdir(path)
def new_config(self, template=None):
uid = uuid.uuid4().hex
file_name = self.__local_dir() + "/{0}".format(uid)
open(file_name, "w").close()
return file_name
def __getXml(self, config):
path = self.__local_dir() + "/" + config
with open(path) as f:
return f.read()
def getHeaderXml(self):
return """<!--
This xml configuration file is automatically generated by Yuki.
Non-experts should not modify the core of it.
-->
"""
def getExecuteXml(self, config):
xmlstring = self.__getXml(config)
self.handler.init()
xml.sax.parseString(xmlstring, self.handler)
return self.handler.executeXml
def getConfigXml(self, config):
xmlstring = self.__getXml(config)
self.handler.init()
xml.sax.parseString(xmlstring, self.handler)
return self.handler.configXml
def get_output_xml(self, output_file, config, release):
return """ <processor name="yukiLCIOWriter" type="YukiLCIOWriter">
<parameter name="LCIOOutputFile" type="string" >
{0}
</parameter>
<parameter name="release" type="string" value="{1}"/>
<parameter name="config" type="string" value="{2}"/>
<parameter name="LCIOWriteMode" type="string" value="WRITE_NEW"/>
</processor>
""".format(output_file, release, config)
def getGlobalXml(self, input_file):
return """ <global>
<parameter name="LCIOInputFiles">
{0}
</parameter>
<parameter name="GearXMLFile">
GearOutput.xml
</parameter>
<parameter name="MaxRecordNumber" value="-1"/>
<parameter name="SkipNEvents" value="-1"/>
<parameter name="SupressCheck" value="false"/>
<parameter name="Verbosity" options="DEBUG0-4,MESSAGE0-4,WARNING0-4,ERROR0-4" />
<parameter name="RandomSeed" value="1234567890" />
</global>
""".format(input_file)
configdb = ConfigDatabase.instance()
| yuki | /yuki-1.0.6.tar.gz/yuki-1.0.6/Yuki/configDatabase.py | configDatabase.py
################################################
Licence
################################################
*************************************
Yukon Backend (MIT)
*************************************
| Copyright (c) 2019 UAVCAN. All rights reserved.
| Copyright (c) 2020 dronesolutions.io. All rights reserved.
|
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
| yukon-backend | /yukon_backend-0.1.2.tar.gz/yukon_backend-0.1.2/LICENSE.rst | LICENSE.rst |
import os
import random
import asyncio
import json
from quart import Quart
from quart_cors import cors
from typing import Tuple
from typing import AsyncGenerator
from typing import Dict
dir_path = os.path.dirname(os.path.realpath(__file__))
def fileresponse(path: str) -> Tuple[str, int]:
f = os.path.join(dir_path, path)
if os.path.isfile(f + '.json'):
with open(f + '.json', 'r') as file:
return (file.read(), 200)
else:
return ('', 404)
class ServerSentEvent:
"""
Encodes a server-sent event (SSE) message for streaming to the client. Example:
.. code-block:: python
from src.devserv.mock_responses import ServerSentEvent
import asyncio
import json
import typing
async def sse() -> typing.AsyncGenerator[bytes, None]:
async def send_events() -> typing.AsyncGenerator[bytes, None]:
data = [
{
"Test key": 0,
"Test value": "CRITICAL"
}
]
while True:
await asyncio.sleep(2)
random.shuffle(data)
event = ServerSentEvent(data=json.dumps(data), event='NODE_STATUS')
yield event.encode()
return send_events()
loop = asyncio.get_event_loop()
assert loop.run_until_complete(asyncio.gather(sse()))
"""
def __init__(
self,
data: str,
event: str
) -> None:
self.data = data
self.event = event
def encode(self) -> bytes:
message = f"data: {json.dumps(self.data)}"
if self.event is not None:
message = f"{message}\nevent: {self.event}"
message = f"{message}\r\n\r\n"
return message.encode('utf-8')
api_prefix = '/api/v1'
app = Quart(__name__,
static_folder='../../../frontend/static/',
template_folder='../../../frontend/')
app = cors(app)
@app.route(api_prefix + '/eventSource')
async def sse() -> Tuple[AsyncGenerator[bytes, None], Dict[str, str]]:
async def send_events() -> AsyncGenerator[bytes, None]:
data = [
{
"id": 0,
"health": "CRITICAL"
},
{
"id": 1,
"health": 'WARNING'
},
{
"id": 2,
"health": 'OPERATIONAL'
},
{
"id": 3,
"health": 'ERROR'
}
]
while True:
await asyncio.sleep(2)
random.shuffle(data)
event = ServerSentEvent(data=json.dumps(data), event='NODE_STATUS')
yield event.encode()
return send_events(), {
'Content-Type': 'text/event-stream',
'Cache-Control': 'no-cache',
'Transfer-Encoding': 'chunked',
}
# Sink all undeclared routes so that vue can work with router properly
@app.route('/<path:path>')
def serve_mocks(path: str) -> Tuple[str, int]:
return fileresponse(path)
if __name__ == "__main__":
app.run(port=5000)
| yukon-backend | /yukon_backend-0.1.2.tar.gz/yukon_backend-0.1.2/src/devserv/mock_responses.py | mock_responses.py
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<https://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<https://www.gnu.org/licenses/why-not-lgpl.html>.
| yulang | /yulang-0.0.2.tar.gz/yulang-0.0.2/LICENSE.md | LICENSE.md
# Yu Language - yulang
A toy project for creating a simple programming language using Bison and Flex in C++.
This is the Python wrapper.
## interactive console
```python
$ python
>>> import yulang
>>> yulang.console()
Yu Language 0.0.1 (unstable, Oct 23 2021, 21:31:19)
interactive:
y> var a = 3;
y> print(a);
print: 3
y> square a: {return a ^ 2;}
y> print(square(a : 10));
print: 100
y>
```
## parsing Python string
```python
$ python
>>> import yulang
>>> yulang.parse("var a = 10; print(a);")
print: 10
```
## examples
### arithmetic
source codes:
```bash
print(10 + 30);
print(10 - 30);
print(10. / 30.);
print(10 * 30);
print(30 % 9);
print((5. + 5.) / 3.);
```
output:
```python
$ python
>>> import yulang
>>> yulang.read("path/to/arithmetics.yu")
print: 40
print: -20
print: 0.333333
print: 300
print: 3
print: 3.33333
```
### string
source codes:
```bash
var a = "hello world";
print(a);
```
```python
$ python
>>> import yulang
>>> yulang.read("path/to/strings.yu")
print: hello world
```
### functions
source codes: (`//` is used for comments)
```bash
// create a square function
square p, l:
{
var L = p * l;
return L;
}
// using the square function in a volume function
volume t:
{
var V = square(p : 5, l : 6) * t;
return V;
}
// assigning the volume() return value to the myVol variable
var myVol = volume(4);
print(myVol);
```
output:
```python
$ python
>>> import yulang
>>> yulang.read("path/to/functions.yu")
print: 120
```
### variables
source codes: (`var` is used for generating variables)
```bash
// defining variables
var a, b, c, d;
a = 10; // integer
b = .5; // float
c = 3.; // float
d = 3.14; // float
print(a);
print(b);
print(c);
print(d);
```
output:
```python
$ python
>>> import yulang
>>> yulang.read("path/to/variables.yu")
print: 10
print: 0.5
print: 3
print: 3.14
```
### import files
source codes in `includes.yu`
```bash
multiply a, b:
{
return a * b;
}
```
source codes in `multi_files.yu`
```bash
// relative to current directory of terminal (or shell)
// you can import multiple relative paths and they do not create errors
import: "tests/stable/multi_files/includes.yu"
import: "stable/multi_files/includes.yu"
import: "multi_files/includes.yu"
var c = multiply(a : 10, b : 20);
print(c);
```
output:
```python
$ python
>>> import yulang
>>> yulang.read("path/to/multi_files.yu")
print: 200
```
| yulang | /yulang-0.0.2.tar.gz/yulang-0.0.2/README.md | README.md
# Yuleak API
[](https://pypi.python.org/pypi/yuleak-api/)
[](https://yuleak-api.readthedocs.io/en/latest/?badge=latest)
[](https://pypi.python.org/pypi/yuleak-api)
The official Python library for the [Yuleak API](https://app.yuleak.com/apidoc).
## Features
- Convenient methods for making calls to the API.
- Automatic parsing of API responses into Python objects.
## Installation
``yuleak-api`` is available on [PYPI](https://pypi.python.org/pypi/yuleak-api)
```bash
pip install yuleak-api
```
## Documentation
You can use the API with the default demo key for development purposes, but for real use you'll need to [register to Yuleak](https://app.yuleak.com).
### [Authentication](https://app.yuleak.com/apidoc#authentication)
```python
from yuleak_api.client import YuleakClient
YuleakClient.set_apikey('my_secret_api_key')
```
### [Errors](https://app.yuleak.com/apidoc#errors)
In case of error, GET methods will return an empty list and POST/DELETE will return False.
The error will be displayed in the ``yuleak-api`` logger.
Warnings (such as a deprecated endpoint) will also be displayed there.
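To surface those messages during development you can attach a standard ``logging`` handler. A minimal sketch (the exact logger name, ``yuleak-api`` here, is assumed from the package name):
```python
import logging

# Show yuleak-api errors and warnings on the console.
logging.basicConfig()
logging.getLogger('yuleak-api').setLevel(logging.DEBUG)
```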
### [Pagination](https://app.yuleak.com/apidoc#pagination)
Pagination is handled by the YuleakClient, so you do not have to deal with it yourself.
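For example (a short sketch reusing the calls documented below), a paginated endpoint still comes back as one complete list:
```python
dashboard = YuleakClient.dashboards()[0]
servers = dashboard.details() # every page is fetched and merged by the client
print(len(servers))
```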
## Usage
This is not intended to provide complete documentation of the API.
For more details, [please refer to the official documentation](https://app.yuleak.com/apidoc).
For more information on the included models and abstractions, please read the code.
### [Credits](https://app.yuleak.com/apidoc#get-credits)
Please check your credits amount before making any search or renew action to avoid errors.
```python
print(YuleakClient.credits())
```
### [Dashboards](https://app.yuleak.com/apidoc#get-dashboards)
```python
dashboards = YuleakClient.dashboards()
for dashboard in dashboards:
# Display stats (similar to dashboard view in WebUI)
print(dashboard.stats())
# Display map (similar to map widget in WebUI)
print(dashboard.map())
# Display graph (similar to graph view in WebUI)
for node in dashboard.graph():
if node.type == 'asn':
print('AS: {0}'.format(node.label))
for child in node.neighbors:
if child.type == 'server':
print(child)
# Display timeline (similar to timeline widget in WebUI)
print(dashboard.timeline())
# Display details (similar to details view in WebUI)
for server in dashboard.details():
print(server.geo.country_name)
# Download screenshots
for domain in server.domains:
if domain.screenshot is not None:
domain.screenshot.download('/tmp/{0}.png'.format(domain.value))
dashboard.delete()
```
### [Resources](https://app.yuleak.com/apidoc#get-resources)
```python
resources = dashboard.resources()
for resource in resources:
print('{0} :: {1}'.format(resource.value, resource.status))
if resource.type == 'server':
resource.renew()
else:
resource.delete()
```
### [Bookmarks](https://app.yuleak.com/apidoc#post-bookmark)
```python
server = dashboard.details()[0]
assert not server.bookmark
server.add_bookmark()
assert server.bookmark
server.del_bookmark()
assert not server.bookmark
```
### [Filters](https://app.yuleak.com/apidoc#get-filters)
```python
dashboard.add_filter('domain', 'all')
for f in dashboard.filters():
print(f)
f.delete()
```
### [Search](https://app.yuleak.com/apidoc#post-search)
```python
YuleakClient.search('yuleak.com')
dashboard = YuleakClient.dashboards()[-1]
dashboard.search('yuleak.io')
# Mass search
new_servers = dashboard.list_new_servers()
if YuleakClient.credits() >= new_servers > 0:
dashboard.searchall()
```
## Changelog
### v1.5.0
* GET dashboard/{id}/statsdns added
* GET dashboard/{id}/dns added
* you can now add custom headers in YuleakClient requests
### v1.4.0
* preview screenshot can now be recovered using server.domain.screenshot.download()
### v1.3.4
* requests timeout can now be set with YuleakClient.REQUESTS_TIMEOUT
* requests retry on error can now be set with YuleakClient.REQUESTS_RETRY
### v1.3.3
* correct error on 'DELETE dashboard/{id}' endpoint
### v1.3.2
* correct error on 'searchall' endpoint
### v1.3.1
* correct error on pip install
### v1.3.0
* GET dashboard/{id}/renewall added
* POST dashboard/{id}/renewall added
### v1.2.0
* GET dashboard/{id}/searchall added
* POST dashboard/{id}/searchall added
### v1.1.0
* Change to match the Yuleak API path modifications
| yuleak-api | /yuleak-api-1.5.0.tar.gz/yuleak-api-1.5.0/README.md | README.md |
import requests
from .errors import YuleakAPIError
from .logs import logger
class YuleakClient(object):
""" Client for Yuleak API.
Class must be used without instance of it.
"""
BASE_URL = 'https://api.yuleak.com/'
APIKEY = 'demo'
REQUESTS_RETRY = 3
REQUESTS_TIMEOUT = 3
@classmethod
def set_apikey(cls, apikey):
"""Define the ApiKey to use (by defaut 'demo' is used).
Args:
apikey (str): ApiKey to use
"""
cls.APIKEY = apikey
@classmethod
def credits(cls):
"""Get current user remaining credits
See https://app.yuleak.com/apidoc#get-credits for endpoint details.
Returns:
available credits amount
"""
data = cls.get_request('credits')
if len(data) == 0:
return 0
return data[0].get('credits', 0)
@classmethod
def dashboards(cls):
"""Get the current user dashboards list
See https://app.yuleak.com/apidoc#get-dashboards for endpoint details.
Returns:
list of Dashboard items
"""
from .models.dashboard import Dashboard
results = []
for d in cls.get_request('dashboards'):
results.append(Dashboard(d.get('id'), d.get('name')))
return results
@classmethod
def search(cls, search):
"""Launch a new search (credits will be used). A new dashboard will be created.
See https://app.yuleak.com/apidoc#post-search for endpoint details.
Args:
search (str): Expression to search
Returns:
(bool) True if the search has been launched
"""
return cls.post_request('search', data={'value': search})
@classmethod
def get_request(cls, endpoint, headers=None):
"""Make a GET request to the API.
Args:
endpoint (str): Name of the endpoint to query.
headers (dict): Custom headers to add
Returns:
a list of items
"""
return cls._do_request('GET', endpoint, headers=headers)
@classmethod
def post_request(cls, endpoint, data=None, headers=None):
"""Make a POST request to the API.
Args:
endpoint (str): Name of the endpoint to query.
data (dict): Data to send
headers (dict): Custom headers to add
Returns:
(bool) True if the request performed well
"""
return cls._do_request('POST', endpoint, data=data, headers=headers)
@classmethod
def delete_request(cls, endpoint, params=None, headers=None):
"""Make a DELETE request to the API.
Args:
endpoint (str): Name of the endpoint to query.
params (dict): GET data to send
headers (dict): Custom headers to add
Returns:
(bool) True if the request performed well
"""
return cls._do_request('DELETE', endpoint, params=params, headers=headers)
@classmethod
def _do_request(cls, method, endpoint, retry=None, params=None, data=None, headers=None):
try:
if retry is None:
retry = YuleakClient.REQUESTS_RETRY
if params is None:
params = {}
if data is None:
data = {}
full_headers = {'X-Api-Key': cls.APIKEY}
if headers is not None:
full_headers.update(headers)
req = requests.request(method,
cls.BASE_URL + endpoint,
headers=full_headers,
data=data,
params=params,
timeout=YuleakClient.REQUESTS_TIMEOUT)
# Error handling
if req.status_code not in (200,201):
raise YuleakAPIError(req.json())
# Warnings handling
for warning in req.json().get('warnings', []):
logger.warning('{0} [{1}]'.format(warning.get('message'), warning.get('code')))
result = req.json().get('data', [])
# Pagination
pagination = req.json().get('pagination')
if pagination is not None:
if (pagination.get('total') / pagination.get('max')) > pagination.get('page'):
params['page'] = pagination.get('page') + 1
result += cls._do_request(method, endpoint, params=params, data=data, headers=headers)
# Return
if method == 'GET':
return result
else:
return True
except YuleakAPIError as e:
logger.error(e)
if method == 'GET':
return []
else:
return True
except requests.exceptions.RequestException as e:
if retry > 0:
return cls._do_request(method, endpoint, retry=retry-1, params=params, data=data)
logger.error(e)
if method == 'GET':
return []
else:
return True | yuleak-api | /yuleak-api-1.5.0.tar.gz/yuleak-api-1.5.0/yuleak_api/client.py | client.py |
from yuleak_api.client import YuleakClient
from .marker import Marker
from .node import Node
from .event import Event
from .server import Server
from .resource import Resource
from .filter import Filter
from .dnsentry import DNSEntry
class Dashboard:
"""Dashboard model"""
BASE_STATS = {'server': 0,
'domain': 0,
'service': 0,
'alert': 0,
'hacked': 0,
'vulnerability': 0,
'leakcreds': 0,
'leak': 0,
'filedisclosure': 0,
'weakservice': 0,
'proxy': 0,
'tornode': 0,
'onion': 0,
'blacklist': 0,
'phishingurl': 0,
'ssl': 0,
'paste': 0,
'warez': 0,
'repository': 0,
'social_networks': 0,
'ids': 0,
'domain_expiration': 0,
'cryptowallet': 0}
def __init__(self, id_, name):
self.id = id_
self.name = name
def stats(self):
"""Get the current dashboard statistics (similar to dahboard view in WebUI).
See https://app.yuleak.com/apidoc#get-dashboard for endpoint details.
Returns:
dict containing statistics
"""
stats = self.BASE_STATS.copy()
data = YuleakClient.get_request('dashboard/{0}'.format(self.id))
if len(data) == 0:
return stats
for k, v in data[0].items():
stats[k] = v
return stats
def statsdns(self):
"""Get the current dashboard DNS (Typosquatting) stats (similare to dns view in WebUI).
See https://app.yuleak.com/apidoc#get-statsdns for endpoint details.
Returns:
dict containing statistics
"""
data = YuleakClient.get_request('dashboard/{0}/statsdns'.format(self.id))
if len(data) == 0:
return []
return data[0]
def map(self):
"""Get the current dashboard map markers (similar to map widget in WebUI).
See https://app.yuleak.com/apidoc#get-map for endpoint details.
Returns:
list of Marker items
"""
results = []
for d in YuleakClient.get_request('dashboard/{0}/map'.format(self.id)):
results.append(Marker.from_json(d))
return results
def graph(self):
"""Get the current dashboard graph (similar to graph view in WebUI).
See https://app.yuleak.com/apidoc#get-graph for endpoint details.
Returns:
list of Node items
"""
results = {}
data = YuleakClient.get_request('dashboard/{0}/graph'.format(self.id))
if len(data) == 0:
return []
# Nodes
for n in data[0].get('nodes', []):
results[n.get('id')] = Node.from_json(n)
# Edges
for e in data[0].get('edges', []):
parent_node = results.get(e[0])
child_node = results.get(e[1])
if parent_node is None or child_node is None:
continue
parent_node.connect(child_node)
return list(results.values())
def timeline(self):
"""Get the current dashboard timeline (similar to timeline widget in WebUI).
See https://app.yuleak.com/apidoc#get-timeline for endpoint details.
Returns:
list of Event items
"""
results = []
for d in YuleakClient.get_request('dashboard/{0}/timeline'.format(self.id)):
results.append(Event.from_json(d))
return results
def details(self):
"""Get the current dashboard servers (similar to details view in WebUI).
See https://app.yuleak.com/apidoc#get-details for endpoint details.
Returns:
list of Server items
"""
results = []
for d in YuleakClient.get_request('dashboard/{0}/details'.format(self.id)):
results.append(Server.from_json(d, self))
return results
def dns(self):
"""Get the current dashboard typosquatting dns entries (similar to dns view in WebUI).
See https://app.yuleak.com/apidoc#get-dns for endpoint details.
Returns:
list of DNSEntry items
"""
results = []
for d in YuleakClient.get_request('dashboard/{0}/dns'.format(self.id)):
results.append(DNSEntry.from_json(d))
return results
def resources(self):
"""Get the current dashboard resources (similar to resources list widget in WebUI).
See https://app.yuleak.com/apidoc#get-resources for endpoint details.
Returns:
list of Resource items
"""
results = []
for d in YuleakClient.get_request('dashboard/{0}/resources'.format(self.id)):
results.append(Resource.from_json(d, self))
return results
def renew_cost(self):
"""Get the cost to renew all resources
See https://app.yuleak.com/apidoc#get-renewall for endpoint details.
Returns:
(int) Amount of credits
"""
data = YuleakClient.get_request('dashboard/{0}/renewall'.format(self.id))
if len(data) == 0:
return 0
return data[0].get('credits', 0)
def renew_all(self):
"""Re-launch all resources of the current dashboard.
See https://app.yuleak.com/apidoc#post-renewall for endpoint details.
Returns:
(bool) True if the search has been launched
"""
return YuleakClient.post_request('dashboard/{0}/renewall'.format(self.id))
def filters(self):
"""Get the current dashboard active filters (similar to filters list widget in WebUI).
See https://app.yuleak.com/apidoc#get-filters for endpoint details.
Returns:
list of Filter items
"""
results = []
for d in YuleakClient.get_request('dashboard/{0}/filters'.format(self.id)):
results.append(Filter.from_json(d, self))
return results
def add_filter(self, category, value, type_='required'):
"""Add a filter to the current dashboard.
See https://app.yuleak.com/apidoc#post-filters for endpoint details.
Args:
category (str): Filter category (server, domain, alert, date)
value (str): Filter value (all, blacklist, cloudflare ...)
type_ (str): Filter type: required (by default) or ignored
Returns:
True if the filter has been added
"""
return YuleakClient.post_request('dashboard/{0}/filters'.format(self.id),
data={'category': category,
'value': value,
'type': type_})
def search(self, search):
"""Launch a new search (credits will be used) in the current dashboard.
See https://app.yuleak.com/apidoc#post-search for endpoint details.
Args:
search (str): Expression to search
Returns:
(bool) True if the search has been launched
"""
return YuleakClient.post_request('dashboard/{0}/search'.format(self.id), data={'value': search})
def list_new_servers(self):
"""Get list of servers not in resources.
See https://app.yuleak.com/apidoc#get-searchall for endpoint details.
Returns:
list of ip (string)
"""
return YuleakClient.get_request('dashboard/{0}/searchall'.format(self.id))
def searchall(self):
"""Search all servers not listed in resources (credits will be used).
See https://app.yuleak.com/apidoc#post-searchall for endpoint details.
Returns:
(bool) True if the search has been launched
"""
return YuleakClient.post_request('dashboard/{0}/searchall'.format(self.id))
def delete(self):
"""Delete the current dashboard and all its data.
See https://app.yuleak.com/apidoc#post-delete for endpoint details.
Returns:
(bool) True if the dashboard has been deleted
"""
return YuleakClient.delete_request('dashboard/{0}'.format(self.id))
def __repr__(self):
return '<Dashboard {0}> {1}'.format(self.id, self.name) | yuleak-api | /yuleak-api-1.5.0.tar.gz/yuleak-api-1.5.0/yuleak_api/models/dashboard.py | dashboard.py |
import requests
import dateutil.parser
from yuleak_api.client import YuleakClient
from yuleak_api.logs import logger
# Python2 support
try:
FileNotFoundError
except NameError:
FileNotFoundError = IOError
class Geo:
def __init__(self):
self.country_code = None
self.country_name = None
self.city = None
self.latitude = None
self.longitude = None
class Whois:
def __init__(self):
self.range = None
self.name = None
self.organisation = None
self.asn = None
class Domain:
def __init__(self):
self.id = None
self.parent = None
self.value = None
self.in_src = False
self.tags = []
self.risk = 0
self.screenshot= None
@classmethod
def from_json(cls, domain_json, parent=None):
domain = cls()
domain.id = domain_json.get('id')
domain.parent = parent
domain.value = domain_json.get('value')
domain.in_src = domain_json.get('in_src', False)
domain.tags = domain_json.get('tags', [])
domain.risk = domain_json.get('risk', 0)
screenshot_url = domain_json.get('screenshot', None)
if screenshot_url is not None:
domain.screenshot = ScreenShot(screenshot_url)
return domain
def __repr__(self):
return '<Domain {0}> {1}'.format(self.id, self.value)
class ScreenShot:
def __init__(self, url):
self.url = url
def download(self, filepath, timeout=30):
try:
req = requests.get(self.url, timeout=timeout)
with open(filepath, 'wb') as fc:
fc.write(req.content)
return True
except requests.exceptions.RequestException as e:
logger.error(e)
return False
except FileNotFoundError as e:
logger.error(e)
return False
class Service:
def __init__(self):
self.id = None
self.parent = None
self.port = 0
self.date = None
self.first_seend = None
self.risk = 0
self.weak = False
self.protocol = 'tcp'
self.name = None
self.version = None
@classmethod
def from_json(cls, service_json, parent=None):
service = cls()
service.id = service_json.get('id')
service.parent = parent
service.port = service_json.get('port',0)
service.date = dateutil.parser.parse(service_json.get('date'))
service.first_seend = dateutil.parser.parse(service_json.get('first_seen'))
service.risk = service_json.get('risk', 0)
service.weak = service_json.get('weak', False)
service.protocol = service_json.get('protocol', 'tcp')
service.name = service_json.get('name')
service.version = service_json.get('version')
return service
def __repr__(self):
return '<Service {0}> {1} {2} - {3}'.format(self.id, self.protocol, self.port, self.name)
class Alert:
def __init__(self):
self.id = None
self.parent = None
self.date = None
self.first_seen = None
self.type = None
self.value = None
self.risk = 0
self.link = None
@classmethod
def from_json(cls, alert_json, parent=None):
alert = cls()
alert.id = alert_json.get('id')
alert.parent = parent
alert.date = dateutil.parser.parse(alert_json.get('date'))
alert.first_seen = dateutil.parser.parse(alert_json.get('first_seen'))
alert.type = alert_json.get('type')
alert.value = alert_json.get('value')
alert.risk = alert_json.get('risk', 0)
alert.link = alert_json.get('link')
return alert
def __repr__(self):
return '<Alert {0}> {1} - {2}'.format(self.id, self.type, self.value)
class Leak:
def __init__(self):
self.id = None
self.parent = None
self.date = None
self.first_seen = None
self.risk = 0
self.email = None
self.password = None
@classmethod
def from_json(cls, leak_json, parent=None):
leak = cls()
leak.id = leak_json.get('id')
leak.parent = parent
leak.date = dateutil.parser.parse(leak_json.get('date'))
leak.first_seen = dateutil.parser.parse(leak_json.get('first_seen'))
leak.risk = leak_json.get('risk', 0)
leak.email = leak_json.get('email')
leak.password = leak_json.get('password')
return leak
def __repr__(self):
return '<Leak {0}> {1}'.format(self.id, self.email)
class SocialNetwork:
def __init__(self):
self.id = None
self.parent = None
self.date = None
self.first_seen = None
self.platform = None
self.login = None
self.link = None
self.icon = None
self.risk = 0
@classmethod
def from_json(cls, sn_json, parent=None):
sn = cls()
sn.id = sn_json.get('id')
sn.parent = parent
sn.date = dateutil.parser.parse(sn_json.get('date'))
sn.first_seen = dateutil.parser.parse(sn_json.get('first_seen'))
sn.risk = sn_json.get('risk', 0)
sn.platform = sn_json.get('platform')
sn.login = sn_json.get('login')
sn.link = sn_json.get('link')
sn.icon = sn_json.get('icon')
return sn
def __repr__(self):
return '<SocialNetwork {0}> {1} - {2}'.format(self.id, self.platform, self.login)
class ID:
def __init__(self):
self.id = None
self.parent = None
self.date = None
self.first_seen = None
self.risk = 0
self.type = None
self.value = None
@classmethod
def from_json(cls, id_json, parent=None):
id_ = cls()
id_.id = id_json.get('id')
id_.parent = parent
id_.date = dateutil.parser.parse(id_json.get('date'))
id_.first_seen = dateutil.parser.parse(id_json.get('first_seen'))
id_.risk = id_json.get('risk', 0)
id_.type = id_json.get('type')
id_.value = id_json.get('value')
return id_
def __repr__(self):
return '<ID {0}> {1} - {2}'.format(self.id, self.type, self.value)
class Server:
"""Server model"""
def __init__(self, dashboard):
self.dashboard = dashboard
self.id = None
self.ip = None
self.cloudflare = False
self.in_src = False
self.bookmark = None
self.risk = None
self.logo = None
self.os = None
self.equipment = None
self.geo = Geo()
self.whois = Whois()
self.domains = []
self.services = []
self.alerts = []
self.leaks = []
self.social_networks = []
self.ids = []
def add_bookmark(self):
"""Add a bookmark to the current server.
See https://app.yuleak.com/apidoc#post-bookmark for endpoint details.
Returns:
(bool) True if the bookmark have been added
"""
if self.bookmark:
logger.warning('The server is already bookmarked.')
if YuleakClient.post_request('dashboard/{0}/server/{1}/bookmark'.format(self.dashboard.id, self.id)):
self.bookmark = True
return self.bookmark
def del_bookmark(self):
"""Delete the bookmark of the current server.
See https://app.yuleak.com/apidoc#delete-bookmark for endpoint details.
Returns:
(bool) True if the bookmark have been deleted
"""
if not self.bookmark:
logger.warning('The server is not bookmarked.')
if YuleakClient.delete_request('dashboard/{0}/server/{1}/bookmark'.format(self.dashboard.id, self.id)):
self.bookmark = False
return True
else:
return False
def _get_element_by_id(self, element_id):
if element_id == self.id:
return self
for d in self.domains:
if element_id == d.id:
return d
for s in self.services:
if element_id == s.id:
return s
return None
@classmethod
def from_json(cls, server_json, dashboard):
server = cls(dashboard)
server.id = server_json.get('id')
server.ip = server_json.get('ip')
server.cloudflare = server_json.get('cloudflare', False)
server.in_src = server_json.get('in_src', False)
server.bookmark = server_json.get('bookmark')
server.risk = server_json.get('risk', 0)
server.logo = server_json.get('logo')
server.os = server_json.get('os')
server.equipment = server_json.get('equipment')
server.geo.country_code = server_json.get('country_code')
server.geo.country_name = server_json.get('country_name')
server.geo.city = server_json.get('geo_city')
server.geo.latitude = server_json.get('geo_lat')
server.geo.longitude = server_json.get('geo_long')
server.whois.name = server_json.get('whois_name')
server.whois.organisation = server_json.get('whois_organisation')
server.whois.range = server_json.get('whois_range')
server.whois.asn = server_json.get('whois_as')
for d in server_json.get('domains'):
server.domains.append(Domain.from_json(d, server))
for s in server_json.get('services'):
server.services.append(Service.from_json(s, server))
for a in server_json.get('alerts'):
server.alerts.append(Alert.from_json(a, server._get_element_by_id(a.get('parent'))))
for l in server_json.get('leaks'):
server.leaks.append(Leak.from_json(l, server._get_element_by_id(l.get('parent'))))
for s in server_json.get('social_networks'):
server.social_networks.append(SocialNetwork.from_json(s, server._get_element_by_id(s.get('parent'))))
for i in server_json.get('ids'):
server.ids.append(SocialNetwork.from_json(i, server._get_element_by_id(i.get('parent'))))
return server
def __repr__(self):
return '<Server {0}> {1}'.format(self.id, self.ip) | yuleak-api | /yuleak-api-1.5.0.tar.gz/yuleak-api-1.5.0/yuleak_api/models/server.py | server.py |
import random
from faker import Faker
object_methods = [
"currency",
"simple_profile",
"pylist",
"pyset",
"pytuple",
"pystruct",
]
number_methods = [
"random_int",
"random_digit",
"pyfloat",
"pybool",
"pydecimal",
"pyint",
]
date_methods_onlydates = [
# 'unix_time',
"date_time",
"iso8601",
"date",
# 'time',
"date_time_this_century",
"date_time_this_decade",
"date_time_this_year",
"date_time_this_month",
"date_this_century",
"date_this_decade",
"date_this_year",
"date_this_month",
]
date_methods = [
"date_of_birth",
# 'century',
"year",
"month",
"month_name",
"day_of_week",
"day_of_month",
# 'timezone',
# 'am_pm',
"unix_time",
"date_time",
"iso8601",
"date",
"time",
"date_time_this_century",
"date_time_this_decade",
"date_time_this_year",
"date_time_this_month",
"date_this_century",
"date_this_decade",
"date_this_year",
"date_this_month",
]
list_methods = [
"paragraphs",
"words",
"sentences",
"texts",
]
string_methods = [
"name",
"password",
"phone_number",
"first_name",
"last_name",
"name_male",
"name_female",
# https://faker.readthedocs.io/en/master/providers/faker.providers.color.html
"color",
# https://faker.readthedocs.io/en/master/providers/faker.providers.lorem.html
"paragraph",
# 'paragraphs',
"word",
# 'words',
"sentence",
# 'sentences',
"text",
# 'texts',
"job",
# https://faker.readthedocs.io/en/master/providers/faker.providers.company.html
"company",
"address",
"currency_name",
"currency_code",
"email",
"safe_email",
"free_email",
"company_email",
"hostname",
"domain_name",
"domain_word",
"tld",
"ipv4",
"ipv6",
"mac_address",
"slug",
"image_url",
"pystr",
"ssn",
"md5",
"sha1",
"sha256",
"uuid4",
# https://faker.readthedocs.io/en/master/providers/faker.providers.user_agent.html
"chrome",
"firefox",
"opera",
"safari",
"internet_explorer",
"user_agent",
]
class Fakesey:
def __init__(self, *args, **kwargs):
self.config = {}
if "locale" in kwargs.keys():
pilih_locale = kwargs["locale"]
if "," in pilih_locale:
pilih_locale = pilih_locale.split(",")
print("pilih_locale:", pilih_locale)
self.faker = Faker(pilih_locale)
else:
self.faker = Faker()
def gen(self, methodname):
return getattr(self.faker, methodname)()
def generate(self, methodname, *args, **kwargs):
"""
kembali = getattr(faker_instance, f'generate') ('random_int', min, max)
kembali = getattr(faker_instance, f'generate') ('random_int', min)
"""
try:
cek_bcrypt = getattr(self.faker, methodname)(*args, **kwargs)
return cek_bcrypt
except Exception as e:
import traceback
print("gagal fakesey/generate:", e)
print(f"""
methodname: {methodname}
args: {args}
kwargs: {kwargs}
""")
print(traceback.format_exc())
input("Press any key... ")
def _string(self, number=None):
"""
hati2 jangan sampai
random.choice(string_methods)
mengembalikan list
"""
if number:
return self.faker.text(number)
return getattr(self.faker, random.choice(string_methods))()
def _text(self, number=500):
if number:
return self.faker.text(number)
return getattr(self.faker, random.choice(string_methods))()
def _date(self):
# return getattr(self.faker, random.choice(date_methods)) ()
return getattr(self.faker, random.choice(date_methods_onlydates))()
def _number(self):
return getattr(self.faker, random.choice(number_methods))()
def _object(self):
return getattr(self.faker, random.choice(object_methods))()
def _url(self):
return self.faker.url()
palsu = Fakesey()
# if 'locale' in configuration:
# print('gunakan locale:', configuration['locale'])
# palsu = Fakesey(locale=configuration['locale']) | yulibrary | /yulibrary-0.0.2-py3-none-any.whl/langutils/langs/data/fakesey/fakesey.py | fakesey.py |
import redis
from .dirutils import walk_fullpath
from .fileutils import file_content
from .printutils import indah4
from .utils import env_get, env_int
def connect(host=None, port=None, db=None, password=None, strict=False):
"""
utk aplikasi terpisah, env_load() terlebih dahulu, baru panggil fungsi ini
"""
if host is None:
host = env_get("ULIBPY_REDIS_HOST")
if port is None:
port = env_int("ULIBPY_REDIS_PORT")
if db is None:
db = env_int("ULIBPY_REDIS_DBNO")
# if not host:
# host = 'localhost'
# if not port:
# port = 6379
# if not db:
# db = 0
conn_params = {
"host": host,
"port": port,
"db": db,
}
# print('[app.redisutils] redis connect:', conn_params)
if password is not None:
conn_params.update(
{
"password": password,
}
)
if strict:
r = redis.StrictRedis(**conn_params)
else:
r = redis.Redis(**conn_params)
return r
def kasih(r, k, v):
r.set(k, v)
set = kasih
def ambil(r, k):
return r.get(k)
get = ambil
def hapus(r, keys):
return r.delete(keys)
def masuk(r, key, values, depan=True):
"""
https://pythontic.com/database/redis/list
lpush masuk di head ~ insert(0, ..)
rpush masuk di tail
"""
if not depan:
r.rpush(r, key, *values)
else:
r.lpush(r, key, *values)
def keluar(r, key, depan=True):
if not depan:
return r.rpop(key)
return r.lpop(key)
def didalam(r, key):
return r.llen(key)
def ubah(r, listkey, index, value):
return r.lset(listkey, index, value)
def terletak(r, key, index=0):
"""
lpush(kota, 'jakarta', 'bandung', 'surabaya')
lindex 0 1 2
"""
return r.lindex(index)
def ltrim(r, listkey, values):
return r.ltrim(listkey, *values)
def rtrim(r, listkey, values):
return r.rtrim(listkey, *values)
def ada(r, names):
return r.exists(names)
def search_keys(r, pattern):
return r.keys(pattern)
def search_values(r, pattern, start=0, limit=10000):
result = []
all = r.keys("*")
if limit and len(all) > limit:
all = all[start : start + limit]
for k in all:
v = r.get(k)
if pattern in v:
entry = (k, v)
result.append(entry)
return result
def load_file_content(r, basedir):
allfiles = walk_fullpath(basedir, skip_ends=[".pyc"])
for filepath in allfiles:
kasih(r, filepath, file_content(filepath))
indah4(f"{len(allfiles)} files loaded", warna="white")
# next = lpush/rpush/lrange, sadd/smembers, hmset/hgetall | yulibrary | /yulibrary-0.0.2-py3-none-any.whl/langutils/app/redisutils.py | redisutils.py |
import datetime
import errno
import functools
import os
import pathlib
import re
import tempfile
from stat import S_ISREG, ST_CTIME, ST_MODE, ST_MTIME
# import pyperclip
try:
HOMEDIR = os.environ["HOME"]
except:
HOMEDIR = os.environ["USERPROFILE"]
def chdir(folderpath):
if isdir(folderpath):
os.chdir(folderpath)
def absize(innerfunc):
@functools.wraps(innerfunc)
def wrapper(filepath):
return os.path.abspath(innerfunc(filepath))
return wrapper
def dirize(innerfunc):
@functools.wraps(innerfunc)
def wrapper(filepath):
return os.path.dirname(innerfunc(filepath))
return wrapper
def normalize(innerfunc):
@functools.wraps(innerfunc)
def wrapper(*args):
return os.path.normpath(innerfunc(*args))
return wrapper
def normy(path):
return os.path.normpath(path)
def normy_pathlist(pathlist):
return [normy(path) for path in pathlist]
def subtract_normy_pathlist(pathlist, subtractor):
return [item.removeprefix(subtractor) for item in normy_pathlist(pathlist)]
def normalizepath(path):
return os.path.normpath(path)
def absolutenormalizepath(path):
return os.path.abspath(os.path.normpath(path))
def absnormpath(path):
return absolutenormalizepath(path)
def ANpath(path):
return absolutenormalizepath(path)
def printer(valuefunc):
@functools.wraps(valuefunc)
def wrapper(*args, **kwargs):
nilai = valuefunc(*args, **kwargs)
print(nilai)
return wrapper
def parentize(innerfunc):
@functools.wraps(innerfunc)
def wrapper(filepath, times):
nilai = filepath
while times > 0:
nilai = os.path.join(nilai, os.path.pardir)
times -= 1
return nilai
return wrapper
@normalize
def abs_pardir(filepath):
"""seringnya
parent/
ourselves/
filepath
kita pengen parent biasanya, krn ourselves itu module
utk dapatkan ourselves, gunakan here(__file__)
"""
return os.path.join(abs_dir(filepath), os.path.pardir)
# pemahamanku:
# absolute kan, lalu ambil dir, later, norm kan
@normalize
@dirize
@absize
def here(filepath):
"""
kembalikan dirname utk filepath yg diberikan.
cocok utk terma __file__
"""
return filepath
# @printer
@normalize
@parentize
def ayah(filepath, times=1):
return filepath
def basename(filepath):
return os.path.basename(filepath)
def ayah_basename(filepath, times=1):
return basename(ayah(filepath, times))
def ayahbasename(filepath, times=1):
return ayah_basename(filepath, times)
def nonbasename(filepath):
"""
bukan basename = setara dg dirname
"""
return os.path.dirname(filepath)
def dirname(filepath):
return os.path.dirname(filepath)
def get_cwd():
return os.getcwd()
def getcwd():
return get_cwd()
def is_file_not_dir(filepath):
return os.path.basename(filepath) == filepath
def is_dir_not_file(filepath):
return not is_file_not_dir(filepath)
def abs_dir(filepath):
"""
biar aman, abs dulu baru dir.
kita sebut: DA.
jk dia dir maka kembalikan abs-nya
jk dia file maka kembalikan abs dir-nya
"""
if os.path.isdir(filepath):
return os.path.normpath(os.path.abspath(filepath))
return os.path.normpath(os.path.dirname(os.path.abspath(filepath)))
def joiner(*args):
# UPDATE: tambah normy
return normy(os.path.join(*args))
def joinhere(filehere, *args):
"""
joinhere(__file__, 'relative/to/path')
"""
return joiner(ayah(filehere, 1), *args)
def dirs(
dirpath,
find_files=False,
excludes=["__pycache__", ".git", "node_modules"],
skip_hidden=False,
):
curdir = dirpath
if os.path.isfile(dirpath):
curdir = here(dirpath)
if not os.path.isdir(curdir):
print("[dirs] Error not directory:", curdir)
# print('dirutils/dirs/curdir=', curdir, 'files/dirs:', 'files' if find_files else 'dirs')
all_files = os.listdir(curdir)
if skip_hidden:
all_files = [item for item in all_files if not item.startswith(".")]
if find_files:
return [item for item in all_files if os.path.isfile(joiner(curdir, item))]
return [
item
for item in all_files
if os.path.isdir(joiner(curdir, item)) and item not in excludes
]
# print('dirs:', hasil, 'listdir:', [basename(item) for item in os.listdir(curdir)], [item in excludes for item in os.listdir(curdir)], 'excludes:', excludes)
# return hasil
def only_files(dirpath, sort=True):
if sort:
return sorted(
[
item
for item in os.listdir(dirpath)
if os.path.isfile(joiner(dirpath, item))
]
)
else:
return [
item
for item in os.listdir(dirpath)
if os.path.isfile(joiner(dirpath, item))
]
def only_files_without_ext(dirpath, nosort=True):
"""
sudah ada: files_noext
"""
pass
def files(dirpath):
return dirs(dirpath, True)
def files_filter(dirpath, extension=[], only_filename=False, sort=False):
"""
only_filename utk strip dir paths dari filepath
"""
allfiles = dirs(dirpath, find_files=True)
if extension:
allfiles = [
item
for item in allfiles
if any([ext for ext in extension if item.endswith(ext)])
]
if only_filename:
allfiles = [basename(item) for item in allfiles]
if sort:
return sorted(allfiles)
return allfiles
def dirs_files(dirpath):
# all dirs + all files
return dirs(dirpath) + files(dirpath)
def sdirs(dirpath, find_files=False):
return sorted(dirs(dirpath, find_files))
def sfiles(dirpath):
return sorted(files(dirpath))
def files_noext(dirpath, sorted=True):
res = [pathlib.Path(item).stem for item in files(dirpath)]
if sorted:
return sorted(res)
return res
def files_noext_filter_by_ext(dirpath, ext=".mk", sorted=True):
"""
hanya file2 ber-ext mk
"""
res = [pathlib.Path(item).stem for item in files(dirpath) if item.endswith(ext)]
if sorted:
return sorted(res)
return res
def files_with_pattern(dirpath, pattern, sorted_=True):
"""
spt files_noext_filter_by_ext, tapi gak hanya ending, bisa awalan dan tengahan
"""
res = [item for item in sfiles(dirpath) if pattern.lower() in item.lower()]
if sorted_:
return sorted(res)
return res
def isdir(filepath, do_bongkar=True, strip=False):
if strip:
filepath = filepath.strip()
if do_bongkar:
# return os.path.isdir(bongkar(filepath))
pecah = bongkar(filepath)
false_or_path = os.path.isdir(pecah)
if false_or_path:
# kembalikan full path hasil bongkar
return pecah
return false_or_path
return os.path.isdir(filepath)
def isdir_simple(filepath):
return os.path.isdir(filepath)
def isfile(filepath, do_bongkar=True, strip=False):
if strip:
filepath = filepath.strip()
if do_bongkar:
# return os.path.isfile(bongkar(filepath))
pecah = bongkar(filepath)
bool_or_path = os.path.isfile(pecah)
if bool_or_path:
# kembalikan full path hasil bongkar
return pecah
return bool_or_path
return os.path.isfile(filepath)
def bongkar_if_not_dir(filepath, strip=False):
if strip:
filepath = filepath.strip()
if os.path.isdir(filepath):
return filepath
return bongkar(filepath)
def bongkar_if_not_file(filepath, strip=False):
"""
bongkar path:
ULIBPY_*
~
env vars
"""
if strip:
filepath = filepath.strip()
if os.path.isfile(filepath):
return filepath
return bongkar(filepath)
def isfolder_dir(filepath):
"""
dari filepath, cek apa foldernya dir?
"""
return isdir(ayah(filepath, 1))
def listdir(dirpath):
return os.listdir(dirpath)
def listdir_onlydirs(dirpath):
return list(filter(os.path.isdir, os.listdir(dirpath)))
def listdir_onlyfiles(dirpath):
return list(filter(lambda x: not os.path.isdir(x), os.listdir(dirpath)))
def does_not_exist(filepath):
return not os.path.exists(filepath)
def does_exist(filepath):
return os.path.exists(filepath)
def exists_in_dir(basedir, filename):
# filepath = joiner(basedir, filename)
# return os.path.exists(filepath)
return filename in os.listdir(basedir)
def not_exists_in_dir(basedir, filename):
# filepath = joiner(basedir, filename)
# return not os.path.exists(filepath)
return not filename in os.listdir(basedir)
def exists_in_dir_bypattern(basedir, filepattern, complete_path=True):
daftar = only_files(basedir)
daftar = [item for item in daftar if filepattern in item]
if complete_path:
daftar = [joiner(basedir, item) for item in daftar if filepattern in item]
if not daftar:
return None
if len(daftar) == 1:
return daftar[0]
else:
return daftar
def getcwd():
return os.getcwd()
def disini():
return os.getcwd()
def pemisah():
return os.sep
def pemisah_direktori():
return os.sep
def pemisah_unix_to_windows(filepath):
return filepath.replace("/", os.sep)
def pemisah_windows_to_unix(filepath):
return filepath.replace("\\", os.sep)
def path_split(filepath):
return filepath.split(os.sep)
def bongkar(filepath, normalize=True, debug=False):
"""
@returns: string bongkared filepath
intinya: expanduser dan expandvars
https://docs.python.org/3/library/os.path.html#os.path.expandvars
$name
${name}
%name% (win32)
~
UPDATE 14-6-2022, kita tambah bisa __PWD
"""
if "__PWD" in filepath:
if "ULIBPY__PWD__" in os.environ:
filepath = filepath.replace(
"__PWD", os.environ.get("ULIBPY__PWD__", os.getcwd())
)
else:
filepath = filepath.replace("__PWD", os.getcwd())
pertama = os.path.expanduser(filepath)
kedua = os.path.expandvars(pertama)
if filepath == kedua and filepath.startswith("ULIBPY"):
from .utils import env_replace_filepath
kedua = env_replace_filepath(kedua)
if normalize:
kedua = normy(kedua)
if debug:
from rich.pretty import pprint
pprint(os.environ)
print(
f"""[dirutils.bongkar]
input filepath = {filepath}
kedua skrg adlh = {kedua}
cwd adlh = {os.getcwd()}
"""
)
return kedua
def timestamp(time_format="%Y%m%d_%H%M%S"):
return datetime.datetime.now().strftime(time_format)
def new_filename_timestamp(rootname="delete"):
return rootname + "_" + timestamp()
def under_tempdir(newdir, persistent=False):
"""
hasilkan: /tmp/projectdir/
bukan utk filepath
utk direktori baru, krn makedirs
persistent_tempdir adlh ULIBPY_PERSISTENT_TMP, maksudnya krn ditentukan .env maka persistent
"""
dirpath = os.path.join(persistent_tempdir() if persistent else tempdir(), newdir)
if not os.path.exists(dirpath):
os.makedirs(dirpath)
return dirpath
save_dir_under_tempdir = under_tempdir
def file_under_tempdir(content=None, filename=None, ext=""):
"""
buat file dalam tempdir
"""
if not filename:
filename = "untitled_" + timestamp() + ext
filepath = os.path.join(tempdir(), filename)
if not os.path.isfile(filepath):
pathlib.Path(filepath).touch()
if content:
with open(filepath, "w") as fd:
fd.write(content)
return filepath
def save_file_under_tempdir(filename, persistent=False):
"""
hasilkan /tmp/filename
"""
return joiner(persistent_tempdir() if persistent else tempdir(), filename)
def tempdir(end_with_sepdir=False):
if end_with_sepdir:
return tempfile.gettempdir() + pemisah_direktori()
return tempfile.gettempdir()
def persistent_tempdir():
return os.environ.get("ULIBPY_PERSISTENT_TMP", "/tmp")
def absolutify(filepath):
return os.path.abspath(filepath)
def absolute(filepath):
return os.path.isabs(filepath)
def isabsolute(filepath):
return absolute(filepath)
def is_absolute(filepath):
return absolute(filepath)
def relative(filepath):
return not os.path.isabs(filepath)
def isrelative(filepath):
return relative(filepath)
def is_relative(filepath):
return relative(filepath)
def tree(dirpath, excludes='"*.pyc|d"'):
curdir = dirpath
if os.path.isfile(dirpath):
curdir = here(dirpath)
os.system(f"tree -I {excludes} {curdir}")
def walk_fullpath(basedir, skip_ends=None, filtered_ends=None):
"""
skip_ends='.pyc' skip file2 berekstensi pyc
filtered_ends='.py' hanya file2 berekstensi py
TODO:
skip && filter, sekarang masih mutually exclusive
perlu support skip dan filter list, bukan string
"""
# return [os.path.join(dp,f) for dp,dn,fn in os.walk(basedir) for f in fn]
if skip_ends:
return [
os.path.join(dp, f)
for dp, _, fn in os.walk(basedir)
for f in fn
if not f.endswith(skip_ends)
]
elif filtered_ends:
return [
os.path.join(dp, f)
for dp, _, fn in os.walk(basedir)
for f in fn
if f.endswith(filtered_ends)
]
return [os.path.join(dp, f) for dp, _, fn in os.walk(basedir) for f in fn]
# https://docs.python.org/2/library/stat.html
def sort_mtime_walk_fullpath(basedir):
"""
kembalikan list of tuples = time, fullpath
"""
# print('sort_mtime_walk_fullpath, basedir:', basedir)
walker = [
(os.stat(path)[ST_MTIME], path) for path in walk_fullpath(basedir, ".pyc")
]
return sorted(walker, reverse=True)
def latest_mtime_files(basedir, jumlah=1000):
return sort_mtime_walk_fullpath(basedir)[:jumlah]
def latest_files(basedir, jumlah=1000):
"""
item[0] adlh epoch time
item[1] adlh filepath
"""
if isinstance(jumlah, str):
jumlah = int(jumlah)
if not jumlah:
jumlah = 1000
return [item[1] for item in latest_mtime_files(basedir, jumlah)]
def _path_to_mtime(filepath):
return os.stat(filepath)[ST_MTIME]
def timeify_filelist(filelist):
"""
file list belum ada time...
"""
def format(filepath):
from .datetimeutils import format_epoch_longer
waktu = format_epoch_longer(_path_to_mtime(filepath))
# filepath = time_path[1]
pemisah = "\n" + " " * 25
return f"{filepath}{pemisah}{waktu}"
return [format(item) for item in filelist]
def filter_print_latest_files(code, basedir, cetak_waktu=False):
"""
kita nanti pengen bikin gini
|50 word1 word2
jadi dari hasil |50 kita filter yg mengandung word1 dan word2 saja.
"""
from .printutils import print_list_warna
# print(f'cetak latest files [code={code}], [dir={basedir}]')
if not code:
code = "10" # minimal bertujuan utk lihat latest files
m = re.match(r"^(\d+)\s*(.*)", code)
if m:
# print(f"ketemu m dg group: {m.groups()}")
jumlah = m.group(1)
jumlah = int(jumlah)
result = latest_files(basedir, jumlah)
# jk ada words utk ngefilter hasil ls by time
allfilters = m.group(2)
if allfilters:
"""
di sini tentu pake any
"""
splittedfilters = allfilters.split()
# print(f"splitted: {splittedfilters}")
result = [
item
for item in result
if any([word for word in splittedfilters if word in item])
]
# print(f"result: {result}")
if cetak_waktu:
# print(f"sblm timeify")
result_with_time = timeify_filelist(
result
) # latest_files_with_time(basedir, jumlah)
# print(f"sblm print list warna")
print_list_warna(result_with_time)
return result_with_time
else:
print_list_warna(result)
return result
def sort_filelist_tuple(filelist):
berwaktu = [(os.stat(path)[ST_MTIME], path) for path in filelist]
return sorted(berwaktu, reverse=True)
def find_patterns(code, basedir, config=None, kurangi_prefix=None, returning=False):
"""
./wmc.py:
find_patterns(code.replace(';', '', 1), self.konfigurasi.cwd(), self.konfigurasi.config)
config = app.configuration/Configuration/self.all_configs
config['find_dirs_also']
config['case_insensitive_pattern_search']
config['maximum_result']
config['last_result']
;pat -antipat pat2 -antipat2
patterns+antipatterns
ternyata ini hanya cari file
jk ada code yg berupa dir maka tdk masuk result
"""
from .printutils import print_list
if not config:
# from .configuration import Configuration
# config = Configuration.config
# TypeError: 'property' object is not subscriptable
# config = Configuration.all_configs
# AttributeError: type object 'Configuration' has no attribute 'all_configs'
find_dirs_also = False
case_insensitive_pattern_search = True
maximum_result = 100
else:
find_dirs_also = config["find_dirs_also"]
case_insensitive_pattern_search = config["case_insensitive_pattern_search"]
maximum_result = config["maximum_result"]
if not code:
print("cara gunakan: KODE <pat> <pat> -<antipat> -<antipat>...")
return
code = code.strip()
sort_mtime = False
if code.endswith("|"): # sort mtime
sort_mtime = True
code = code.rstrip("|")
# print(f"[dirutils] Searching [{code}] in {basedir}.")
code = code.split()
if not code:
print("cara gunakan: KODE <pat> <pat> -<antipat> -<antipat>...")
return
antipatterns = [
item.replace("-", "", 1) for item in code if re.match(r"^-[\w\d]+", item)
]
patterns = [item for item in code if not re.match(r"^-[\w\d]+", item)]
if antipatterns:
if case_insensitive_pattern_search:
result = walk_fullpath_antipattern_case_insensitive(
basedir,
patterns=patterns,
antipatterns=antipatterns,
find_dirs_also=find_dirs_also,
)
else:
result = walk_fullpath_antipattern(
basedir,
patterns=patterns,
antipatterns=antipatterns,
find_dirs_also=find_dirs_also,
)
else:
result = walk_fullpath_pattern(basedir, code, find_dirs_also=find_dirs_also)
tuple_result = []
if sort_mtime:
# print('masuk sort_mtime')
# from utils import sort_filelist_tuple
# kembalian sort_filelist berbentuk: filepath<delim>waktu
tuple_result = sort_filelist_tuple(
[item.rstrip("*") if item.endswith("*") else item for item in result]
)
# print(f'akhir sort_mtime => {tuple_result}')
original_result = [pair[1] for pair in tuple_result] # [filepath]
result = [pair[0] for pair in tuple_result] # [filepath<delim>waktu]
else:
original_result = result
if kurangi_prefix:
result = kurangi_list(result, kurangi_prefix)
# short result adlh jumlah dibatasi smp 100 utk di-print
short_result = (
result
if len(result) < maximum_result
else result[:maximum_result] + [f"...({len(result)-maximum_result} more)"]
)
print_list(short_result)
if config:
config["last_result"] = original_result # digunakan utk digit_process
if returning:
return original_result
def replace_inplace(cari, ganti):
os.system(f"perl -p -i -e 's/{cari}/{ganti}/g'")
def walk_filenames(basedir, skip_ends=None, filtered_ends=None):
"""
skip_ends='.pyc' skip file2 berekstensi pyc
filtered_ends='.py' hanya file2 berekstensi py
"""
# return [os.path.join(dp,f) for dp,dn,fn in os.walk(basedir) for f in fn]
# if skip_ends:
# return [os.path.join(dp, f) for dp, _, fn in os.walk(basedir) for f in fn if not f.endswith(skip_ends)]
# elif filtered_ends:
# return [os.path.join(dp, f) for dp, _, fn in os.walk(basedir) for f in fn if f.endswith(filtered_ends)]
return [fn for _, _, fn in os.walk(basedir) if fn] # ada [] utk sebuah dir
def walk_fullpath_relative(basedir, skip_ends=None, filtered_ends=None):
"""
spt walk_fullpath tetapi relative terhadap basedir
skip_ends='.pyc' skip file2 berekstensi pyc
filtered_ends='.py' hanya file2 berekstensi py
"""
# return [os.path.join(dp,f) for dp,dn,fn in os.walk(basedir) for f in fn]
if skip_ends:
return [
os.path.join(dp, f)
for dp, _, fn in os.walk(basedir)
for f in fn
if not f.endswith(skip_ends)
]
elif filtered_ends:
return [
os.path.join(dp, f)
for dp, _, fn in os.walk(basedir)
for f in fn
if f.endswith(filtered_ends)
]
return [
os.path.relpath(os.path.join(dp, f), basedir)
for dp, _, fn in os.walk(basedir)
for f in fn
]
def walk_fullpath(basedir, skip_ends=None, filtered_ends=None):
"""
skip_ends='.pyc' skip file2 berekstensi pyc
filtered_ends='.py' hanya file2 berekstensi py
"""
# return [os.path.join(dp,f) for dp,dn,fn in os.walk(basedir) for f in fn]
if skip_ends:
return [
os.path.join(dp, f)
for dp, _, fn in os.walk(basedir)
for f in fn
if not f.endswith(skip_ends)
]
elif filtered_ends:
return [
os.path.join(dp, f)
for dp, _, fn in os.walk(basedir)
for f in fn
if f.endswith(filtered_ends)
]
return [os.path.join(dp, f) for dp, _, fn in os.walk(basedir) for f in fn]
def walk_fullpath_skipdirs(basedir, skipdirs=[".git, __pycache__"]):
"""
khusus walk "files", tdk/bukan "dirs"
"""
denorm = [
os.path.join(dp, f)
for dp, _, fn in os.walk(basedir)
for f in fn
if not any([exc for exc in skipdirs if exc in dp])
]
return [os.path.normpath(item) for item in denorm]
def walk_fulldirs(basedir, skipdirs=[".git"], cover=True):
"""
khusus walk "dirs", tdk/bukan "files"
"""
denorm = [
os.path.join(root, d)
for root, dirs, files in os.walk(basedir)
for d in dirs
if not any([exc for exc in skipdirs if exc in dirs])
]
if cover:
# return [ "["+os.path.normpath(item)+"]" for item in denorm]
return [os.path.normpath(item) + "*" for item in denorm]
else:
return [os.path.normpath(item) for item in denorm]
def walk_full_paths_dirs(basedir, skipdirs=[".git", "__pycache__"]):
"""
ada
walk_fullpath_skipdirs
walk_full_paths_dirs
ini spt walk_fullpath_skipdirs tapi gak skip dirs!!!
"""
files = walk_fullpath_skipdirs(basedir, skipdirs)
dirs = walk_fulldirs(basedir, skipdirs)
return sorted(dirs + files)
def walk_get_files_dirs(basedir, skipdirs=[".git", "__pycache__"]):
return walk_full_paths_dirs(basedir, skipdirs)
def walk_fullpath_pattern(
basedir, patterns=None, pathsep="/", combine_pattern=True, find_dirs_also=False
):
"""
patterns bisa str atau list
gunakan pathsep utk nyatakan dir/file
jk combine_pattern = T dan pats = list maka hanya find yg filenya berisi semua patterns specified
otherwise, any pattern di patterns akan match
"""
# print(f"input {patterns}.")
if isinstance(patterns, str):
patterns = [patterns.replace(pathsep, os.sep)]
elif isinstance(patterns, list):
patterns = [item.replace(pathsep, os.sep) for item in patterns]
# print(f"processing {patterns}.")
walker = walk_get_files_dirs if find_dirs_also else walk_fullpath_skipdirs
allfiles = walker(basedir)
if combine_pattern:
selected = filter(lambda f: all([item in f for item in patterns]), allfiles)
else:
selected = filter(lambda f: any([item in f for item in patterns]), allfiles)
selected = list(selected)
# print(f"files {allfiles}.")
# print(f"returning {selected}.")
return selected
def walk_fullpath_antipattern(
basedir,
patterns=None,
antipatterns=None,
pathsep="/",
combine_pattern=True,
find_dirs_also=False,
):
"""
combine_pattern:
semua pattern yg dispecify hrs masuk ke kriteria pencarian
patterns utk yg diperkenankan ada
antipatterns utk semua yg tdk diperkenankan ada
"""
print(f"[dirutils] walk_fullpath_antipattern + {patterns} and - {antipatterns}.")
if isinstance(patterns, str):
patterns = [patterns.replace(pathsep, os.sep)]
elif isinstance(patterns, list):
patterns = [item.replace(pathsep, os.sep) for item in patterns]
# print(f"processing {patterns}.")
# ini fungsi utama walker: walker find files saja atau dirs juga
walker = walk_get_files_dirs if find_dirs_also else walk_fullpath_skipdirs
allfiles = walker(basedir)
# allfiles = walk_fullpath_skipdirs(basedir) # ini gak kita pake dong
if combine_pattern:
"""
ini bukannya all(patterns + antipatterns) gampangnya?
err...antipatterns hrs dibuang, patterns hrs diambil
"""
# ambil file2 dari daftar yg pattern ada dlm nama file tsb
pre_selected = filter(
lambda fullpath: all([item in fullpath for item in patterns]), allfiles
)
# print('all+preselected, allfiles#', len(allfiles), 'preselected#', len(pre_selected), 'patterns:', patterns)
# dari daftar tersebut, hilangkan yg ada antipatternsnya
selected = filter(
lambda fullpath: all([item not in fullpath for item in antipatterns]),
pre_selected,
)
# print('all+selected, allfiles#', len(allfiles), 'selected#', len(selected), 'patterns:', patterns)
else:
"""
bukannya bisa any(patterns+antipatterns)
err...antipatterns hrs dibuang, patterns hrs diambil
"""
pre_selected = filter(lambda f: any([item in f for item in patterns]), allfiles)
selected = filter(
lambda fullpath: any([item not in fullpath for item in antipatterns]),
pre_selected,
)
selected = list(selected)
# print(f"files {allfiles}.")
# print(f"returning {selected}.")
return selected
def walk_fullpath_pattern_case_sensitive(
basedir,
patterns=None,
antipatterns=None,
pathsep="/",
combine_pattern=True,
find_dirs_also=False,
):
"""
patterns bisa str atau list
gunakan pathsep utk nyatakan dir/file
jk combine_pattern = T dan pats = list maka hanya find yg filenya berisi semua patterns specified
otherwise, any pattern di patterns akan match
"""
# print(f"input {patterns}.")
if isinstance(patterns, str):
patterns = [patterns.replace(pathsep, os.sep)]
elif isinstance(patterns, list):
patterns = [item.replace(pathsep, os.sep) for item in patterns]
# print(f"processing {patterns}.")
walker = walk_get_files_dirs if find_dirs_also else walk_fullpath_skipdirs
allfiles = walker(basedir)
if combine_pattern:
pre_selected = filter(
lambda arsip: all([item in arsip for item in patterns]), allfiles
)
selected = filter(
lambda arsip: all([item not in arsip for item in antipatterns]),
pre_selected,
)
else:
pre_selected = filter(
lambda arsip: any([item in arsip for item in patterns]), allfiles
)
selected = filter(
lambda arsip: any([item not in arsip for item in antipatterns]),
pre_selected,
)
selected = list(selected)
# print(f"files {allfiles}.")
# print(f"returning {selected}.")
return selected
def walk_fullpath_antipattern_case_insensitive(
basedir,
patterns=None,
antipatterns=None,
pathsep="/",
combine_pattern=True,
find_dirs_also=False,
):
"""
patterns utk yg diperkenankan ada
antipatterns utk semua yg tdk diperkenankan ada
find_dirs_also
item/
item.exe
akan diperoleh dari search "item"
"""
print(
f"[dirutils] walk_fullpath_antipattern_case_insensitive + {patterns} and - {antipatterns}, all over any: {combine_pattern}."
)
if isinstance(patterns, str):
patterns = [patterns.replace(pathsep, os.sep)]
elif isinstance(patterns, list):
patterns = [item.replace(pathsep, os.sep) for item in patterns]
# print(f"processing {patterns}.")
walker = (
walk_get_files_dirs if find_dirs_also else walk_fullpath_skipdirs
) # ini fungsi utama walker
allfiles = walker(basedir)
# allfiles = walk_fullpath_skipdirs(basedir) # ini gak kita pake dong
if combine_pattern:
# semua pattern hrs dipenuhi
pre_selected = filter(
lambda arsip: all([item.lower() in arsip.lower() for item in patterns]),
allfiles,
)
selected = filter(
lambda arsip: all(
[item.lower() not in arsip.lower() for item in antipatterns]
),
pre_selected,
)
else:
# any pattern dipenuhi saja cukup
pre_selected = filter(
lambda arsip: any([item.lower() in arsip.lower() for item in patterns]),
allfiles,
)
selected = filter(
lambda arsip: any(
[item.lower() not in arsip.lower() for item in antipatterns]
),
pre_selected,
)
selected = list(selected)
# print(f"files {allfiles}.")
# print(f"returning {selected}.")
return selected
def make_folder(folder, basedir=".", silent=False):
"""
silent = no exception if exist
"""
folderpath = os.path.join(basedir, folder)
if not os.path.isdir(folderpath):
if silent:
create_dir_silent(folderpath)
else:
create_dir(folderpath)
def create_dir_silent(folder):
"""
https://stackoverflow.com/questions/273192/how-can-i-safely-create-a-nested-directory-in-python
mkpath creates the nested directory, and does nothing if the directory already exists.
This works in both Python 2 and 3.
"""
import distutils.dir_util
distutils.dir_util.mkpath(folder)
def create_dir_with_parent(folder):
"""
pathlib.Path.mkdir as used above recursively creates the directory
and does not raise an exception if the directory already exists.
If you don't need or want the parents to be created,
skip the parents argument.
"""
pathlib.Path(folder).mkdir(parents=True, exist_ok=True)
def create_dir(folder):
try:
os.makedirs(folder)
except OSError as e:
if e.errno != errno.EEXIST and os.path.isdir(folder):
raise
def create_if_empty_dir(dirpath):
if not os.path.exists(dirpath):
create_dir(dirpath)
def kurangi(banyak, dikit):
"""
pada dasarnya ini relpath yg terima (lokasi-input, starting-dir)
/a/b/c/d, /a/b
hasilkan: c/d
"""
# ini gak elegant
# return banyak.replace(dikit, '', 1).strip()
return os.path.relpath(banyak, dikit)
def kurangi_list(the_list, dikit):
"""
utk list of filepaths, kita kurangi
"""
return [kurangi(item, dikit) for item in the_list]
def first_part_of_relative_dir(filepath, starting_dir):
"""
filepath: /a/b/c/d
starting_dir: /a
kita pengen terima "basename": b
satu = '/a/b/c/d/'
dua = '/a'
Path.relative_to(Path(satu), Path(dua))
=> WindowsPath('b/c/d')
Path.relative_to(Path(satu), Path(dua)).parts[0]
=> b
"""
from pathlib import Path
b = Path.relative_to(Path(filepath), Path(starting_dir)).parts[0]
return b
def get_latest_file_in_dir(basedir=HOMEDIR):
"""
kita pengen: screenshot -> file -> base64
import (image magick)
gnome-screenshot -a -f namafile -> subprocess sampai errno success
"""
import glob
list_of_files = glob.glob(f"{basedir}/*")
latest_file = max(list_of_files, key=os.path.getctime)
# print(latest_file)
return latest_file
def within_same_folder(filepath, filename):
return joiner(here(filepath), filename)
def first_part(dirpath):
"""
/a/b/c/d -> /a
"""
bagian = pathlib.Path(dirpath).parts
if len(bagian) >= 2:
return bagian[0] + bagian[1]
return ""
def is_windows_drive(filepath):
"""
c:\...
"""
return re.match(r"^[c-z]:", filepath.lower())
def is_wsl_drive(filepath):
return not is_windows_drive(filepath) | yulibrary | /yulibrary-0.0.2-py3-none-any.whl/langutils/app/dirutils.py | dirutils.py |
from anytree import AnyNode, AsciiStyle, Node, PreOrderIter, RenderTree
from anytree.importer import DictImporter, JsonImporter
from anytree.search import find, findall
from .stringutils import tabify_contentlist
from .usutils import tab_tab
def get_root(node):
if not hasattr(node, "parent") or not node.parent:
return node
return get_root(node.parent)
def get_parent(node, match_condition=lambda x: x.counter == 0, pass_condition=None):
"""
    match_condition is e.g. lambda x: hasattr(x, 'type') and x.type == 'RootNode'
    it could of course also be lambda x: x.counter == -1 and x.name == 'info' to reach the node holding %__TEMPLATE_key=value
    how it works:
    if a node satisfies match_condition, return that node
    if a node has no parent, return that node
    pass_condition does not seem to be needed yet:
    if a node satisfies pass_condition, we would skip that node and pass its parent on for processing
"""
if match_condition(node):
return node
    # wouldn't this end up returning every node, since none of them would have a parent???
    # or does AnyNode assign a parent automatically? it seems so; see the recursion condition here
if not hasattr(node, "parent") or not node.parent:
return node
return get_parent(node.parent, match_condition, pass_condition)
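# Illustrative usage sketch (not part of the original module): a tiny AnyNode tree
# showing how get_root and get_parent behave; the attribute names used here
# (counter, name) are assumptions mirroring the defaults above.
def _example_get_root_and_parent():  # pragma: no cover - example only
    root = AnyNode(counter=0, name="root")
    child = AnyNode(counter=1, name="child", parent=root)
    leaf = AnyNode(counter=2, name="leaf", parent=child)
    assert get_root(leaf) is root
    # the default match_condition stops at the node whose counter == 0
    assert get_parent(leaf) is root
    # a custom match_condition can stop earlier in the chain
    assert get_parent(leaf, match_condition=lambda x: x.name == "child") is child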
def get_all_parent_variables(node, result=None):
    """
    The owner of the vars is often not on the same branch as the node that needs
    them... so a plain upward walk would not find them; once at the root we also
    merge the variables of the root's direct children.
    """
    if result is None:  # avoid sharing a mutable default dict between calls
        result = {}
    if node.counter != 0:
        # print('[get_all_parent_variables] process node:', node.counter)
        if hasattr(node, "variables"):
            result.update(node.variables)
        return get_all_parent_variables(node.parent, result)
    # we are now at the root: first process all direct children of the root
    # yes, this works
    for anak_kandung in get_direct_children(node):
        if hasattr(anak_kandung, "variables"):
            result.update(anak_kandung.variables)
    return result
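# Illustrative usage sketch (not part of the original module): nodes are assumed to
# carry a 'counter' and optionally a 'variables' dict; ancestor variables are merged
# and, once at the root, the variables of the root's direct children as well.
def _example_get_all_parent_variables():  # pragma: no cover - example only
    root = AnyNode(counter=0, name="root")
    info = AnyNode(counter=1, name="info", parent=root, variables={"APP": "demo"})
    branch = AnyNode(counter=2, name="branch", parent=root, variables={"LANG": "py"})
    leaf = AnyNode(counter=3, name="leaf", parent=branch)
    merged = get_all_parent_variables(leaf, {})
    # 'LANG' comes from the ancestor branch, 'APP' from the root's child 'info'
    assert merged == {"LANG": "py", "APP": "demo"}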
def item_workdir_has_input(item):
"""
/input/
    /input <- we check this one first
"""
from .utils import env_get
return "/" + env_get("ULIBPY_FMUS_INPUT_KEYWORD") in item.workdir
def replace_if_input_and_parent_is_dir(item):
"""
AnyNode(counter=7,
level=3,
name='proshop4',
old_name='__TEMPLATE_APPNAME',
operations=['create_dir'],
original='__TEMPLATE_APPNAME,d(/mk)',
type='dir',
workdir='/mnt/c/work/oprek/fshelp/fslang/utils/proshop4')
item.workdir = /mnt/c/work/oprek/fshelp/fslang/utils/input/__init__.py
"""
from .dirutils import ayah
from .printutils import indah4
from .utils import env_get
has_dir_parent = False
kembali = item.workdir
if hasattr(item, "parent") and item.parent:
if item.parent.type == "dir" and ayah(item.parent.workdir, 1) == ayah(
item.workdir, 2
):
# extra checking
if item.parent.name != env_get("ULIBPY_FMUS_INPUT_KEYWORD"):
has_dir_parent = True
# indah4(f"""
# has_dir_parent = {has_dir_parent}
# item.parent.type = {item.parent.type}
# item.parent.name = {item.parent.name}
# ayah(item.parent.workdir,1) = {ayah(item.parent.workdir,1)}
# ayah(item.workdir,2) = {ayah(item.workdir,2)}
# """, warna='yellow', layar='green')
if (
ayah(item.workdir, 1).endswith(env_get("ULIBPY_FMUS_INPUT_KEYWORD"))
and has_dir_parent
):
kembali = kembali.replace(
env_get("ULIBPY_FMUS_INPUT_KEYWORD"), item.parent.name
)
# indah4(f"[replace_if_input_and_parent_is_dir] ganti nama dari {env_get('ULIBPY_FMUS_INPUT_KEYWORD')} ke {item.parent.name}", warna='white', layar='blue')
return kembali
def replace_workdir__INPUT__with_value_from_parent(item):
    """
    item:
        workdir='C:\\work\\tmp\\__INPUT__\\package.json'
    parent:
        workdir='C:\\work\\tmp\\emih'
                 ^^^^^^^^^^^^   ^^^^^^
                 same           different
    """
from .dirutils import ayah, basename
from .utils import env_get
parent = item.parent
input_keyword = env_get("ULIBPY_FMUS_INPUT_KEYWORD")
skip_input_item = ayah(item.workdir, 2) # C:\\work\\tmp
skip_input_parent = ayah(parent.workdir, 1) # C:\\work\\tmp
pengganti_dari_parent = basename(parent.workdir) # emih
current_item_parent = ayah(item.workdir, 1) # C:\\work\\tmp\\__INPUT__
yang_mau_diganti_dari_item = basename(current_item_parent) # __INPUT__
    workdir_baru = item.workdir  # fallback in case the replacement below does not apply
if (skip_input_item == skip_input_parent) and (
yang_mau_diganti_dari_item == input_keyword
):
workdir_baru = item.workdir.replace(input_keyword, pengganti_dari_parent)
return workdir_baru
def get_all_tree_children(akar):
children = []
def get_children(root):
if len(root.children) > 0:
for anak in root.children:
children.append(anak)
get_children(anak)
get_children(akar)
return children
def get_last_absolute_children(akar):
children = get_all_tree_children(akar)
if len(children) > 0:
return children[-1]
return None
def get_direct_children(akar):
return [node for node in akar.children]
def get_last_direct_children(akar):
children = [node for node in akar.children]
if len(children) > 0:
return children[-1]
return None
def set_attr_direct_children(akar, attribute, value):
for node in akar.children:
setattr(node, attribute, value)
def set_attr_direct_children_cond(akar, attribute, value_yes, value_no, condition):
for node in akar.children:
setattr(node, attribute, value_yes if condition else value_no)
def print_ready_children(item):
children = get_all_tree_children(item)
print_ready = [node.level * " " + node.original for node in children]
print_ready = "\n".join(print_ready)
return print_ready
def get_siblings_all(akar, include_me=True):
if include_me:
return [node for node in akar.parent.children]
else:
return [node for node in akar.parent.children if node != akar]
def get_siblings_before(akar):
    """
    Return the siblings that come before this node, e.g.:
    a  include
    b  include
    c  include
    d  <- me (akar)
    e
    """
    # aku = -1
    # for index, node in enumerate(akar.parent.children):
    #     if node == akar:
    #         aku = index
    #         break
    # take my own index
    aku = [index for index, item in enumerate(akar.parent.children) if item == akar][0]
    return akar.parent.children[:aku]
def get_siblings_after(akar):
aku = [index for index, item in enumerate(akar.parent.children) if item == akar][0]
return akar.parent.children[aku + 1 :]
def get_previous_sibling(node):
if not hasattr(node, "parent"):
return None
aku = [index for index, item in enumerate(node.parent.children) if item == node][0]
if aku == 0:
return None
return node.parent.children[aku - 1]
def get_tables(root):
node_tables = (
lambda node: hasattr(node, "name")
and node.name == "table"
and node.type == "table"
)
tables = findall(root, node_tables)
return tables
## helpers related to lark trees
def data(tree):
return tree.data
def anak(tree):
return tree.children
# note: bapak() below does not work for lark Tree objects; a lark Tree has no parent attribute
import lark
def tipedata(tree):
return type(tree)
def ispohon(tree):
return isinstance(tree, lark.tree.Tree)
def istoken(tree):
return isinstance(tree, lark.lexer.Token)
def isnode(tree):
return isinstance(tree, AnyNode)
def bapak(tree):
return tree.parent
def sebanyak(tree, n=0):
return len(tree.children) == n
def jumlahanak(tree):
return len(tree.children)
def beranak(tree, n=0):
    "caution: this answers a boolean question (more than n children?); it does not return an integer count"
    return len(tree.children) > n
def child(tree, n=0):
    if beranak(tree):
        if not n:  # asking for the first child
            return child1(tree)
        # now n > 0
        if jumlahanak(tree) >= n:
            return tree.children[n - 1]
        # e.g. asked for the 2nd child but there is only 1 child
        return None
    return None
def child1(tree):
    """
    tree = mytree
    |_ anak1 = anak2 = anak3 -> mytree
    here mytree has child1, child2, and child3
    """
    return tree.children[0]
def child2(tree):
return tree.children[1]
def child3(tree):
return tree.children[2]
def child4(tree):
return tree.children[3]
def child5(tree):
return tree.children[4]
def chdata(tree, n=0):
    """
    Return the data of child number n (1-based).
    chdata(tree, 2) asks for the data of child no 2 (children[1]);
    make sure the tree has at least 2 children.
    """
    if beranak(tree):
        if not n:  # chdata(sometree) asks for the first child, just like chdata1
            return chdata1(tree)
        if jumlahanak(tree) >= n:
            """
            chdata(tree, 4): ask for the data of child no 4
            """
            return data(tree.children[n - 1])
        # e.g. asked for data no 2 while there is only 1 child
        return None
    return None
def chdata1(tree):
if beranak(tree):
return data(child1(tree))
return None
def chdata0(tree):
    """
    Data of the last child.
    """
    if beranak(tree):
        # use the child count here, not beranak(), which returns a boolean
        total = jumlahanak(tree)
        return data(tree.children[total - 1])
    return None
def token(tree, token_index=0, jenis="str"):
"""
tree value
"""
if jenis == "int":
return int(tree.children[token_index])
if jenis == "float":
return float(tree.children[token_index])
if jenis == "bool":
return bool(tree.children[token_index])
return str(tree.children[token_index])
def chtoken(tree, n=0):
    """
    Only for direct children!
    tree
        child1 token
        child2 token
        child3 token
    not for:
    tree
        child1
            child1  <- this is not child2 !!
    """
    if beranak(tree):
        if not n:
            # asking for the token of the first child;
            # chtoken(tree, n=0) is equivalent to chtoken(tree, n=1)
            return token(child1(tree))
        # now n > 0
        if jumlahanak(tree) >= n:
            # if n = 2 is requested, the child count must be >= 2
            return token(child(tree, n))
        return None
    return None
def tables_from_rootnode(RootNode):
""" """
node_tables = (
lambda node: hasattr(node, "model")
and node.name == "table"
and node.type == "table"
)
tables = findall(RootNode, node_tables)
return tables
def get_first_column(TableNode, get_label=True):
"""
app.transpiler.frontend.fslang.django.__init__
"""
if get_label:
return TableNode.children[0].label
return TableNode.children[0]
column_assignment_doc = """
These templates apply to a single table/tablename/document.
paramlist
    first, second, third
paramlist_value
    first=first, second=second, third=third
pydict
    "first": first, "second": second, "third": third
pydict_first
    only the first column: "first": first
pydict_rest
    without the first column: "second": second, "third": third
paramlist_type (takes tabify and delimiter options)
    PARAMLIST_COMMA
        delimiter ", "
        first: string, second: string, third: int
    PARAMLIST_NEWLINE0
        delimiter \n
        first: string
        second: string
        third: int
    PARAMLIST_NEWLINE1
        tabify 1 + delimiter \n
        first: string
        second: string
        third: int
"""
ASSIGNMENT_FIRSTCOLUMN = "__TEMPLATE_ASSIGNMENT_FIRSTCOLUMN"
ASSIGNMENT_PARAMLIST_SIMPLE = "__TEMPLATE_ASSIGNMENT_PARAMLIST_SIMPLE"
ASSIGNMENT_PARAMLIST_VALUE = "__TEMPLATE_ASSIGNMENT_PARAMLIST_VALUE"
ASSIGNMENT_PYDICT_ALL = "__TEMPLATE_ASSIGNMENT_PYDICT_ALL"
ASSIGNMENT_PYDICT_FIRST = "__TEMPLATE_ASSIGNMENT_PYDICT_FIRST"
ASSIGNMENT_PYDICT_REST = "__TEMPLATE_ASSIGNMENT_PYDICT_REST"
ASSIGNMENT_PARAMLIST_COMMA = "__TEMPLATE_ASSIGNMENT_PARAMLIST_COMMA"
ASSIGNMENT_PARAMLIST_NEWLINE0 = "__TEMPLATE_ASSIGNMENT_PARAMLIST_NEWLINE0"
ASSIGNMENT_PARAMLIST_NEWLINE1 = "__TEMPLATE_ASSIGNMENT_PARAMLIST_NEWLINE1"
ASSIGNMENT_PARAMLIST_PREFIX = "__TEMPLATE_ASSIGNMENT_PARAMLIST"
def assignment_paramlist_type(
TableNode, pemetaan=None, delimiter=", ", num_tab=0, tabber=tab_tab
):
from app.libpohon.handlers import type_mapper_by_provider
result = []
for col in TableNode.children:
if pemetaan:
jenis = type_mapper_by_provider[pemetaan][col.type]
else:
jenis = col.type
entry = f"{col.label}: {jenis}"
result.append(entry)
result = tabify_contentlist(result, self_tab=tabber(num_tab), delim=delimiter)
return result
def assignment_firstcolumn(TableNode):
return TableNode.children[0].label
def assignment_paramlist(TableNode):
""" """
paramlist = []
for i, col in enumerate(TableNode.children):
paramlist.append(col.label)
return ", ".join(paramlist)
def assignment_paramlist_value(TableNode):
""" """
paramlist = []
for i, col in enumerate(TableNode.children):
nilai = f"{col.label}={col.label}"
paramlist.append(nilai)
return ", ".join(paramlist)
def assignment_pydict_all(TableNode):
""" """
paramlist = []
for i, col in enumerate(TableNode.children):
nilai = f'"{col.label}": {col.label}'
paramlist.append(nilai)
return ", ".join(paramlist)
def assignment_pydict_first(TableNode):
""" """
col_label = TableNode.children[0].label
return f'"{col_label}": {col_label}'
def assignment_pydict_rest(TableNode):
    """ """
    # if there is only 1 column, skip this special case...
    # if len(TableNode.children) == 1:
    #     col_label = TableNode.children[0].label
    #     return f'"{col_label}": {col_label}'
paramlist = [
f'"{col.label}": {col.label}'
for i, col in enumerate(TableNode.children)
if i > 0
]
return ", ".join(paramlist) | yulibrary | /yulibrary-0.0.2-py3-none-any.whl/langutils/app/treeutils.py | treeutils.py |
import os, re
from .utils import (
env_exist, env_get, env_int, perintahsp_outerr_as_shell, platform
)
from .dirutils import normy
GREP_COMMAND = 'grep'
if platform() in ['win32', 'windows']:
# GREP_COMMAND = 'wsl grep'
GREP_COMMAND = 'C:/work/usr/local/wbin/grep.exe'
if env_exist('ULIBPY_GREP_UTIL_LOCATION'):
GREP_COMMAND = env_get('ULIBPY_GREP_UTIL_LOCATION')
else:
print(f"set ULIBPY_GREP_UTIL_LOCATION to location of grep.exe or use {GREP_COMMAND} as current setting")
def curdir_grep(basedir, pattern, case_sensitive=False, context=0, before=0, after=0, capture=False, no_color=False):
context_opts = ''
if before:
context_opts = f"-B {before}"
if after:
context_opts += f"{' ' if context_opts else ''}-A {after}"
if context:
context_opts = f"-C {context}"
# print('A:', after, 'B:', before, 'C:', context, '=>', context_opts)
basedir = normy(basedir)
# basedir = basedir.replace('\\', '/')
print(f'''[greputils]
basedir = {basedir}
''')
skip_binary = "-I"
color_opt = '' if no_color else ' --color=always -s'
    main_opts = f"-n {skip_binary}{color_opt}"  # -s: be silent when grepping a directory
if ' ' in basedir:
basedir = f'"{basedir}"'
if env_int('ULIBPY_WMC_CASESENSITIVE_GREP'):
case_sensitive = True
all_opts = f'{GREP_COMMAND} {"" if case_sensitive else "-i"} {main_opts} {context_opts} -e "{pattern}" {basedir}/*'
# os.system('pwd')
# print('curdir {GREP_COMMAND}', pattern, '->', all_opts)
if capture:
return perintahsp_outerr_as_shell(all_opts)
else:
os.system(all_opts)
return None
def system_grep(basedir,
pattern,
case_sensitive=False,
context=0,
before=0,
after=0,
capture=False,
no_color=False):
context_opts = ''
if before:
context_opts = f"-B {before}"
if after:
context_opts += f"{' ' if context_opts else ''}-A {after}"
if context:
context_opts = f"-C {context}"
# print('A:', after, 'B:', before, 'C:', context, '=>', context_opts)
basedir = normy(basedir)
# basedir = basedir.replace('\\', '/')
print(f'''[greputils]
basedir = {basedir}
''')
skip_binary = "-I"
color_opt = '' if no_color else ' --color=always'
if platform() in ['win32', 'windows', 'desktop']:
color_opt = ''
main_opts = f"-n {skip_binary}{color_opt} -r"
if ' ' in basedir:
basedir = f'"{basedir}"'
if env_int('ULIBPY_WMC_CASESENSITIVE_GREP'):
case_sensitive = True
all_opts = f'{GREP_COMMAND} {"" if case_sensitive else "-i"} {main_opts} {context_opts} -e "{pattern}" {basedir}'
# print(f'{GREP_COMMAND} system:', all_opts)
if capture:
return perintahsp_outerr_as_shell(all_opts)
else:
os.system(all_opts)
return None
def system_grep_limitchars(basedir,
pattern,
limit=10,
case_sensitive=False,
capture=False,
no_color=False):
"""
N=10; grep -roP ".{0,$N}\Wactor.{0,$N}" .
N=limit; grep -roP ".{0,$N}" +pattern+ ".{0,$N}" basedir
-P adlh perl style dg .{0,$bilangan}
di sini kita gunakan
grep -i -I --color=always -ro -P ".{0,n}pattern.{0,n}" basedir
"""
skip_binary = "-I"
color_opt = '' if no_color else ' --color=always'
if platform() in ['win32', 'windows', 'desktop']:
color_opt = ''
main_opts = f"{skip_binary}{color_opt} -ro"
# main_opts = f"{skip_binary} --color=always -ro"
if ' ' in basedir:
basedir = f'"{basedir}"'
if env_int('ULIBPY_WMC_CASESENSITIVE_GREP'):
case_sensitive = True
all_opts = f'{GREP_COMMAND} {"" if case_sensitive else "-i"} {main_opts} -P ".{{0,{limit}}}{pattern}.{{0,{limit}}}" {basedir}'
# print(f'{GREP_COMMAND} limit:', all_opts)
if capture:
return perintahsp_outerr_as_shell(all_opts)
else:
os.system(all_opts)
return None
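# Illustrative sketch (assumption, not part of the original module): how the
# limited-context grep helper might be invoked. The directory and pattern are
# invented for this demo, and capture=True requires a working grep binary.
def _contoh_system_grep_limitchars():
    # look for "actor" with at most 20 characters of context on each side
    out, err = system_grep_limitchars("/tmp", "actor", limit=20, capture=True)
    return out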
def system_find(basedir, pattern, opts=None, capture=False):
    """
    find wrapper; we wrap the pattern in wildcards ourselves: *pattern*
    """
    case_sensitive = 'name'
if env_int('ULIBPY_WMC_CASESENSITIVE_GREP'):
case_sensitive = 'iname'
all_opts = f'find {basedir} -{case_sensitive} "*{pattern}*"'
if capture:
return perintahsp_outerr_as_shell(all_opts)
else:
os.system(all_opts)
return None
def pattern_search(filepath, code):
"""
"""
from .fileutils import file_lines
all_lines = file_lines(filepath)
# antipatterns = [item.replace('-','',1) for item in code if re.match(r'^-[\w\d]+', item)]
# patterns = [item for item in code if not re.match(r'^-[\w\d]+', item)]
# pre_selected = filter(lambda baris: all(
# [item.lower() in baris.lower() for item in patterns]), all_lines)
# selected = filter(lambda baris: all(
# [item.lower() not in baris.lower() for item in antipatterns]), pre_selected)
# selected = list(selected)
# selected = '\n'.join(selected)
# return selected
return pattern_search_list(all_lines, code)
def pattern_search_list(all_lines, code, aslist=False):
    """
    Search the list all_lines using `code`, a mix of include (+) and exclude (-) terms,
    e.g. against [satu_pat1, dua_pat2, tiga_pat3, empat_pat4, lima_pat5]:
    +pat1
    -pat2
    +pat3
    -pat4
    """
    if isinstance(code, str):
        code = [item.strip() for item in code.split()]
    # code is now a list of search terms
    antipatterns = [item.replace('-', '', 1) for item in code if re.match(r'^-[\w\d]+', item)]
    patterns = [item for item in code if not re.match(r'^-[\w\d]+', item)]
    # step 1: keep only haystack lines that contain every include (+) pattern
    pre_selected = filter(lambda baris: all(
        [item.lower() in baris.lower() for item in patterns]), all_lines)
    # step 2: filter out lines that contain any exclude (-) pattern
    selected = filter(lambda baris: all(
        [item.lower() not in baris.lower() for item in antipatterns]), pre_selected)
    # filter -> list
    selected = list(selected)
    # return selected as a list or stringified
    if aslist:
        return selected
    selected = '\n'.join(selected)
    return selected
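# Illustrative sketch (assumption, not part of the original module): include/exclude
# filtering with pattern_search_list. The sample lines are invented for this demo.
def _contoh_pattern_search_list():
    lines = ["satu_pat1", "dua_pat2", "tiga_pat3"]
    # keep lines containing "pat", then drop lines containing "pat2"
    return pattern_search_list(lines, "pat -pat2", aslist=True)  # -> ['satu_pat1', 'tiga_pat3']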
def pattern_search_string(content, code, aslist=False):
    """
    Search `code` (space-separated + and - terms) line by line inside the string `content`.
    """
return pattern_search_list(content.splitlines(), code, aslist=aslist) | yulibrary | /yulibrary-0.0.2-py3-none-any.whl/langutils/app/greputils.py | greputils.py |
import datetime
import time
from datetime import datetime as DT
from datetime import timedelta
month2 = [
"Jan",
"Feb",
"Mar",
"Apr",
"May",
"Jun",
"Jul",
"Aug",
"Sep",
"Oct",
"Nov",
"Dec",
]
month3 = [
"January",
"February",
"March",
"April",
"May",
"June",
"July",
"August",
"September",
"October",
"November",
"December",
]
def iso():
return datetime.datetime.now().isoformat()
def isofied():
from .stringutils import replace_non_alpha
return replace_non_alpha(datetime.datetime.now().isoformat())
def sekarang():
return datetime.datetime.now()
def today():
return datetime.datetime.today()
def today_ymd():
"""
1978-08-24
"""
return datetime.datetime.today().strftime("%Y-%m-%d")
def today_ydm():
"""
1978-24-08
"""
return datetime.datetime.today().strftime("%Y-%d-%m")
def today_mdy():
"""
08-24-1978
"""
return datetime.datetime.today().strftime("%m-%d-%Y")
def today_dmy():
"""
24-08-1978
"""
return datetime.datetime.today().strftime("%d-%m-%Y")
def waktu_hms():
return datetime.datetime.today().strftime("%H:%M:%S")
def jam_hms():
return datetime.datetime.today().strftime("%H:%M:%S")
def jam_hm():
return datetime.datetime.today().strftime("%H:%M")
def sejam(mulai):
    """
    sejam = "an hour has passed" (since mulai)
    """
    return sekarang() >= mulai + timedelta(hours=1)
def sehari(mulai):
    """
    Check whether a full day has passed relative to the reference time "mulai".
    sehari = "a day has passed" (since mulai)
    """
    return sekarang() >= mulai + timedelta(hours=24)
def beda_shm(s=1, m=0, h=0):
    """
    Return a datetime offset by h:m:s from now.
    beda_shm() = 1 second from now
    """
    return sekarang() + timedelta(hours=h, minutes=m, seconds=s)
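# Illustrative sketch (assumption, not part of the original module): combining the
# helpers above. beda_shm builds a future datetime; sejam/sehari compare a reference
# time against now.
def _contoh_beda_waktu():
    nanti = beda_shm(s=0, m=0, h=8)                        # 8 hours from now
    sudah_sejam = sejam(sekarang() - timedelta(hours=2))   # True: started 2 hours ago
    return nanti, sudah_sejam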
def epoch():
    """
    returns: int (seconds since the Unix epoch)
    """
    epoch_time = int(time.time())
    return epoch_time
def epoch_diff(start, end):
"""
returns: float
"""
return end - start
def int_len(myint):
return len(str(abs(myint)))
def is_epoch_ms(epoch):
    """
    Is this epoch value in milliseconds (13 digits) rather than seconds (10 digits)?
    """
    if int_len(epoch) == 13:
        return True
    return False
def fmt(dt, format=None):
if not format:
return dt.strftime("%Y-%m-%d %H:%M:%S")
return dt.strftime(format)
def epoch2dt(epoch, utc=True):
    """
    See:
    https://stackoverflow.com/questions/12400256/converting-epoch-time-into-the-datetime
    """
    if is_epoch_ms(epoch):
        epoch = epoch / 1000
    if utc:
        return datetime.datetime.utcfromtimestamp(epoch)
    return datetime.datetime.fromtimestamp(epoch)
def epoch2dtstr(epoch, utc=True, format=None):
"""
2021-08-18 12:12:52
current = epoch()
epoch2dtstr(current)
"""
dt = epoch2dt(epoch, utc=utc)
return fmt(dt, format=format)
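# Illustrative sketch (assumption, not part of the original module): telling
# millisecond epochs apart from second epochs, then formatting one as UTC.
def _contoh_epoch():
    assert is_epoch_ms(1629288772000) is True   # 13 digits -> milliseconds
    assert is_epoch_ms(1629288772) is False     # 10 digits -> seconds
    return epoch2dtstr(1629288772)              # -> '2021-08-18 12:12:52' (UTC)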
def format_epoch_longer(epoch, utc=False):
"""
%A Monday
%a monday
"""
# return fmt_epoch('%A, %-m %B %Y, %-H:%M:%S', utc)
format = "%A, %d %B %Y, %H:%M:%S"
return fmt(epoch2dt(epoch, utc), format)
def year():
return DT.now().year
def waktu(mode="year"):
"""
https://stackoverflow.com/questions/28189442/datetime-current-year-and-month-in-python
"""
if mode == "year":
return DT.now().year
elif mode == "month":
return DT.now().month
elif mode == "day":
return DT.now().day
elif mode == "hour":
return DT.now().hour
elif mode == "minute":
return DT.now().minute
elif mode == "second":
return DT.now().second
def timestamp_for_file():
tanggal = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S, %A")
baris = f"[{tanggal}]"
return baris | yulibrary | /yulibrary-0.0.2-py3-none-any.whl/langutils/app/datetimeutils.py | datetimeutils.py |
import json
import re
import string
default_tab = " " * 2 # '\t'
SQ = "'"
DQ = '"'
BT = "`"
_SQ = SQ
_DQ = DQ
_BT = BT
__SQ = SQ
__DQ = DQ
__BT = BT
__SQ__ = SQ
__DQ__ = DQ
__BT__ = BT
QuoteChar = "$$$"
EmptyReplaceQuoteChar = ""
def jsonify(data, indent=4):
return json.dumps(data, indent=indent)
def max_item_len_in_list(the_list):
return max([len(item) for item in the_list])
def email_valid(email):
"""
https://www.geeksforgeeks.org/check-if-email-address-valid-or-not-in-python/
"""
pat = "^(\w|\.|\_|\-)+[@](\w|\_|\-|\.)+[.]\w{2,3}$"
return re.search(pat, email)
def startswith_absolute_folder(text, pattern_suffix=""):
    """
    Returns the re match object (or None), so callers can use .group()/.groups();
    calling .group() on None raises AttributeError: 'NoneType' object has no attribute 'group'.
    """
    if pattern_suffix:
        """
        If a suffix such as ",d" is given, we only answer "yes, starts with an absolute
        path" when text ends with ",d"; only then do we treat text as a path.
        """
        if not text.endswith(pattern_suffix):
            return False
    pat = r"^(\/[^\/]+)+"
    return re.match(pat, text)
def strip0(text, prefix):
return text.removeprefix(prefix).strip()
def strip1(text, suffix):
return text.removesuffix(suffix).strip()
def remove_nondigits(text, replacer=""):
pat = "[^0-9]"
return re.sub(pat, replacer, text)
def hitung(text, char="|"):
    """
    Count the occurrences of char in text.
    """
    return text.count(char)
def ada(text, char):
    return hitung(text, char) > 0
def first_occurrence(text, char, start=0, end=-1):
"""
https://stackoverflow.com/questions/2294493/how-to-get-the-position-of-a-character-in-python/2294502
"""
return text.index(char, start, end)
def splitspace(text, count=1, delim=" "):
"""
count=1
'satu dua tiga empat' => ['satu', 'dua tiga empat']
"""
return text.split(delim, count)
def list_startswith(the_list, the_start, lower=True):
if lower:
return [item for item in the_list if item.lower().startswith(the_start.lower())]
else:
return [item for item in the_list if item.startswith(the_start)]
def list_contains(the_list, the_start, lower=True):
if lower:
return [item for item in the_list if the_start.lower() in item.lower()]
else:
return [item for item in the_list if the_start in item]
def list_stringify(the_list, delimiter="\n", sort=True, prefixer=None, suffixer=None):
if prefixer:
the_list = [prefixer + item for item in the_list]
if suffixer:
the_list = [item + suffixer for item in the_list]
if sort:
return delimiter.join(sorted(the_list))
return delimiter.join(the_list)
def gabung_kunci(the_dict, delimiter="\n", sort=True):
if sort:
return "\n".join(sorted(the_dict.keys()))
return "\n".join(the_dict.keys())
def dari_kanan(sumber, karakter):
return sumber.rfind(karakter)
def punctuation_in_string(text, with_space=False):
allow_underscore = string.punctuation.replace("_", "")
if with_space:
allow_underscore += " "
return [kar in allow_underscore for kar in text]
def get_first_punctuation_index(text, with_space=False):
nonwords = r"[^\w]+"
if with_space:
nonwords = r"[^\w\s]+"
all = re.findall(nonwords, text)
# print('all puncs', all)
if all:
return text.index(all[0])
return None
def is_camel_case(s):
return s != s.lower() and s != s.upper() and "_" not in s
def pluralize(s):
return s.lower() + "s"
def merge_lines(s, joiner="", strip=True):
    """
    joiner can also be a space
    """
    linified = s.splitlines()
    if strip:
        linified = [item.strip() for item in linified]
    return joiner.join(linified)
def escape_quotes(s):
return s.replace('"', '\\"')
def non_empty_lines(lines):
return [item for item in lines if item.strip()]
def tabify_content(content, self_tab=default_tab, num_tab=1, delim="\n"):
tabify = [num_tab * self_tab + item for item in content.splitlines()]
return delim.join(tabify)
def tabify_content_tab(content, num_tab=1, delim="\n"):
from .usutils import tab_tab
return tabify_content(content, self_tab=tab_tab(), num_tab=num_tab, delim=delim)
def tabify_content_space(content, num_tab=1, delim="\n", space_size=2):
from .usutils import tab_space
return tabify_content(
content, self_tab=tab_space(space_size=space_size), num_tab=num_tab, delim=delim
)
def tabify_contentlist(
    content, self_tab=default_tab, num_tab=1, aslist=False, delim="\n", string_ender=""
):
    """
    string_ender is appended to the joined result, useful when you want e.g.:
    a=1,
    b=2,
    """
    tabify = [num_tab * self_tab + item for item in content]
    if aslist:
        return tabify
    return delim.join(tabify) + string_ender
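# Illustrative sketch (assumption, not part of the original module): indenting a
# list of lines with tabify_contentlist.
def _contoh_tabify_contentlist():
    lines = ["a=1", "b=2"]
    # two levels of the default two-space tab, joined with newlines, trailing comma
    return tabify_contentlist(lines, num_tab=2, string_ender=",")
    # -> '    a=1\n    b=2,'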
def tabify_contentlist_tab(
content, num_tab=1, aslist=False, delim="\n", string_ender=""
):
from .usutils import tab_tab
return tabify_contentlist(
content,
self_tab=tab_tab(),
num_tab=num_tab,
aslist=aslist,
delim=delim,
string_ender=string_ender,
)
def tabify_contentlist_space(
content, num_tab=1, aslist=False, delim="\n", string_ender="", space_size=2
):
from .usutils import tab_space
return tabify_contentlist(
content,
self_tab=tab_space(space_size=space_size),
num_tab=num_tab,
aslist=aslist,
delim=delim,
string_ender=string_ender,
)
def left_right_joinify_content(content, left="", middle="\n", right=""):
"""
default: newline-joined
"""
delimiter = left + middle + right
return delimiter.join(content.splitlines())
def left_right_joinify_contentlist(content, left="", middle="\n", right=""):
"""
default: newline-joined
"""
delimiter = left + middle + right
return delimiter.join(content)
def joinify_content(content, delimiter="\n"):
    """
    default: input and output are the same
    """
    return delimiter.join(content.splitlines())
def joinfy_contentlist(content, delimiter="\n"):
return delimiter.join(content)
def clean_list_to_string(alist):
    """
    lst = [1,2,3,4,5] as a list
    str(lst) = ['1','2','3','4','5'] as a string
    clean_list_to_string = [1,2,3,4,5] as a string (quotes stripped)
    """
    return str(alist).replace("'", "")
def dashToCamel(text):
"""
dashToCamel('satu-dua-tiga-empat-lima')
"""
hasil = text
while "-" in hasil:
b = hasil.index("-")
hasil = hasil[:b] + hasil[b + 1].upper() + hasil[b + 2 :]
return hasil
d2C = dashToCamel
def dash_to_camel(text):
return dashToCamel(text)
def sort_list(da_list, panjang_duluan=False):
return sorted(da_list, key=len, reverse=panjang_duluan)
def list_take_shortest(da_list):
if len(da_list) == 1:
return da_list[0]
a = sort_list(da_list)
# print('LTS list:', da_list)
if len(a):
return a[0]
return None
def list_take_longest(da_list):
# print('LTL list:', da_list)
if len(da_list) == 1:
return da_list[0]
a = sort_list(da_list, panjang_duluan=True)
if len(a):
return a[0]
return None
def newlinify(baris):
if not baris.endswith("\n"):
baris += "\n"
return baris
def replace_non_alpha(text, pengganti="_", exclude="."):
    """
    exclude lists the \W characters that should NOT be replaced with _;
    here we keep '.' from being replaced by _
    """
    # return re.sub('\W+', pengganti, text)
    return re.sub(r"[^\w" + exclude + "]", pengganti, text)
def splitstrip0(thelist):
    """
    Split on whitespace, always into ALL fields.
    """
    return [item.strip() for item in thelist.split()]
def splitstrip(thelist, delimiter=" ", maxsplit=-1):
    """
    Allows specifying the delimiter and how many fields to split off.
    """
    return [item.strip() for item in thelist.split(delimiter, maxsplit)]
def joinsplitstrip(thelist, delimiter=" ", maxsplit=-1):
    return splitstrip(thelist, delimiter, maxsplit)
def joinsplitlines(thelist, pemisah="\n"):
    return pemisah.join(thelist.splitlines())
def joinsplitstriplines(thelist, pemisah="\n"):
    return pemisah.join([item.strip() for item in thelist.splitlines()])
def multiple_spaces_to_single_space(original_text, replacer=" "):
    """
    https://pythonexamples.org/python-replace-multiple-spaces-with-single-space-in-text-file/
    """
    # return ' '.join(original_text.split())
    return re.sub(r"\s+", replacer, original_text)
"""
dipake utk repace/insert utk fileoperation...@ia, @ra, @rs, dst.
misal:
"target": "es5" menjadi "target": "es6"
"module": "commonjs"
@rs="__DQtarget__DQ: __DQes6__DQ"="__DQtarget__DQ: __DQes5__DQ"
def sanitize_prohibited_chars(self, content):
kita bisa tulis DQ sbg pengganti double quote
@re,baris_cari_dalam_mk_file,"something DQemphasizedDQ and other"
lihat di h feature
pubspec.yaml,f(f=pubspec.yaml,@ra=flutter_sdk_no="sdk: DQ>=2.")
sebetulnya lebih baik jk kita gunakan
__DQ daripada DQ doang...
for k,v in chars_to_sanitize_in_file_operation.items():
content = content.replace(k, v)
return content
TODO:
pake juga utk:
- get permissive di fileutils...agar kita bisa bikin --% dan --# sbg daleman dalam sebuah entry
- utk grammar.py agar bisa dipake di filename,f(...), dirname,d(...) dst
kita juga punya __AT utk @ utk nama direktori/file
mending operasi digabungkan di sini dg sanitize_chars.
"""
chars_to_sanitize_in_file_operation = {
"__DQ": '"',
"__SQ": "'",
"__BT": "`",
"__NL": "\n",
"__SL": "/",
"__BS": "\\",
"__PP": "|",
"__EQ": "=",
"__DOLLAR__": "$",
"__AT__": "@", # jangan lupa, yg panjang mendahului yg pendek
"__AT": "@",
"__PRC__": "%", # jangan lupa, yg panjang mendahului yg pendek
"__PRC": "%", # ini krn %TEMP%,d dianggap sbg %TEMPLATE_SAVE_VAR etc
"__CL": ":",
"__SC": ";",
"__LP": "(",
"__RP": ")",
"__LK__": "[", # jangan lupa, yg panjang mendahului yg pendek
"__LK": "[",
"__RK__": "]", # jangan lupa, yg panjang mendahului yg pendek
"__RK": "]",
"__LB": "{",
"__RB": "}",
"__LT": "<",
"__GT": ">",
"__TAB1": "\t",
"__TAB2": "\t\t",
"__TAB3": "\t\t\t",
"__TAB4": "\t\t\t\t",
"__SPC1": " ",
"__SPC2": " " * 2,
"__SPC3": " " * 3,
"__SPC4": " " * 4,
"\\n": "\n",
"\\t": "\t",
}
def sanitize_chars(content):
for k, v in chars_to_sanitize_in_file_operation.items():
content = content.replace(k, v)
return content
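# Illustrative sketch (assumption, not part of the original module): how the
# placeholder tokens above expand via sanitize_chars.
def _contoh_sanitize_chars():
    raw = "__DQtarget__DQ: __DQes6__DQ"
    return sanitize_chars(raw)  # -> '"target": "es6"'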
def split_by_pos(strng, sep, pos):
"""
https://stackoverflow.com/questions/36300158/split-text-after-the-second-occurrence-of-character
>>> strng = 'some-sample-filename-to-split'
>>> split(strng, '-', 3)
('some-sample-filename', 'to-split')
>>> split(strng, '-', -4)
('some', 'sample-filename-to-split')
>>> split(strng, '-', 1000)
('some-sample-filename-to-split', '')
>>> split(strng, '-', -1000)
('', 'some-sample-filename-to-split')
"""
strng = strng.split(sep)
return sep.join(strng[:pos]), sep.join(strng[pos:]) | yulibrary | /yulibrary-0.0.2-py3-none-any.whl/langutils/app/stringutils.py | stringutils.py |
import json
import os
import random
import subprocess
import time
import uuid
import webbrowser
from platform import uname as std_uname
from dotenv import load_dotenv
try:
import readline
except ImportError as err:
import pyreadline
def u():
return str(uuid.uuid4())
def platform():
"""
    possible return values:
linux
windows
wsl
uname_result(system='Windows', node='DESKTOP-7EO5LQL', release='10', version='10.0.19041', machine='AMD64')
uname_result(system='Windows', node='user-PC', release='10', version='10.0.19044', machine='x86')
uname_result(system='Linux', node='DESKTOP-7EO5LQL', release='5.4.72-microsoft-standard-WSL2', version='#1 SMP Wed Oct 28 23:40:43 UTC 2020', machine='x86_64')
uname_result(system='Linux', node='localhost', release='4.4.111-21737876', version='#1 SMP PREEMPT Thu Jul 15 19:28:19 KST 2021', machine='aarch64')
$ uname -a
Linux localhost 4.4.111-21737876 #1 SMP PREEMPT Thu Jul 15 19:28:19 KST 2021 aarch64 Android
"""
kembalian = [
"linux", # 0
"windows", # 1
"wsl", # 2
"termux", # 3
"desktop", # 4
]
machine = std_uname()
sistem = machine.system.lower()
rilis = machine.release.lower()
mesin = machine.machine.lower()
# print(f'sis: {sistem}, ril: {rilis}, uname: {machine}')
if "windows" in sistem: # sys.platform == 'win32'
if mesin == "x86":
return kembalian[4] # pc desktop 32-bit
return kembalian[1]
elif "microsoft" in rilis and "linux" in sistem:
return kembalian[2]
elif sistem == "linux" and machine.machine == "aarch64":
return kembalian[3]
else:
return kembalian[0]
def uname():
"""
desktop
uname_result(system='Windows', node='user-PC', release='10', version='10.0.19044', machine='x86')
"""
return std_uname()
def isdesktop():
sistem = uname()
return (
sistem.node == "user-PC"
and sistem.system == "Windows"
and sistem.machine == "x86"
)
def env_get(kunci, default=None):
if kunci in os.environ:
return os.environ[kunci]
return default
# platform == linux
PBPASTE = "xclip -selection clipboard -o"
PBCOPY = "xclip -selection clipboard"
if platform() != "linux":
PBPASTE = "pbpaste"
PBCOPY = "pbcopy"
TRANSLATE = "https://translate.google.com/?hl=en&ie=UTF-8&sl=__SOURCE&tl=__TARGET__TEXTPLACEHOLDER__&op=translate"
GOOGLESEARCH = "https://www.google.com/search?q=__TEXTPLACEHOLDER__"
# https://translate.google.com/?hl=en&ie=UTF-8&sl=en&tl=id&op=translate
# https://translate.google.com/?hl=en&ie=UTF-8&sl=en&tl=id&text=libertarian&op=translate
# https://www.google.com/search?client=firefox-b-e&q=christopher+hitchens
# https://www.google.com/search?q=christopher+hitchens
WEBSITES = {
"https://translate.google.com/?hl=en&ie=UTF-8&sl=en&tl=id&op=translate",
"https://www.smh.com.au/",
"https://www.dailymail.co.uk/ushome/index.html",
"https://www.dailymail.co.uk/auhome/index.html",
"https://www.scmp.com/",
"https://stackoverflow.com/questions",
"https://www.upwork.com/freelance-jobs/python/",
"http://fulgent.be/m/college/jobs.html",
"https://leetcode.com/explore/",
"https://www.glassdoor.com/Job/jakarta-senior-software-developer-jobs-SRCH_IL.0,7_IC2709872_KO8,33.htm?fromAge=3&includeNoSalaryJobs=true",
"https://www.jobstreet.co.id/en/job-search/senior-software-engineer-jobs/?sort=createdAt",
"https://remoteok.io/remote-dev-jobs?location=worldwide",
"https://remotive.io/remote-jobs/software-dev?live_jobs%5Btoggle%5D%5Bus_only%5D=true&live_jobs%5BsortBy%5D=live_jobs_sort_by_date&live_jobs%5Bmenu%5D%5Bcategory%5D=Software%20Development",
"https://angel.co/jobs?ref=onboarding",
}
PROGRAMS = {
# "C:\Program Files\Mozilla Firefox\firefox.exe"
# /mnt/c/Program Files/Mozilla Firefox
# 'ff' : '/usr/bin/firefox -no-remote -P',
# "C:\Program Files\Google\Chrome\Application\chrome.exe"
# "/mnt/c/Program Files/Google/Chrome/Application/chrome.exe"
# 'ff' : env_get('SIDOARJO_FIREFOX'),
"ff": '"C:/Program Files/Mozilla Firefox/firefox.exe" -no-remote -P',
"ffacer": '"C:/Program Files/Mozilla Firefox/firefox.exe" -no-remote -P',
"chr": "chromium-browser",
# 'chrome' : '/opt/google/chrome/chrome',
# 'chrome' : '"/mnt/c/Program Files/Google/Chrome/Application/chrome.exe"',
"chrome": '"C:/Program Files/Google/Chrome/Application/chrome.exe"',
"term": '"cmd /k start"',
"qterm": "qterminal",
"term2": "gnome-terminal",
"gterm": "gnome-terminal",
"xterm": "xterm",
"xemu": "x-term-emulator",
"xdg": "xdg-open",
"atom": "atom",
"code": "code",
"note": "notepad",
"npp": '"C:/Program Files/Notepad++/notepad++.exe"',
}
# initial
curdir = os.path.dirname(__file__)
SCHNELLDIR = env_get("ULIBPY_BASEDIR", os.path.join(curdir, ".."))
def pbcopy_pbpaste():
global SCHNELLDIR, PBPASTE, PBCOPY
if env_get("ULIBPY_BASEDIR"):
SCHNELLDIR = env_get("ULIBPY_BASEDIR")
if platform() == "linux":
pass
elif platform() == "wsl":
if env_get("PBCOPY_WSL"):
PBCOPY = env_get("PBCOPY_WSL")
if env_get("PBPASTE_WSL"):
PBPASTE = env_get("PBPASTE_WSL")
# print(f'wsl => schnelldir {SCHNELLDIR}, pbcopy {PBCOPY} dan pbpaste {PBPASTE}')
LANGUAGES = [
"awk",
"sh",
"bat",
"clang", # utk modern cpp
"cpp",
"cs",
"css",
"clj",
"dart",
"ex",
"erl",
"go",
"groovy",
"hs",
"java",
"js",
"kt",
"perl",
"php",
"py", # agar bisa :py -> if code in languages
# tapi ini di handle di cleanup_bahasa
"python",
"r",
"rb",
"rs",
"scala",
"sed",
"swift",
"ts",
]
try:
import pyperclip
except ImportError as err:
pass
from importlib import import_module as std_import_module
from faker import Faker
faker_instance = Faker()
def import_from_string(fq_classname):
    """
    app.transpiler.frontend.fslang.flask.Coordinator
    module:
        app.transpiler.frontend.fslang.flask
    class_name:
        Coordinator
    because of the technique below:
        module_path, class_name = fq_classname.rsplit('.', 1)
    we can also do:
        app.notfutils.pynotif
    where pynotif is a function:
        import_from_string('app.notfutils.pynotif')(judul, badan)
    class_str: str = 'A.B.YourClass'
    """
    try:
        module_path, class_name = fq_classname.rsplit(".", 1)
        module = std_import_module(module_path)
        return getattr(module, class_name)
    except (ImportError, AttributeError) as e:
        raise ImportError(fq_classname)
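# Illustrative sketch (assumption, not part of the original module): resolving a
# dotted path with import_from_string, using a stdlib attribute as the target.
def _contoh_import_from_string():
    join = import_from_string("os.path.join")  # -> the os.path.join function
    return join("a", "b")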
def faker(methodname, *args, **kwargs):
if hasattr(faker_instance, methodname):
return getattr(faker_instance, methodname)(*args, **kwargs)
return None
def print_faker(methodname, *args, **kwargs):
hasil = None
if hasattr(faker_instance, methodname):
hasil = getattr(faker_instance, methodname)(*args, **kwargs)
if hasil:
print(hasil)
def printcopy_faker(methodname, *args, **kwargs):
hasil = None
if hasattr(faker_instance, methodname):
hasil = getattr(faker_instance, methodname)(*args, **kwargs)
if hasil:
trycopy(hasil)
print(hasil)
def acak(min=0, max=100):
return random.randint(min, max)
def ambil(datalist):
return random.choice(datalist)
def sampling(datalist, k=1, stringify=False):
    """
    random.choices draws with replacement (the population stays intact);
    random.sample draws without replacement (the population shrinks).
    """
    if stringify:
        return "\n".join(random.sample(datalist, k))
    return random.sample(datalist, k)
def tidur(s=0.0, ms=0.0):
if s:
time.sleep(s)
elif ms:
time.sleep(ms / 1000.0)
def env_add(kunci, nilai):
os.environ[kunci] = nilai
def env_set(kunci, nilai):
if isinstance(nilai, int):
nilai = str(nilai)
os.environ[kunci] = nilai
def env_exist(kunci):
return kunci in os.environ
def env_get_fuzzy(code):
# envulibs = env_ulibpy_values()
result = [item for item in env_ulibpy() if code.lower() in item.lower()]
if result:
return result[0]
return None
def env_ulibpy():
daftar = os.environ
return [item for item in daftar if item.startswith("ULIBPY")]
def env_ulibpy_values():
daftar = os.environ
return [f"{item} = {env_get(item)}" for item in daftar if item.startswith("ULIBPY")]
def env_replace_filepath(filepath, normalize=False):
for envvar in env_ulibpy():
expanded = env_get(envvar)
# if envvar in filepath:
# print(f'expanding {envvar} => {expanded}')
filepath = filepath.replace(envvar, expanded)
if normalize:
from .dirutils import normy
filepath = normy(filepath)
return filepath
def expand_ulib_path(filepath):
# print('old filepath:', filepath)
if "ULIBPY_" in filepath:
# from .utils import env_replace_filepath
filepath = env_replace_filepath(filepath, normalize=True)
# print('new filepath:', filepath)
return filepath
def env_print(only_ulibpy=True):
daftar = os.environ
if only_ulibpy:
daftar = [item for item in daftar if item.startswith("ULIBPY")]
print(json.dumps(daftar, indent=4))
def env_int(kunci, default=0):
if kunci in os.environ:
return int(os.environ[kunci])
return default
def env_reload():
# ULIBPY_BASEDIR
if "ULIBPY_BASEDIR" in os.environ:
filepath = env_get("ULIBPY_BASEDIR")
else:
filepath = os.path.join(os.path.dirname(__file__), os.path.pardir)
env_file = os.path.join(filepath, ".env")
load_dotenv(env_file)
print("environ reloaded...")
def env_load(env_file=os.path.join(SCHNELLDIR, ".env")):
load_dotenv(env_file)
def env_expand(source, special_resources="gaia", bongkarin=False):
    """
    Expand ULIBPY_... variables inside a string.
    A special shorthand is provided so typing is less tiring:
        gaia => ULIBPY_RESOURCES
    bongkarin => also expand ~, shell vars, etc.
    """
    from .dirutils import bongkar
    source = source.replace(special_resources, "ULIBPY_RESOURCES")
    for k in os.environ:
        if k.startswith("ULIBPY"):
            source = source.replace(k, os.environ[k])
    if bongkarin:
        return bongkar(source)
    else:
        return source
def env_expand_removeprefix(source, prefix, remove_strip=True):
source = source.removeprefix(prefix)
if remove_strip:
source = source.strip()
return env_expand(source)
def trycopy(content):
try:
pyperclip.copy(content)
except Exception as err:
pass
def trypaste():
try:
content = pyperclip.paste()
return content
except Exception as err:
return None
def try_copy(content):
trycopy(content)
def try_paste():
return trypaste()
def yesno(message, yes_callback=None, warna="bright_magenta"):
from .printutils import indah0
indah0(message, warna=warna)
yesno = input(" ")
if yesno == "y" or yesno == "yes" or yesno == "Y":
if yes_callback:
yes_callback()
return True
return False
def list_set(myarr):
    """
    input: list
    output: list
    process:
    - de-duplicate via set()
    - convert back to a list
    """
    return list(set(myarr))
def datadir(filename=None):
rootdir = env_get("ULIBPY_ROOTDIR")
datadir_relative = env_get("ULIBPY_DATA_FOLDER")
datafolder = os.path.join(rootdir, datadir_relative)
if filename:
return os.path.join(datafolder, filename)
return datafolder
class TabCompleter:
def __init__(self, options):
self.options = sorted(options)
def complete(self, text, state):
if state == 0: # on first trigger, build possible matches
if text: # cache matches (entries that start with entered text)
self.matches = [s for s in self.options if s and s.startswith(text)]
else: # no text entered, all matches possible
self.matches = self.options[:]
# return match indexed by state
try:
return self.matches[state]
except IndexError:
return None
def complete(completer, read_history=False):
readline.set_completer(completer.complete)
readline.parse_and_bind("tab: complete")
if read_history:
try:
readline.read_history_file()
except FileNotFoundError as notfound:
print(f"readline: Gagal baca history file {notfound}.")
def complete_from_list(keywords, read_history=False):
    """
    usage:
        cmds = []
        complete_from_list(cmds)
    """
    complete(TabCompleter(keywords), read_history)
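# Illustrative sketch (assumption, not part of the original module): wiring readline
# tab-completion to a fixed keyword list before prompting the user.
def _contoh_complete_from_list():
    complete_from_list(["help", "hello", "halt"])
    # pressing TAB at this prompt now completes against the three keywords
    return input("perintah> ")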
def input_until_end(ender="###", line_callback=None):
    """
    line_callback
        lets us process each line as it comes in, e.g. to exec something and skip
        adding that line to the result.
    """
    print(f"Enter line until {ender}.")
    result = []
    baris = input(">> ")
    while baris != ender:
        if line_callback:
            masukkan = line_callback(baris)
            if masukkan:
                result.append(baris)
            # if the callback returns False, we do not want to add the already-processed line.
        else:
            result.append(baris)
        baris = input(">> ")
    if baris:
        return "\n".join(result)
    return None
def perintah(cmd):
    """
    Alternative: use subprocess instead of os.system.
    """
    if env_int("ULIBPY_FMUS_DEBUG") > 1:
        from .printutils import indah_debug
        indah_debug(f"perintah: {cmd}", warna="red")
    os.system(cmd)
def perintahsp(prefix, cmd):
    """
    Uses subprocess rather than os.system.
    https://linuxhint.com/execute_shell_python_subprocess_run_method/#:~:text=To%20capture%20the%20output%20of,named%20%E2%80%9Ccapture_output%3DTrue%E2%80%9D.&text=You%20can%20individually%20access%20stdout,stdout%E2%80%9D%20and%20%E2%80%9Coutput.
    output = subprocess.run(["cat", "data.txt"], capture_output=True)
    subprocess.run(["cat", "data.txt"])
    """
    output = subprocess.run(prefix.split() + [cmd], capture_output=True)
    return output
def perintahsp_simple(complete_command):
subprocess.run(complete_command.split())
def perintahsp_simple_chdir(complete_command, workdir=None):
from .dirutils import chdir, disini
olddir = disini()
if workdir:
chdir(workdir)
# print('perintahsp_simple_chdir mulai di:', disini())
subprocess.run(complete_command.split(), shell=True)
if workdir:
chdir(olddir)
# print('perintahsp_simple_chdir berarkhir di:', disini())
def perintahsp_capture(complete_command):
output = subprocess.run(complete_command.split(), capture_output=True)
return output
def perintahsp_outerr(complete_command):
    """
    Fails for commands like:
    curl http://localhost:8080/urls -X POST -H "Content-type: application/json" -d '{ "name": "usef" }'
    (use perintahsp_outerr_as_shell for those).
    """
    output = subprocess.run(complete_command.split(), capture_output=True)
    _stdout = output.stdout.decode("utf8")
    _stderr = output.stderr.decode("utf8")
    return _stdout, _stderr
def perintah_shell(command):
    """
    Also worth trying:
    subprocess.Popen(command, shell=True).wait()
    """
    if env_int("ULIBPY_FMUS_DEBUG") > 1:
        print(f"run shell: {command}.")
    subprocess.run(command, shell=True)
def perintah_shell_wait(command):
    """
    Also worth trying:
    subprocess.Popen(command, shell=True).wait()
    and there is also
    communicate()
    """
    if env_int("ULIBPY_FMUS_DEBUG") > 1:
        print(f"run shell: {command}.")
    # subprocess.run(command, shell=True)
    subprocess.Popen(command, shell=True).wait()
def perintahsp_outerr_as_shell(complete_command):
    """
    Works for commands like:
    curl http://localhost:8080/urls -X POST -H "Content-type: application/json" -d '{ "name": "usef" }'
    """
    cmdlist = complete_command.split()
    # print(f'perintahsp_outerr_as_shell: asli [{complete_command}], listify {cmdlist}')
    output = subprocess.run(complete_command, capture_output=True, shell=True)
    _stdout = output.stdout.decode("utf8")
    _stderr = output.stderr.decode("utf8")
    # if _stdout is None:
    #     '''
    #     avoids
    #         out, err = process_curl(program, True)
    #         TypeError: cannot unpack non-iterable NoneType object
    #     when _stdout is None
    #     '''
    #     _stdout = ''
    return _stdout, _stderr
def get_suffix_angka(text, cari="0123456789", pipa=None):
    """
    mystr[len(mystr.rstrip('0123456789')):]
    mystr[len(mystr.rstrip('|0123456789')):]
    a = get_suffix_angka(mystr)
    b = get_suffix_angka(mystr, pipa='|')
    if b == '|'+a: it means mystr ends with |<number>
    """
    if pipa:
        berpipa = pipa + cari
        bisa_berangka_dikiri = text[len(text.rstrip(berpipa)) :]
        return bisa_berangka_dikiri.lstrip(cari)
    else:
        return text[len(text.rstrip(cari)) :]
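# Illustrative sketch (assumption, not part of the original module): reading a
# numeric suffix, with and without the pipe separator.
def _contoh_get_suffix_angka():
    a = get_suffix_angka("file|123")            # -> '123'
    b = get_suffix_angka("file|123", pipa="|")  # -> '|123'
    return b == "|" + a                         # True: the string ends with |<number>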
def python_package(pkg):
# https://stackoverflow.com/questions/247770/how-to-retrieve-a-modules-path
import importlib
import inspect
package = importlib.import_module(pkg)
if package:
lokasi_file = inspect.getfile(package)
lokasi = os.path.dirname(lokasi_file)
return lokasi
return None
def replace_wiekes(result, wiekes):
prefix = env_get("ULIBPY_WIEKES_TEMPLATE_PREFIX")
capper = env_get("ULIBPY_WIEKES_CAPITALIZE_SYMBOL")
wiekeplural = env_get("ULIBPY_WIEKES_PLURALIZE_SYMBOL")
wiekelower = env_get("ULIBPY_WIEKES_LOWERIZE_SYMBOL")
wiekeupper = env_get("ULIBPY_WIEKES_UPPERIZE_SYMBOL")
replacers = wiekes
templates = [prefix + str(angka).zfill(2) for angka in range(1, len(replacers) + 1)]
for index, wieke in enumerate(replacers):
result = result.replace(templates[index] + capper, wieke.capitalize())
result = result.replace(templates[index] + wiekeplural, wieke + "s")
result = result.replace(templates[index] + wiekelower, wieke.lower())
result = result.replace(templates[index] + wiekeupper, wieke.upper())
result = result.replace(templates[index], wieke)
return result
def run_in_server(command):
if env_exist("ULIBPY_SERVER_SSH"):
from .printutils import indah0
prefix = env_get("ULIBPY_SERVER_SSH")
print("Ready to roll di server", prefix)
output = perintahsp(prefix, command)
_stdout = output.stdout.decode("utf8")
_stderr = output.stderr.decode("utf8")
if _stderr:
indah0(_stderr, warna="red", bold=True, newline=True)
indah0(_stdout, warna="cyan", bold=True, newline=True)
else:
print("Gak ada alamat server ULIBPY_SERVER_SSH di env")
def vscode_edit_file(filepath):
cmd = f"code {filepath}"
# print('cmd dg --goto line:', cmd)
# Arguments in `--goto` mode should be in the format of `FILE(:LINE(:CHARACTER))`
# perintahsp_simple(cmd)
perintah(cmd)
def vscode_edit_at_line(filepath, lineno):
if not lineno or lineno < 0:
lineno = 0
cmd = f'code --goto "{filepath}:{lineno}:1"'
# print('cmd dg --goto line:', cmd)
# Arguments in `--goto` mode should be in the format of `FILE(:LINE(:CHARACTER))`
# perintahsp_simple(cmd)
perintah(cmd)
def buka(alamat):
webbrowser.open(alamat)
class Launcher:
def __init__(self):
pass
@staticmethod
def launch(key):
if key in PROGRAMS:
cmd = PROGRAMS[key]
completecmd = cmd + " 2>/dev/null &"
if platform() in ["win32", "windows"]:
completecmd = cmd
print(completecmd)
os.system(completecmd)
elif " " in key:
from .stringutils import splitspace
cmd, args = splitspace(key)
# print(f'''
# cmd {cmd}
# type c {type(cmd)}
# args {args}
# type a {type(args)}
# ''')
if cmd == "o":
"""
key = l o trans
cmd args
jk args punya , berarti ini trans
jk args punya | berarti ini google search
l o trans,en#sublime
l o goog|christopher walken
"""
__SOURCE = "en"
__TARGET = "id"
__TEXTPLACEHOLDER__ = ""
if "|" in args:
# l o goog|...
goog, __placeholder = args.split("|")
__TEXTPLACEHOLDER__ = __placeholder.replace(" ", "+")
alamat = GOOGLESEARCH.replace(
"__TEXTPLACEHOLDER__", __TEXTPLACEHOLDER__
)
url = [alamat]
elif "," in args and not args.startswith("http"):
"""
l o trans,en
l o trans,en,id
perlu juga: kasih text
l o trans,en#sublime
"""
if args.count(",") == 1:
"""
jk cuma 1 language diberikan
dia jadi source atau target? source saja dulu
"""
trans, __SOURCE = args.split(",")
elif args.count(",") == 2:
trans, __SOURCE, __TARGET = args.split(",")
if "#" in __SOURCE:
__SOURCE, __placeholder = __SOURCE.split("#")
__TEXTPLACEHOLDER__ = f"&text={__placeholder}"
alamat = (
TRANSLATE.replace("__SOURCE", __SOURCE)
.replace("__TARGET", __TARGET)
.replace("__TEXTPLACEHOLDER__", __TEXTPLACEHOLDER__)
)
url = [alamat]
else:
url = [item for item in WEBSITES if args.lower() in item.lower()]
# jika l o alamat dan alamat tdk ada dlm WEBSITES
if not url:
url = [args]
else:
print("url:", url)
if url:
if len(url) == 1:
buka(url[0])
else:
from .printutils import print_list_warna
print_list_warna(url)
return url
elif cmd in PROGRAMS:
cmd = PROGRAMS[cmd]
args = env_expand(args, bongkarin=True)
if cmd == "qterminal":
cmd = f"{cmd} -w "
cmd = cmd + f' "{args}"'
print(cmd)
os.system(cmd + " 2>/dev/null &")
def half_backslash(filepath):
return filepath.replace("\\\\", "\\")
def double_backslash(filepath):
return filepath.replace("\\", "\\\\")
def quad_backslash(filepath):
return double_backslash(double_backslash(filepath))
def wslify(filepath, rewindows=False, no_double_back=True, manual=False):
    """
    linux path -> wsl path
    windows path -> wsl path
    rewindows = True
        use this when the original filepath lives on linux
    manual: if the env var is empty, prepend the \\wsl$ prefix ourselves, hardcoded
    rewindows = turn / into \\
    no_double_back = collapse the leading double backslash for wsl into a single backslash
    manual = prepend \\wsl$... explicitly
    """
    prefix = env_get("ULIBPY_WSL_ADDRESS")
    if manual:
        prefix = "\\\\wsl$\\Ubuntu-20.04"
    hasil = filepath
    if prefix:
        # return prefix + filepath.replace('/', os.sep)
        hasil = prefix + filepath
        if rewindows:
            hasil = prefix + filepath.replace("/", "\\")
        if no_double_back:
            hasil = hasil.replace("\\\\", "\\")
    return hasil
def linuxpath_to_wslpath(filepath, manual=False, untested_new_feature=False):
    """
    Becomes odd and fails for W=*outlay,
    so untested_new_feature defaults to False.
    """
    prefix = env_get("ULIBPY_WSL_ADDRESS")
    if manual:
        prefix = "\\\\wsl$\\Ubuntu-20.04"
    hasil = filepath
    if prefix:
        hasil = prefix + double_backslash(filepath.replace("/", "\\"))
        if untested_new_feature:
            hasil = double_backslash(hasil)  # because the shell eats backslashes...
    return hasil
def wslpath_to_linuxpath(filepath):
    """
    pathsource can be windows (/mnt/c/...) or linux (/home/usef/...);
    call these pathsources: linuxsource and windowssource.
    """
    prefix = env_get("ULIBPY_WSL_ADDRESS")
    halfback_prefix = half_backslash(prefix)
    if filepath.startswith(prefix):
        # [\\wsl$\Ubuntu-20.04\home\usef\work\ulibs\schnell\app\transpiler\frontend\fslang\misc\work.fmus
        filepath = filepath.removeprefix(prefix).replace("\\", "/")
        if filepath.startswith("/mnt/c"):
            # still inside linux, so don't send a windows path;
            # return c:/path/to/target not c:\path\to\target
            # return filepath.replace('/mnt/c/', 'c:/')
            return filepath
        elif filepath.startswith("c:\\"):
            # open code at a windows path
            # c:\work\oprek\cmake-qt\ecommdj\fshelp\work
            return linuxify(filepath)
        else:
            return filepath
    elif filepath.startswith(halfback_prefix):
        print(f"wslpath_to_linuxpath diawali: {halfback_prefix}")
        filepath = filepath.removeprefix(halfback_prefix).replace("\\", "/")
        print(f"wslpath_to_linuxpath menjadi: {filepath}")
        return filepath
    elif filepath.startswith("c:\\"):
        # open code at a windows path
        # c:\work\oprek\cmake-qt\ecommdj\fshelp\work
        return linuxify(filepath)
    return filepath
def linuxify(filepath):
    """
    Use this when the original filepath lives on windows (c:\\...):
    c:\\tmp -> /mnt/c/tmp
    """
    lower_drive = filepath[0].lower() + filepath[1:]
    res = lower_drive.replace("c:\\", "/mnt/c/")
    res = res.replace("c:/", "/mnt/c/")
    res = res.replace("\\", "/")
    return res
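# Illustrative sketch (assumption, not part of the original module): converting a
# Windows path to its WSL mount form with linuxify.
def _contoh_linuxify():
    return linuxify("c:\\work\\tmp")  # -> '/mnt/c/work/tmp'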
def is_windows():
# return not platform() == 'linux'
return platform() in ["win32", "windows", "desktop"]
def windowsify(filepath):
return filepath.replace("/", "\\")
def is_the_same_folder(filepath1, filepath2):
return windowsify(filepath1.lower()) == windowsify(filepath2.lower())
def not_the_same_folder(filepath1, filepath2):
return windowsify(filepath1.lower()) != windowsify(filepath2.lower())
def salin_objek(sumber):
import copy
return copy.copy(sumber)
def wslpath2winpath(filepath):
# /mnt/c/fullstack/django_pg/kt.us
if filepath.startswith("/mnt/c/"):
return filepath.replace("/mnt/c/", "c:/")
return filepath
def wslpath2winpath_condition(filepath):
"""
filepath = wslpath2winpath_condition(filepath)
"""
# print('wslpath:', filepath)
if platform() == "wsl":
if filepath.startswith("/mnt/c/"):
return filepath.replace("/mnt/c/", "c:/")
else:
# /home/usef ... etc
return linuxpath_to_wslpath(filepath, untested_new_feature=False)
return filepath
def winpath_to_wslpath(filepath):
return filepath.replace("\\", "/")
def import_module_original(dotted_filepath, redot=False):
"""
import_module_original('a/b/c/d', redot=True)
import_module_original('a.b.c.d')
"""
# from importlib import import_module
if redot:
dotted_filepath = dotted_filepath.replace("/", ".")
module = std_import_module(dotted_filepath)
return module
def import_module(MODULE_NAME, MODULE_PATH):
    """
    'generator': '/home/usef/work/ulibs/schnell/app/transpiler/frontend/fslang/z/quick/campur/wp5/wd4/__init__.py',
    'fmusfile': '/home/usef/work/ulibs/schnell/app/transpiler/frontend/fslang/z/quick/campur/wp5/wd4/index-input.mk'
    spec_from_file_location(name,
        location=None,
        *,  <- the bare * means the parameters after it are keyword-only
        loader=None,
        submodule_search_locations=_POPULATE)
    Return a module spec based on a file location.
    To indicate that the module is a package, set
    submodule_search_locations to a list of directory paths.
    An empty list is sufficient, though it's not otherwise useful to the import system.
    The loader must take a spec as its only __init__() arg.
    """
# https://stackoverflow.com/questions/67631/how-to-import-a-module-given-the-full-path
# MODULE_PATH = "/path/to/your/module/__init__.py"
# MODULE_NAME = "mymodule"
import importlib
import sys
spec = importlib.util.spec_from_file_location(MODULE_NAME, MODULE_PATH)
if not spec:
from .dirutils import ayah
print("[app.utils] respeccing...")
submodule_search_locations = ayah(MODULE_PATH, 1)
spec = importlib.util.spec_from_file_location(
MODULE_NAME,
MODULE_PATH,
submodule_search_locations=(submodule_search_locations,),
)
if not spec:
from importlib._bootstrap_external import \
_get_supported_file_loaders
a = _get_supported_file_loaders()
print("[app.utils] double respeccing...")
print(a)
# print(f'''[utils/import_module]
# MODULE_NAME = {MODULE_NAME}
# MODULE_PATH = {MODULE_PATH}
# spec = {spec}
# ''')
module = importlib.util.module_from_spec(spec)
sys.modules[spec.name] = module
spec.loader.exec_module(module)
return module
def micro(filepath=None):
    from .dirutils import bongkar, joiner
    from .envvalues import schnelldir
    microdir = joiner(schnelldir(), "vendor/micro/micro")
    if is_windows():  # call the function; the bare name was always truthy
        microdir = linuxify(microdir)  # c:/ becomes /mnt/c
    if not filepath:
        perintah_shell(f"wsl {microdir}")
    else:
        """
        usually from ULIBPY_BASEDIR/data/oprek.py etc.
        """
        filepath = bongkar(filepath)
        filepath = linuxify(filepath)
perintah_shell(f"wsl {microdir} {filepath}") | yulibrary | /yulibrary-0.0.2-py3-none-any.whl/langutils/app/utils.py | utils.py |
import datetime
import json
import os
import pathlib
import re
import stat
from pathlib import Path
from shutil import copyfile
from .utils import env_exist, env_expand, env_get, env_int, trycopy, trypaste
def chmod(filepath, stringmode="600"):
"""
https://stackoverflow.com/questions/15607903/python-module-os-chmodfile-664-does-not-change-the-permission-to-rw-rw-r-bu
"""
os.chmod(filepath, int(stringmode, base=8))
def chmodrwx(filepath):
"""Removes 'group' and 'other' perms. Doesn't touch 'owner' perms.
S_IRUSR (00400) read by owner
S_IWUSR (00200) write by owner
S_IXUSR (00100) execute/search by owner
S_IRGRP (00040) read by group
S_IWGRP (00020) write by group
S_IXGRP (00010) execute/search by group
S_IROTH (00004) read by others
S_IWOTH (00002) write by others
S_IXOTH (00001) execute/search by others
Note: Although Windows supports chmod(), you can only set the file’s read-only flag with it (via the stat.S_IWRITE and stat.S_IREAD constants or a corresponding integer value). All other bits are ignored.
"""
mode = os.stat(filepath).st_mode
mode -= mode & (stat.S_IRWXG | stat.S_IRWXO)
os.chmod(filepath, mode)
def get_umask():
umask = os.umask(0)
os.umask(umask)
return umask
def chmod_plus_x(filepath):
"""
https://stackoverflow.com/questions/12791997/how-do-you-do-a-simple-chmod-x-from-within-python/55591471#55591471
"""
os.chmod(
filepath,
os.stat(filepath).st_mode
| ((stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH) & ~get_umask()),
)
def absolute(filepath):
return os.path.isabs(filepath)
def to_absolute(filepath):
return os.path.abspath(filepath)
def editfile(dir_or_file):
os.system(f"code {dir_or_file}")
def perintah(command):
os.system(command)
def copy_file(src, dst):
copyfile(src, dst)
def copy_content(filepath):
trycopy(file_content(filepath))
def json_file_content(json_filepath):
try:
with open(json_filepath) as fd:
return json.load(fd)
except Exception as err:
print(f"[fileutils] opening: {json_filepath}", err)
return None
def json_file_print(json_filepath):
json_body = json_file_content(json_filepath)
print(json.dumps(json_body, indent=4))
return json_body
def json_from_string(content):
return json.loads(content)
def json_stringify(content, indent=True):
if indent:
return json.dumps(content, indent=4)
return json.dumps(content)
def file_sentences(filepath):
data = None
with open(filepath, "r", encoding="utf-8") as fd:
data = fd.read().replace("\n", "")
return data
def file_content(filepath):
    """
    Returns the whole file as one text/string blob.
    https://stackoverflow.com/questions/45529507/unicodedecodeerror-utf-8-codec-cant-decode-byte-0x96-in-position-35-invalid
    Updated for:
        'utf-8' codec can't decode byte 0x93 in position 68384: invalid start byte
    by passing errors='ignore'.
    """
    return pathlib.Path(filepath).read_text(encoding="utf8", errors="ignore")
def file_content_binary(filepath):
import io
content = None
with io.open(filepath, "rb") as binary_file:
content = binary_file.read()
return content
def file_content_ascii(filepath):
return pathlib.Path(filepath).read_text(encoding="utf-8")
def file_content_safe(filepath):
"""
returns the whole file as a single text/string
sometimes hits cases like https://stackoverflow.com/questions/42339876/error-unicodedecodeerror-utf-8-codec-cant-decode-byte-0xff-in-position-0-in
(invalid start byte).
"""
# return pathlib.Path(filepath).read_text(encoding='utf-8')
path_obj = pathlib.Path(filepath)
try:
content = path_obj.read_text(encoding="utf-8")
except Exception as err:
# https://stackoverflow.com/questions/42339876/error-unicodedecodeerror-utf-8-codec-cant-decode-byte-0xff-in-position-0-in
# the file may actually be UTF-16
content = path_obj.read_bytes()
if env_int("ULIBPY_FMUS_DEBUG") > 1:
print("file_content_safe:", filepath, "PRE decoding bytes as utf-16")
content = content.decode("utf-16")
if env_int("ULIBPY_FMUS_DEBUG") > 1:
print("file_content_safe:", filepath, "POST decoding bytes as utf-16")
return content
def file_content_old(filepath):
"""
returns the whole file as a single text/string
"""
content = None
with open(filepath, encoding="utf-8") as fd:
content = fd.read()
return content
def file_copy(lama, baru):
file_write(baru, file_content(lama))
def count_lines(filepath):
return len(file_lines(filepath))
def file_lines(filepath, strip_newline=False, skip_emptylines=False):
"""
retval [line1, line2, ...]
"""
content = None
with open(filepath, encoding="utf-8") as fd:
content = fd.readlines()
if skip_emptylines:
content = [item for item in content if item.strip()]
if strip_newline:
return [item.rstrip() for item in content]
else:
return content
def file_blocks(filepath, delimiter="#####", strip_newline=False):
"""
return the list of blocks in the file, separated by delimiter
used in app.transpiler.snippets to search inside snippets.txt
"""
content = file_content(filepath)
content = content.split(delimiter)
return [item.strip() if strip_newline else item for item in content if item.strip()]
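# Illustrative sketch, not from the original source (hypothetical snippets file):
#   a file containing "block one\n#####\nblock two\n" makes
#   file_blocks(path) return ["block one\n", "\nblock two\n"],
#   or ["block one", "block two"] when strip_newline=True.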
def non_empty_lines(lines):
return [item for item in lines if item.strip()]
def file_words(filepath):
"""
return a list of words
when joining, empty lines would become extra spaces
with split(), multiple spaces count as one, so they disappear from the final result
"""
content = file_lines(filepath)
# drop empty lines
# could also be: [item for item in content if item.strip()]
gabung = " ".join([item.strip() for item in content])
return gabung.split()
def line_contains(filepath, kunci):
return [item for item in file_lines(filepath) if kunci in item]
def create_if_empty_file(filepath):
if not os.path.exists(filepath):
pathlib.Path(filepath).touch()
def get_extension(filepath, no_dot=True):
"""
with_dot -> .txt
no_dot -> txt
"""
if no_dot:
return pathlib.Path(filepath).suffix[1:]
return pathlib.Path(filepath).suffix
def get_filename_full(filepath):
"""
alias of path_filename
/home/usef/untitled.txt -> untitled.txt
"""
return os.path.basename(filepath)
def get_filename_part(filepath):
"""
get_filename, but with the correct name
"""
return pathlib.Path(filepath).stem
def get_filename(filepath):
"""
should have been named: get_filename_part
path_filename -> untitled.txt
get_filename -> untitled
usually os.path.splitext(path)[0]
better to use Path
>>> Path('/a/b/c/d/untitled.txt').stem
'untitled'
untitled.txt -> untitled
"""
return pathlib.Path(filepath).stem
def get_lastpath_and_filename(filepath):
"""
risky if the parent cannot be resolved, etc.
"""
# return pathlib.Path(filepath).stem
return pathlib.Path(filepath).parent.stem + "/" + pathlib.Path(filepath).stem
def path_filename(filepath):
"""
/home/usef/untitled.txt -> untitled.txt
"""
return os.path.basename(filepath)
def path_dirname(filepath):
"""
/home/usef/untitled.txt -> /home/usef
"""
return os.path.dirname(filepath)
def get_dirname(filepath):
return path_dirname(filepath)
def file_remove(filepath):
"""
os.remove() removes a file.
If the file doesn't exist, os.remove() throws an exception, so it may be necessary to check os.path.isfile() first, or wrap in a try
the exception thrown by os.remove() if a file doesn't exist is FileNotFoundError
missing_ok=True, added in 3.8 solves that!
os.rmdir() removes an empty directory.
shutil.rmtree() deletes a directory and all its contents.
Path objects from the Python 3.4+ pathlib module also expose these instance methods:
pathlib.Path.unlink() removes a file or symbolic link.
file_to_rem = pathlib.Path("/tmp/<file_name>.txt")
file_to_rem.unlink()
Path.unlink(missing_ok=False)
Unlink method used to remove the file or the symbolik link.
If missing_ok is false (the default), FileNotFoundError is raised if the path does not exist.
If missing_ok is true, FileNotFoundError exceptions will be ignored (same behavior as the POSIX rm -f command).
Changed in version 3.8: The missing_ok parameter was added.
pathlib.Path.rmdir() removes an empty directory.
"""
if os.path.exists(filepath):
os.remove(filepath)
else:
print(f"{filepath} not found")
def dir_remove(dirpath):
os.rmdir(dirpath)
def write_list(filepath, daftar, combiner="\n"):
with open(filepath, mode="w", encoding="utf8") as fd:
fd.write(combiner.join(daftar))
def write_file(filepath, text, write_mode="w"):
with open(filepath, mode=write_mode, encoding="utf8") as fd:
fd.write(text)
def file_write(filepath, text, write_mode="w"):
write_file(filepath, text, write_mode=write_mode)
def append_file(filepath, text):
with open(filepath, mode="a", encoding="utf8") as fd:
fd.write(text)
def file_append(filepath, text):
append_file(filepath, text)
def clipboard_to_file(filepath):
content = trypaste()
with open(filepath, "w", encoding="utf-8") as fd:
fd.write(content)
def del_lines(filepath, baris_regex):
"""
https://stackoverflow.com/questions/4710067/using-python-for-deleting-a-specific-line-in-a-file
example baris_regex: "^p$" or "^p\\s*$"
and of course also:
"^#"
"""
with open(filepath, mode="r+", encoding="utf8") as fd:
content = fd.readlines()
fd.seek(0)
for line in content:
m = re.match(baris_regex, line)
if not m:
fd.write(line)
fd.truncate()  # drop the remaining lines
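# Illustrative usage, not from the original source (hypothetical file):
#   del_lines("notes.txt", r"^#")   # rewrites notes.txt in place with all '#'-prefixed lines removed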
def mk_file_to_dict(filepath, reverse=True):
"""
for the auto completer, so that
int main() { .. | main function
"""
dictionary_result = {}
dictionary_result_reverse = {}
entry_header = None
entry_body = []
collecting = False
with open(filepath, encoding="utf-8") as fd:
for line in fd.readlines():
if collecting:
if re.search(r"^--#", line):
entry_content = "".join(entry_body)
entry_content = entry_content.strip()
dictionary_result.update({entry_header: entry_content})
dictionary_result_reverse.update({entry_content: entry_header})
entry_body = []
collecting = False
else:
entry_body.append(line)
elif re.search(r"^\s*--%\s+", line):
# for some reason the second entry onward tends to arrive as a "\n--%" line
entry_header = line.replace("--%", "", 1).strip()
collecting = True
if reverse:
return dictionary_result_reverse
return dictionary_result
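# Illustrative input format, not from the original source (hypothetical snippet file):
#   --% main function
#   int main() { .. }
#   --#
# With reverse=True (the default) this yields {"int main() { .. }": "main function"};
# with reverse=False it yields {"main function": "int main() { .. }"}.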
def kurangi(banyak, dikit):
"""
string arithmetic:
kurangi(sebuah_nama_panjang, sebuah_nama)
yields: _panjang
"""
return banyak.replace(dikit, "", 1).strip()
def line_number_expression(content_length, line_expression):
"""
processes expressions such as the following:
1
~
1-5,17,~
17-~
@returns:
list of line numbers indexed from 0
@usage:
content = file read lines
line_nos = line_number_expression(len(content), line_expression)
we need len(content) because ~ must represent the last line
content_with_indexes = [(index, baris) for (index, baris) in enumerate(content)]
result = [
(process(baris) if index in line_nos else baris)
for (index, baris) in content_with_indexes]
if result:
write lines to file(result)
"""
result = []
# content_length = len(line_of_contents)
for expr in [item.strip() for item in line_expression.split(",")]:
if "-" in expr:
start, end = [item.strip() for item in expr.split("-")]
start = int(start) - 1
if end == "~":
end = content_length  # because it is used in range()
else:
end = int(end)
for k in range(start, end):
result.append(k)
else:
if expr == "~":
k = content_length - 1  # because it is used for indexing
else:
k = int(expr) - 1
result.append(k)
return result
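# Worked example based on the rules above: with content_length=10,
#   line_number_expression(10, "1-3,7,~")  ->  [0, 1, 2, 6, 9]
# i.e. 1-based expressions become 0-based indexes and "~" means the last line.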
def against_regex(regexfile, filepath):
"""
u -e"/file>rx/rx.txt|users.txt"
contents of rx.txt:
(?P<id>\d+)\t(?P<firstname>\w+)\t(?P<lastname>\w+|\-|\s)\t(?P<phone>\d+)\t(?P<email>[\.\w+]+@[\w]+\.[\w]+)\t(?P<pwd>[\w\$\/\.]+)\t(?P<enabled>1|0)\t(?P<activated>1|0)\t(?P<token>\w+|\\N)\t(?P<confirm_code>\d+)\t(?P<note>\\N)\t(?P<cr_date>[\d\-]+\s[\d:]+\.[\d\+]+)\t(?P<activated_date>[\d\-]+\s[\d:]+\.[\d\+]+|\\N)\t(?P<old_email>\\N)\t(?P<old_email_verify_code>\\N)\t(?P<old_phone>\\N)\t(?P<old_phone_verify_code>\\N)\t(?P<new_email>\\N)\t(?P<new_email_verify_code>\\N)\t(?P<new_phone>\\N)\t(?P<new_phone_verify_code>\\N)\t(?P<kyc_flag>1|0)\t(?P<setuju_snk>1|0)\t(?P<tgl_setuju_snk>[\d\-]+\s[\d:]+\.[\d\+]+|\\N)\t(?P<progres_registrasi>\d+)\t(?P<progres_kyc>\d+)\t(?P<lastlogin>[\d\-]+\s[\d:]+\.[\d\+]+|\\N)\t(?P<customerno>\\N)\t(?P<flag_login>1|0)\t(?P<fcm_token>[\w\d\-_:]+|\\N)\t(?P<role>\w+|\\N)\t(?P<referral_code>\w+|\\N)\t(?P<referrer_id>\\N)\t(?P<profile_image>[\w\d:\/\.]+|\\N)\t(?P<change_data_info>\\N)
"""
from .dirutils import isfile
from .printutils import indah4
if not (isfile(regexfile) and isfile(filepath)):
print(f"missing regexfile {regexfile} or targetfile {filepath}")
return
regexpattern = file_content(regexfile).strip()
content = file_lines(filepath)
# indah4(f'''[against_regex]
# pattern = [{regexpattern}]
# ''', warna='white')
# result = []
# content_with_indexes = [(index, baris) for (index, baris) in enumerate(content)]
# result = [(baris.replace('\n', appender+'\n') if index in lines else baris) for (index,baris) in content_with_indexes]
match_counter = 0
for (index, baris) in enumerate(content):
# coba = re.match(regexpattern, baris)
coba = re.search(regexpattern, baris)
if coba:
match_counter += 1
return match_counter
def view_lines_between(filepath, baris_cari_start, baris_cari_end=None):
"""
return the lines between /baris_cari_start/ and /baris_cari_end/
"""
from .dirutils import isfile
if not isfile(filepath):
print(f"{filepath} not found")
return None
content = None
with open(filepath, "r", encoding="utf-8") as fd:
content = fd.readlines()
# print('content:', content if len(content)<10 else f'{len(content)} lines')
# print('\n\n\n', '='*40, filepath)
if content:
mulai = [item for item in content if baris_cari_start in item]
if not mulai: return None  # start marker not found
mulai = mulai[-1]  # use the latest matching start line
index_mulai = content.index(mulai)
if baris_cari_end:
akhir = [item for item in content if baris_cari_end in item]
if len(mulai) >= 1 and len(akhir) >= 1:  # take the first match
# mulai = mulai[0]
# print('found akhir:', akhir)
# keep only end markers located at or after the start
filtered_bigger = [
item for item in akhir if content.index(item) >= index_mulai
]
if filtered_bigger:
akhir = filtered_bigger[0]  # earliest matching end line
index_akhir = content.index(akhir)
# print(f'index mulai {index_mulai} dan index akhir {index_akhir}')
content = content[index_mulai : index_akhir + 1]
return content
else:
return content[index_mulai:]
return None
def tab_to_space_all(filepath, tabstop=2):
content = file_content(filepath)
write_file(content.replace("t", tabstop * " "))
def tab_to_space_start(filepath, tabstop=2):
if env_exist("ULIBPY_TABSPACE"):
tabstop = env_int("ULIBPY_TABSPACE")
content = file_content(filepath)
baca = content.splitlines()
hasil = []
for line in baca:
m = re.match("^(\s+)(\S.*)+", line)
if m:
ubah = m.group(1)
isi = m.group(2)
# print(line, f' => [{ubah}]')
hasil.append(ubah.replace("\t", tabstop * " ") + isi)
else:
# print('*no*', line)
hasil.append(line)
result = "\n".join(hasil)
write_file(filepath, result)
def space_to_tab_start(filepath, tabstop=2):
if env_exist("ULIBPY_TABSPACE"):
tabstop = env_int("ULIBPY_TABSPACE")
content = file_content(filepath)
baca = content.splitlines()
hasil = []
for line in baca:
m = re.match("^(\s+)(\S.*)+", line)
if m:
ubah = m.group(1)
isi = m.group(2)
# print(line, f' => [{ubah}]')
# hasil.append(ubah.replace('\t', tabstop*' ') + isi)
hasil.append(ubah.replace(tabstop * " ", "\t") + isi)
else:
# print('*no*', line)
hasil.append(line)
result = "\n".join(hasil)
write_file(filepath, result)
def find_entry_by_content(filepath, content_pattern, search_with_in=True):
"""
for content-based search
interim technique:
if content_pattern in baris
"""
content = []
with open(filepath, encoding="utf-8") as fd:
content = fd.readlines()
if search_with_in:
lokasi = [
(index, item)
for (index, item) in enumerate(content)
if content_pattern in item
]
else:
# search with a regex
lokasi = [
(index, item, re.match(content_pattern, item))
for (index, item) in enumerate(content)
if re.match(content_pattern, item)
]
if lokasi:
ketemu = lokasi[0]
if len(lokasi) > 1:
print(f"multi {len(lokasi)} matches:", lokasi)
return ketemu
return None
# to search upward: for item in reversed(list[:mundur])
# to search downward: for item in list[maju:]
def ulib_history():
# import sys, tempfile
disini = os.path.realpath(__file__)
disini = os.path.dirname(disini) # schnell/app
disini = os.path.join(disini, os.pardir, os.pardir, "data")
disini = os.path.abspath(disini)
filename = "ulibpy.hist"
file_location = os.path.join(disini, filename)
# print('ulib_history', file_location)
# if env_exist('ULIBPY_HISTORY_FILE'):
# file_location = env_get('ULIBPY_HISTORY_FILE')
# if sys.platform == 'win32' and env_exist('ULIBPY_HISTORY_FILE_WIN32'):
# file_location = os.path.join(tempfile.gettempdir(), env_get('ULIBPY_HISTORY_FILE_WIN32'))
return file_location | yulibrary | /yulibrary-0.0.2-py3-none-any.whl/langutils/app/fileutils.py | fileutils.py |
import datetime
import json
import re
from .jsonutils import MyJsonify
from langs.data.fakesey import palsu
format_date = "%Y/%m/%d"
format_datetime = "%Y/%m/%d %H:%M:%S"
faker = palsu.faker
def getfakers(
funcname,
callnum=1,
funcargs=None,
kwfuncargs=None,
as_string=False,
as_list=False,
as_is=False,
quote_string=False,
format_date=format_date,
format_datetime=format_datetime,
):
# print('getfakers meminta sebanyak:', callnum)
hasil = []
for _ in range(callnum):
if funcargs:
ok = getattr(faker, funcname)(*funcargs)
elif kwfuncargs:
ok = getattr(faker, funcname)(**kwfuncargs)
else:
ok = getattr(faker, funcname)()
if not as_is:
if isinstance(ok, int):
ok = str(ok)
elif isinstance(ok, list):
ok = str(ok)
elif isinstance(ok, dict):
ok = json.dumps(ok, indent=2, cls=MyJsonify)
if quote_string and (
isinstance(ok, str)
or isinstance(ok, datetime.datetime)
or isinstance(ok, datetime.date)
):
if isinstance(ok, datetime.datetime):
ok = ok.strftime(format_datetime)
elif isinstance(ok, datetime.date):
ok = ok.strftime(format_date)
ok = '"' + ok + '"'
# TODO
# maybe when callnum == 1, just return ok directly? (handled below)
if callnum == 1:
return ok
hasil.append(ok)
if as_string:
return ", ".join(hasil)
elif as_list:
return hasil
return "[" + ", ".join(hasil) + "]"
# so we do not have to manually specify word, pyint, etc. based on the column data type, etc.
"""
address()
century()
city()
company()
company_email()
country()
country_code()
currency()
currency_code()
currency_name()
date()
date_of_birth()
date_of_birth(minimum_age=18,maximum_age=65)
date_this_month()
date_time()
date_time_between(start_date='-15y', end_date='now')
date_time_this_month()
day_of_month()
day_of_week()
email()
first_name()
free_email()
image_url()
image_url(100,100)
iso8601()
job()
last_name()
language_code()
latitude()
longitude()
latlng()
locale()
month()
month_name()
name()
name_male()
name_female()
paragraph()
phone_number()
postcode()
profile()
pybool()
pydecimal()
pydict() pydict(nb_elements=10, variable_nb_elements=True, value_types=None, *allowed_types)
>>> f.pydict(5,False,value_types='str')
{'peace': 'YWGfiqtZGChdSdUXVnlL', 'simple': 'OqtRxgsfaHdaMBYnPCzL', 'make': 'YbXXdhwicIyOKOzIWBbm', 'body': 'VOyXIBAFQFUNtUxisyeU', 'million': 'owesTiokIEebMIuSGdcQ'}
>>> f.pydict(5,False,value_types='int')
{'admit': 9350, 'shake': 2298, 'and': 7656, 'start': 9454, 'society': 6189}
>>> f.pydict(5,False,value_types=['int','str'])
{'whose': 'LYwXjEBPCKrnKzCPcalq', 'new': 'PXwFullGUjroiErfoOxU', 'not': 'WHrXoaNjdVXLgBoIXcDa', 'race': 'pOPkoSAEEQbUDQOZiJik', 'much': 4919}
pyfloat() pyfloat(left_digits=None, right_digits=None, positive=False, min_value=None, max_value=None)
pyint() pyint(min_value=0, max_value=9999, step=1)
pylist() pylist(nb_elements=10, variable_nb_elements=True, value_types=None, *allowed_types)
pyset() pyset(nb_elements=10, variable_nb_elements=True, value_types=None, *allowed_types)
random_digit()
random_element(['Automotive','Health Care','Manufacturing','High Tech','Retail'])
random_int()
random_int(start,end)
random_number()
safe_email()
sentence()
simple_profile()
simple_profile('F')
simple_profile('M')
state()
state_abbr()
street_address()
street_name()
street_suffix()
text()
time()
timezone()
url()
word()
words()
words(4)
year()
"""
faker_mapper = {
"bool": "pybool",
"boolean": "pybool",
# 'date' : 'date', # dt
# 'date' : 'date_this_year', # dt
"date": "date_between", # dt
"decimal": "pydecimal",
"float": "pyfloat",
"int": "pyint",
"integer": "pyint",
"dict": "pydict",
"list": "pylist",
"set": "pyset",
"number": "pyint",
"string": "word",
# date_time_between(start_date='-15y', end_date='now')
# 'timestamp': 'date_time', # ts
# 'timestamp': 'date_time_this_year', # ts
"timestamp": "date_time_between", # ts
"time.Time": "date_time_between", # ts
"contain_age": "random_int",
"contain_city": "city",
"contain_country": "country",
"contain_email": "email",
"contain_address": "address",
"contain_name": "name",
"contain_phone": "phone_number",
"contain_firstname": "first_name",
"contain_lastname": "last_name",
"contain_malename": "name_male",
"contain_femalename": "name_female",
}
def get_by_datatypes(
tipedata,
callnum=1,
funcargs=None,
kwfuncargs=None,
as_string=False,
as_list=False,
as_is=False,
quote_string=False,
):
hasil = getfakers(
faker_mapper[tipedata],
callnum=callnum,
funcargs=funcargs,
kwfuncargs=kwfuncargs,
as_string=as_string,
as_list=as_list,
as_is=as_is,
quote_string=quote_string,
)
return hasil
def get_by_type_or_name(
tipedata,
namadata,
callnum=1,
funcargs=None,
kwfuncargs=None,
as_string=False,
as_list=False,
as_is=False,
quote_string=False,
format_date=format_date,
format_datetime=format_datetime,
):
"""
can also be called manually:
get_by_type_or_name('random_element', funcargs=['satu','dua','tiga'])
"""
pengunci = tipedata
# print('tipedata', tipedata)
if tipedata == "timestamp" or tipedata == "date":
quote_string = True
kwfuncargs = {"end_date": "now", "start_date": "-3y"}
if "email" in namadata.lower():
pengunci = "contain_email"
elif "address" in namadata.lower():
pengunci = "contain_address"
elif "age" in namadata.lower():
pengunci = "contain_age"
funcargs = [1, 99]
elif "city" in namadata.lower():
pengunci = "contain_city"
elif "country" in namadata.lower():
pengunci = "contain_country"
elif "first" in namadata.lower() and "name" in namadata.lower():
pengunci = "contain_firstname"
elif "last" in namadata.lower() and "name" in namadata.lower():
pengunci = "contain_lastname"
elif "male" in namadata.lower() and "name" in namadata.lower():
pengunci = "contain_malename"
elif "female" in namadata.lower() and "name" in namadata.lower():
pengunci = "contain_femalename"
elif "name" in namadata.lower():
pengunci = "contain_name"
elif "phone" in namadata.lower():
pengunci = "contain_phone"
# experimental
elif "dict" in namadata.lower():
pengunci = "dict"
elif "list" in namadata.lower():
pengunci = "list"
# print('list name:', namadata)
if "int" in namadata.lower() and "str" in namadata.lower():
kwfuncargs = {"value_types": [str, int]}
elif "int" in namadata.lower():
kwfuncargs = {"value_types": [int]}
elif "float" in namadata.lower():
# print('tipe list of floats')
kwfuncargs = {"value_types": [float]}
else:
kwfuncargs = {"value_types": [str]}
if "num" in namadata:
# print(f'num in {namadata}')
get = re.match(r"^.*num(\d+).*$", namadata)
if get:
jumlah = get.group(1)
# print(f'match num in {get.groups()}, jumlah = {jumlah}')
kwfuncargs["nb_elements"] = int(jumlah)
kwfuncargs["variable_nb_elements"] = False
petakan = pengunci  # e.g. text, paragraph, which have no mapping entry
if pengunci in faker_mapper:
petakan = faker_mapper[pengunci]
hasil = getfakers(
petakan,
callnum=callnum,
funcargs=funcargs,
kwfuncargs=kwfuncargs,
as_string=as_string,
as_list=as_list,
as_is=as_is,
quote_string=quote_string,
format_date=format_date,
format_datetime=format_datetime,
)
return hasil | yulibrary | /yulibrary-0.0.2-py3-none-any.whl/langutils/app/fakerutils.py | fakerutils.py |
import functools
import json
import math
import os
import pprint
import re
import textwrap
from itertools import islice
from itertools import zip_longest as zeal
import click
from anytree.render import RenderTree
from pygments import highlight
from pygments.formatters import (NullFormatter, TerminalFormatter,
TerminalTrueColorFormatter)
from pygments.formatters.terminal import TerminalFormatter
from pygments.lexer import RegexLexer, words
# https://github.com/pygments/pygments/tree/master/pygments/lexers
# https://github.com/pygments/pygments/blob/master/pygments/lexers/_mapping.py
# https://pygments.org/docs/quickstart/
from pygments.lexers import (ClojureLexer, CppLexer, CSharpLexer, GoLexer,
HaskellLexer, HtmlLexer, JavascriptLexer,
NginxConfLexer, PythonLexer, ScalaLexer,
YamlLexer, get_lexer_by_name,
get_lexer_for_filename)
from pygments.lexers.python import PythonLexer
from pygments.token import Keyword, Name, Text
from .datetimeutils import format_epoch_longer
from .dirutils import latest_files, timeify_filelist
from .fileutils import file_content
from .utils import env_exist, env_get, env_int, trycopy, trypaste
lexer_map = {
"clj": ClojureLexer,
"cpp": CppLexer,
"cs": CSharpLexer,
"go": GoLexer,
"hs": HaskellLexer,
"html": HtmlLexer,
"java": JavascriptLexer,
"nginx": NginxConfLexer,
"py": PythonLexer,
"scala": ScalaLexer,
"yaml": YamlLexer,
}
class MyLexer(PythonLexer):
fuck_object = None
def my_callback(lexer, match):
kata = match.group(1)
if kata in MyLexer.fuck_object.keywords:
yield match.start(), Name.Builtin, kata
else:
yield match.start(), Text, kata
tokens = {
"root": [
# (words(('file', 'capcay')), Name.Builtin),
# (words(('file', 'capcay')), Name.Builtin),
(r"\s+", Text),
(r"(\w+)", my_callback),
(r"\W+", Text),
],
}
def __init__(self, keywords):
self.keywords = keywords
MyLexer.fuck_object = self
self.stripall = True
self.tabsize = 2
self.ensurenl = True
self.filters = []
# print('hasil tokens:', self.tokens)
# def get_tokens_unprocessed(self, text):
# for index, token, value in PythonLexer.get_tokens_unprocessed(self, text):
# if token is Name and value in self.keys:
# yield index, Keyword.Pseudo, value
# # yield index, Name.Builtin, value
# else:
# yield index, token, value
def indah(
message,
warna="green",
layar=None,
width=80,
newline=False,
bold=True,
blink=False,
underline=True,
reverse=True,
):
"""
which colors are available? https://pypi.org/project/colorama/
black red green yellow blue magenta cyan reset"""
try:
# click.echo ( click.style(message.center(width), fg=warna, bg=layar, bold=True, blink=True, underline=True, reverse=True).decode('utf-8') )
click.echo(
click.style(
message.center(width),
fg=warna,
bg=layar,
bold=bold,
blink=blink,
underline=underline,
reverse=reverse,
),
nl=newline,
)
except Exception as e:
print(str(e))
print(message)
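# Illustrative call, not from the original source:
#   indah("Done!", warna="cyan", layar="black", width=40, newline=True)
# prints "Done!" centered in 40 columns, bold/underlined/reversed cyan on black.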
def indah0(
message,
warna="green",
newline=False,
layar=None,
bold=False,
blink=False,
underline=False,
reverse=False,
):
"""
left justified
default: 80 column, centered
"""
indah(
message,
warna=warna,
width=0,
newline=newline,
layar=layar,
bold=bold,
blink=blink,
underline=underline,
reverse=reverse,
)
def indah1(
message, warna="green", layar=None, blink=False, underline=False, reverse=False
):
"""
newline
bold
"""
# indah0(message, warna, layar, bold=True, newline=True)
indah(
message,
warna=warna,
width=0,
newline=True,
layar=layar,
bold=True,
blink=blink,
underline=underline,
reverse=reverse,
)
def indah2(
message, warna="green", layar=None, blink=False, underline=False, reverse=False
):
"""
newline
bold
copy
"""
# indah0(message, warna, layar, bold=True, newline=True)
indah(
message,
warna=warna,
width=0,
newline=True,
layar=layar,
bold=True,
blink=blink,
underline=underline,
reverse=reverse,
)
trycopy(message)
def indah3(
message,
warna="green",
layar=None,
blink=False,
underline=False,
reverse=False,
newline=True,
):
"""
safe version of indah2 when the message is empty
copies the message to the clipboard
newline
bold
copy
"""
if not message:
return
indah(
message,
warna=warna,
width=0,
newline=newline,
layar=layar,
bold=True,
blink=blink,
underline=underline,
reverse=reverse,
)
trycopy(message)
def indah4(
message,
warna="green",
layar="black",
blink=False,
underline=False,
reverse=False,
newline=True,
):
"""
clipboard-free variant of indah3
"""
if not message:
return
indah(
message,
warna=warna,
width=0,
newline=newline,
layar=layar,
bold=True,
blink=blink,
underline=underline,
reverse=reverse,
)
def indahnl(
message,
warna="green",
layar=None,
bold=False,
newline=False,
blink=False,
reverse=False,
underline=False,
):
"""
newline
"""
# indah0(message, warna, layar, bold=True, newline=True)
indah(
message,
warna=warna,
width=0,
newline=True,
layar=layar,
bold=bold,
blink=blink,
underline=underline,
reverse=reverse,
)
def indahr(
message,
warna="green",
layar=None,
bold=False,
newline=False,
blink=False,
underline=False,
):
"""
newline
reverse
"""
# indah0(message, warna, layar, bold=True, newline=True)
indah(
message,
warna=warna,
width=0,
newline=True,
layar=layar,
bold=bold,
blink=blink,
underline=underline,
reverse=True,
)
def indahb(
message,
warna="green",
layar=None,
newline=False,
blink=False,
underline=False,
reverse=False,
):
"""
newline
bold
"""
# indah0(message, warna, layar, bold=True, newline=True)
indah(
message,
warna=warna,
width=0,
newline=True,
layar=layar,
bold=True,
blink=blink,
underline=underline,
reverse=reverse,
)
def indahu(
message,
warna="green",
layar=None,
newline=False,
bold=False,
blink=False,
reverse=False,
):
"""
newline
underline
"""
# indah0(message, warna, layar, bold=True, newline=True)
indah(
message,
warna=warna,
width=0,
newline=True,
layar=layar,
bold=bold,
blink=blink,
underline=True,
reverse=reverse,
)
def print_list(the_list, genap="yellow", ganjil="green"):
for index, filename in enumerate(the_list):
tulisan = f"{index}. {filename}"
warna = genap if (index % 2 == 0) else ganjil
# indah0(tulisan, warna=warna, newline=True)
indah4(tulisan, warna=warna)
def print_list_warna(
the_list,
genap="yellow",
ganjil="green",
bold=True,
berwarna=True,
special_ends=None,
start=0,
prefix="",
extra_warna={},
extra_suffix="",
no_index=False,
):
"""
example special_ends:
*.py
then highlight entries that end with it
UPD:
added extra_suffix so extra newlines can be inserted between lines
added no_index when no index prefix is wanted
"""
for index, filename in enumerate(the_list, start):
# print(f"proses {index} dan {filename}.")
tulisan = (
prefix
+ ("" if no_index else f"{index}. ")
+ f"{filename}"
+ (extra_suffix if extra_suffix else "")
)
if berwarna:
warna = genap if (index % 2 == 0) else ganjil
if extra_warna:
for k, v in extra_warna.items():
if index % k == 0:
warna = v
if special_ends and filename.endswith(special_ends):
indah0(tulisan, warna="white", layar="red", bold=bold, newline=True)
else:
indah0(tulisan, warna=warna, bold=bold, newline=True)
else:
print(tulisan)
def print_json(data, indent=4, warna="yellow", layar="black"):
indah4(json.dumps(data, indent=indent), warna=warna, layar=layar)
def pp(data):
pprint.pprint(data)
def ppr(data):
from rich.pretty import pprint
pprint(data)
def print_tree(node):
from anytree import RenderTree
print(RenderTree(node))
def get_tree(node):
from anytree import RenderTree
return RenderTree(node)
def filter_print_latest_files(code, basedir, cetak_waktu=False):
"""
kita nanti pengen bikin gini
|50 word1 word2
jadi dari hasil |50 kita filter yg mengandung word1 dan word2 saja.
"""
# print(f'cetak latest files [code={code}], [dir={basedir}]')
if not code:
code = "10" # minimal bertujuan utk lihat latest files
m = re.match(r"^(\d+)\s*(.*)", code)
if m:
# print(f"ketemu m dg group: {m.groups()}")
jumlah = m.group(1)
jumlah = int(jumlah)
result = latest_files(basedir, jumlah)
# if filter words are given, filter the time-sorted listing
allfilters = m.group(2)
if allfilters:
"""
here we naturally use any()
"""
splittedfilters = allfilters.split()
# print(f"splitted: {splittedfilters}")
result = [
item
for item in result
if any([word for word in splittedfilters if word in item])
]
# print(f"result: {result}")
if cetak_waktu:
# print(f"sblm timeify")
result_with_time = timeify_filelist(
result
) # latest_files_with_time(basedir, jumlah)
# print(f"sblm print list warna")
print_list_warna(result_with_time)
return result_with_time
else:
print_list_warna(result)
return result
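# Illustrative call, not from the original source (hypothetical directory):
#   filter_print_latest_files("20 report", "/tmp/logs")
# lists the 20 most recently modified entries under /tmp/logs whose names contain "report".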
def print_file(filepath):
print(file_content(filepath))
def indah_file(filepath, warna="green", layar="black"):
indah3(file_content(filepath), warna=warna, layar=layar)
def print_copy(content):
print(content)
trycopy(content)
def print_copy_file(filename, warna="white", pygments=False, lexer="py"):
header = f"{'='*40} {filename}"
content = file_content(filename)
trycopy(content)
# print(header)
indah0(header, warna=warna, newline=True)
if not pygments:
print(content)
else:
default_lexer = lexer_map[lexer]()
filename, extension = os.path.splitext(filename)
if extension:
choose = [item for item in lexer_map.keys() if extension == "." + item]
if choose:
choose = choose[0]
default_lexer = lexer_map[choose]()
print(highlight(content, default_lexer, TerminalTrueColorFormatter()))
# print(highlight(content, get_lexer_for_filename(filename), NullFormatter()))
# print(highlight(content, get_lexer_for_filename(filename), TerminalTrueColorFormatter()))
def dir_w_old(list_files, jumlah_kolom=None, screen_width=None):
if env_exist("ULIBPY_DIR_W_SCREENWIDTH") or not screen_width:
screen_width = int(env_get("ULIBPY_DIR_W_SCREENWIDTH"))
if env_exist("ULIBPY_DIR_W_COLNUMBER") or not jumlah_kolom:
jumlah_kolom = env_int("ULIBPY_DIR_W_COLNUMBER")
pecah = lambda asli, banyak: [
asli[i : i + banyak] for i in range(0, len(asli), banyak)
]
terbagi = pecah(list_files, jumlah_kolom)
kolomku = f"{{: >{screen_width / jumlah_kolom}}}"
# [ print(f"{kolomku*3}".format(*item)) for item in b(list(range(0,9)),3) ]
for item in terbagi:
print(f"{kolomku*len(item)}".format(*item))
def dir_w(
sumber_array,
jumlah_kolom=None,
screen_width=None,
warna="blue",
layar=None,
bold=True,
):
if env_exist("ULIBPY_DIR_W_SCREENWIDTH") or not screen_width:
screen_width = int(env_get("ULIBPY_DIR_W_SCREENWIDTH"))
if env_exist("ULIBPY_DIR_W_COLNUMBER") or not jumlah_kolom:
jumlah_kolom = env_int("ULIBPY_DIR_W_COLNUMBER")
def print_transposed(terbagi):
kolomku = f"{{: <{int(screen_width/jumlah_kolom)}}}"
for item in terbagi:
# drop None elements from the row
item = [el for el in item if el is not None]
indah0(
f"{kolomku*len(item)}".format(*item),
warna=warna,
layar=layar,
bold=bold,
newline=True,
)
def transpose(array):
return list(map(list, zeal(*array)))
def ice(array, *args):
return list(islice(array, *args))
ambil = math.ceil(len(sumber_array) / jumlah_kolom)
urut = [
ice(sumber_array, ambil * oper, ambil * (oper + 1))
for oper in range(jumlah_kolom)
]
transposed = transpose(urut)
print_transposed(transposed)
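# Illustrative call, not from the original source (assumes the ULIBPY_DIR_W_* env vars are unset):
#   dir_w(["a.txt", "b.txt", "c.txt", "d.txt"], jumlah_kolom=2, screen_width=20)
# fills column-wise (a.txt/b.txt in the first column, c.txt/d.txt in the second), 10 chars per column.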
# filedir/library.py
def print_enumerate(contentlist):
for index, item in enumerate(contentlist):
print("{:4d}| {:s}".format(index, item))
def indah_enumerate(contentlist, warna="white"):
for index, item in enumerate(contentlist):
cetak = "{:4d}| {:s}".format(index, item)
indah0(cetak, newline=True, bold=True, warna=warna)
def print_copy_enumerate_filtercontent(string_content, filterpattern, warna="green"):
index_lines = enumerate(string_content.splitlines())
content = [
"{:4d}| {:s}".format(no, baris)
for (no, baris) in index_lines
if filterpattern in baris
]
trycopy(content)
for line in content:
indah3(line, warna=warna)
def print_copy_enumerate(content):
trycopy(content)
for index, item in enumerate(content.splitlines()):
print("{:4d}| {:s}".format(index, item))
def print_copy_enumerate_list(contentlist, delimiter=""):
"""
like print_copy_enumerate,
but the input is a list, so no splitlines() is needed first
"""
trycopy(delimiter.join(contentlist))
for index, item in enumerate(contentlist):
print("{:4d}| {:s}".format(index, item))
def print_debug(*args, **kwargs):
if env_int("ULIBPY_FMUS_DEBUG"):
print(*args, **kwargs)
def indah_debug(*args, **kwargs):
if env_int("ULIBPY_FMUS_DEBUG"):
indah4(*args, **kwargs)
class Debug:
def __init__(self, isDebug=False, printToFile=""):
# self.isDebug = isDebug
self.isDebug = env_int("ULIBPY_FMUS_DEBUG")
# input(f'nilai debug adlh [{self.isDebug}] dan args [{isDebug}] ')
if printToFile:
self.filename = printToFile
self.fd = open(self.filename, "a")
def stop(self):
if hasattr(self, "filename"):
if self.fd:
self.fd.close()
def __call__(self, *args, **kwargs):
"""
we support a kwarg: forced
if forced == True then print no matter what
"""
# print('debug is called', 'debug?', self.isDebug, 'kwargs', kwargs)
if self.isDebug:
if hasattr(self, "filename"):
print(*args, **kwargs, file=self.fd)
else:
# indah0(*args, **kwargs)
if len(args) == 1 and isinstance(args[0], str):
pesan = args[0]
indah0(pesan, **kwargs, reverse=True)
else:
print(*args, **kwargs)
else:
if kwargs and "forced" in kwargs and kwargs["forced"]:
del kwargs["forced"]
input("forcing debug!")
if len(args) == 1 and isinstance(args[0], str):
pesan = args[0]
indah0(pesan, **kwargs)
else:
print(*args, **kwargs)
def pigmen(content, keywords):
print(highlight(content, MyLexer(keywords), TerminalFormatter()))
# lexer = MyLexer()
# state_item = (words(tuple(keywords)), Name.Builtin)
# lexer.tokens = {
# 'root': [
# state_item,
# (r'\s+', Text),
# (r'\w+', Text),
# (r'\W+', Text),
# ],
# }
# print(highlight(content, lexer, TerminalFormatter()))
def print_file_pigmen(filepath, keywords):
pigmen(file_content(filepath), keywords)
def printex(msg="", printer=print):
import traceback
printer(msg)
printer(traceback.format_exc())
def tryex(block_content, msg="", printer=print):
import traceback
try:
block_content()
except Exception as err:
printer(f"{msg}: {err}")
printer(traceback.format_exc()) | yulibrary | /yulibrary-0.0.2-py3-none-any.whl/langutils/app/printutils.py | printutils.py
import traceback
from .dirutils import joiner
from .fileutils import append_file, file_write
from .utils import env_get
TAB_SPACE_MULT = 2
TAB = " " * TAB_SPACE_MULT
JSON_INDENT = TAB_SPACE_MULT
TABS = TAB * 2
SCHNELL_BASEDIR = env_get("ULIBPY_BASEDIR")
if SCHNELL_BASEDIR:
TEMPLATESDIR = joiner(SCHNELL_BASEDIR, "db/bantuan/templates")
type_mapper_by_provider = {
"django_orm": {
"array_of": "ARRAY(__SUBTYPE__)",
"empty_array": "Array",
"auto": "{ type: Number, required: true, }",
"bigint": "BIGINT",
"blob": "String",
"boolean": "BOOLEAN",
"date": "DATE",
"decimal": "DECIMAL",
"django_many_to_many": "[{ type: String }]",
"django_one_to_many": "String",
"django_one_to_one": "[{ type: String }]",
"django_foreign_key": "{ type: STRING, allowNull: false, references: __DQModelRujukan__DQ, }",
"double": "DOUBLE",
"enum": "ENUM",
"float": "FLOAT",
"image": "STRING",
"integer": "INTEGER",
"number": "DECIMAL",
"serial": "String",
"slug": "STRING",
"string": "STRING",
"text": "TEXT",
"timestamp": '"TIMESTAMP"',
"uuid": "UUID",
"uuidv1": "UUIDV1",
"uuidv4": "UUIDV4",
"varchar": "STRING",
},
"hibernate": {
"array_of": "ARRAY(__SUBTYPE__)",
"empty_array": "Array",
"auto": "{ type: Number, required: true, }",
"bigint": "BIGINT",
"blob": "String",
"boolean": "BOOLEAN",
"date": "DATE",
"decimal": "DECIMAL",
"django_many_to_many": "[{ type: String }]",
"django_one_to_many": "String",
"django_one_to_one": "[{ type: String }]",
"django_foreign_key": "{ type: STRING, allowNull: false, references: __DQModelRujukan__DQ, }",
"double": "DOUBLE",
"enum": "ENUM",
"float": "FLOAT",
"image": "STRING",
"integer": "INTEGER",
"number": "DECIMAL",
"serial": "String",
"slug": "STRING",
"string": "STRING",
"text": "TEXT",
"timestamp": '"TIMESTAMP"',
"uuid": "UUID",
"uuidv1": "UUIDV1",
"uuidv4": "UUIDV4",
"varchar": "STRING",
},
"mongoose": {
"array_of": "Array",
"empty_array": "Array",
"auto": "String",
"bigint": "String",
"blob": "String",
"boolean": "Boolean",
"date": "Date",
"decimal": "Schema.Types.Decimal128",
"django_many_to_many": "Schema.Types.ObjectId",
"django_one_to_many": "Schema.Types.ObjectId",
"django_one_to_one": "Schema.Types.ObjectId",
"django_foreign_key": "Schema.Types.ObjectId",
"double": "Number",
"enum": "String",
"float": "Number",
"image": "String",
"integer": "Number",
"number": "String",
"serial": "String",
"slug": "String",
"string": "String",
"text": "String",
"timestamp": "Date",
"uuid": "Schema.Types.ObjectId",
"uuidv1": "Schema.Types.ObjectId",
"uuidv4": "Schema.Types.ObjectId",
"varchar": "String",
},
"mybatis": {
"array_of": "ARRAY(__SUBTYPE__)",
"empty_array": "Array",
"auto": "{ type: Number, required: true, }",
"bigint": "BIGINT",
"blob": "String",
"boolean": "BOOLEAN",
"date": "DATE",
"decimal": "DECIMAL",
"django_many_to_many": "[{ type: String }]",
"django_one_to_many": "String",
"django_one_to_one": "[{ type: String }]",
"django_foreign_key": "{ type: STRING, allowNull: false, references: __DQModelRujukan__DQ, }",
"double": "DOUBLE",
"enum": "ENUM",
"float": "FLOAT",
"image": "STRING",
"integer": "INTEGER",
"number": "DECIMAL",
"serial": "String",
"slug": "STRING",
"string": "STRING",
"text": "TEXT",
"timestamp": '"TIMESTAMP"',
"uuid": "UUID",
"uuidv1": "UUIDV1",
"uuidv4": "UUIDV4",
"varchar": "STRING",
},
"nest_mongo": {
"array_of": "ARRAY(__SUBTYPE__)",
"empty_array": "Array",
"auto": "{ type: Number, required: true, }",
"bigint": "BIGINT",
"blob": "String",
"boolean": "BOOLEAN",
"date": "DATE",
"decimal": "DECIMAL",
"django_many_to_many": "[{ type: String }]",
"django_one_to_many": "String",
"django_one_to_one": "[{ type: String }]",
"django_foreign_key": "{ type: STRING, allowNull: false, references: __DQModelRujukan__DQ, }",
"double": "DOUBLE",
"enum": "ENUM",
"float": "FLOAT",
"image": "STRING",
"integer": "INTEGER",
"number": "DECIMAL",
"serial": "String",
"slug": "STRING",
"string": "STRING",
"text": "TEXT",
"timestamp": '"TIMESTAMP"',
"uuid": "UUID",
"uuidv1": "UUIDV1",
"uuidv4": "UUIDV4",
"varchar": "STRING",
},
"nest_postgres": {
"array_of": "ARRAY(__SUBTYPE__)",
"empty_array": "Array",
"auto": "{ type: Number, required: true, }",
"bigint": "BIGINT",
"blob": "String",
"boolean": "BOOLEAN",
"date": "DATE",
"decimal": "DECIMAL",
"django_many_to_many": "[{ type: String }]",
"django_one_to_many": "String",
"django_one_to_one": "[{ type: String }]",
"django_foreign_key": "{ type: STRING, allowNull: false, references: __DQModelRujukan__DQ, }",
"double": "DOUBLE",
"enum": "ENUM",
"float": "FLOAT",
"image": "STRING",
"integer": "INTEGER",
"number": "DECIMAL",
"serial": "String",
"slug": "STRING",
"string": "STRING",
"text": "TEXT",
"timestamp": '"TIMESTAMP"',
"uuid": "UUID",
"uuidv1": "UUIDV1",
"uuidv4": "UUIDV4",
"varchar": "STRING",
},
"prisma": {
"array_of": "[]",
"empty_array": "Array",
"auto": "String",
"bigint": "String",
"blob": "String",
"boolean": "Boolean",
"date": "Date",
"decimal": "String",
"django_many_to_many": "String",
"django_one_to_many": "String",
"django_one_to_one": "String",
"django_foreign_key": "String",
"double": "Number",
"enum": "String",
"float": "Number",
"image": "String",
"integer": "Int",
"number": "String",
"serial": "String",
"slug": "String",
"string": "String",
"text": "String",
"timestamp": "DateTime",
"uuid": "String",
"uuidv1": "String",
"uuidv4": "String",
"varchar": "String",
},
"sequelize": {
"array_of": "ARRAY(__SUBTYPE__)",
"empty_array": "Array",
"auto": "{ type: Number, required: true, }",
"bigint": "BIGINT",
"blob": "String",
"boolean": "BOOLEAN",
"date": "DATE",
"decimal": "DECIMAL",
"django_many_to_many": "[{ type: String }]",
"django_one_to_many": "String",
"django_one_to_one": "[{ type: String }]",
"django_foreign_key": '{ type: STRING, allowNull: false, references: "ModelRujukan", }',
"double": "DOUBLE",
"enum": "ENUM",
"float": "FLOAT",
"image": "STRING",
"integer": "INTEGER",
"number": "DECIMAL",
"serial": "String",
"slug": "STRING",
"string": "STRING",
"text": "TEXT",
"timestamp": '"TIMESTAMP"',
"uuid": "UUID",
"uuidv1": "UUIDV1",
"uuidv4": "UUIDV4",
"varchar": "STRING",
},
"sql_mssql": {
"array_of": "ARRAY(__SUBTYPE__)",
"empty_array": "Array",
"auto": "{ type: Number, required: true, }",
"bigint": "BIGINT",
"blob": "String",
"boolean": "BOOLEAN",
"date": "DATE",
"decimal": "DECIMAL",
"django_many_to_many": "[{ type: String }]",
"django_one_to_many": "String",
"django_one_to_one": "[{ type: String }]",
"django_foreign_key": "{ type: STRING, allowNull: false, references: __DQModelRujukan__DQ, }",
"double": "DOUBLE",
"enum": "ENUM",
"float": "FLOAT",
"image": "STRING",
"integer": "INTEGER",
"number": "DECIMAL",
"serial": "String",
"slug": "STRING",
"string": "STRING",
"text": "TEXT",
"timestamp": '"TIMESTAMP"',
"uuid": "UUID",
"uuidv1": "UUIDV1",
"uuidv4": "UUIDV4",
"varchar": "STRING",
},
"sql_mysql": {
"array_of": "Array",
"empty_array": "Array",
"auto": "String",
"bigint": "String",
"blob": "String",
"boolean": "BOOLEAN",
"date": "Date",
"decimal": "Schema.Types.Decimal128",
"django_many_to_many": "Schema.Types.ObjectId",
"django_one_to_many": "Schema.Types.ObjectId",
"django_one_to_one": "Schema.Types.ObjectId",
"django_foreign_key": "Schema.Types.ObjectId",
"double": "Number",
"enum": "String",
"float": "Number",
"image": "String",
"integer": "INT", # id INT PRIMARY KEY
"number": "String",
"serial": "String",
"slug": "String",
"string": "String",
"text": "String",
"timestamp": "Date",
"uuid": "Schema.Types.ObjectId",
"uuidv1": "Schema.Types.ObjectId",
"uuidv4": "Schema.Types.ObjectId",
"varchar": "String",
},
"sql_postgres": {
"array_of": "ARRAY(__SUBTYPE__)",
"empty_array": "Array",
"auto": "{ type: Number, required: true, }",
"bigint": "BIGINT",
"blob": "String",
"boolean": "BOOLEAN",
"date": "DATE",
"decimal": "DECIMAL",
"django_many_to_many": "[{ type: String }]",
"django_one_to_many": "String",
"django_one_to_one": "[{ type: String }]",
"django_foreign_key": "{ type: STRING, allowNull: false, references: __DQModelRujukan__DQ, }",
"double": "DOUBLE",
"enum": "ENUM",
"float": "FLOAT",
"image": "STRING",
"integer": "INTEGER",
"number": "DECIMAL",
"serial": "String",
"slug": "STRING",
"string": "STRING",
"text": "TEXT",
"timestamp": '"TIMESTAMP"',
"uuid": "UUID",
"uuidv1": "UUIDV1",
"uuidv4": "UUIDV4",
"varchar": "STRING",
},
"sql_sqlite": {
"array_of": "ARRAY(__SUBTYPE__)",
"empty_array": "Array",
"auto": "{ type: Number, required: true, }",
"bigint": "BIGINT",
"blob": "String",
"boolean": "BOOLEAN",
"date": "DATE",
"decimal": "DECIMAL",
"django_many_to_many": "[{ type: String }]",
"django_one_to_many": "String",
"django_one_to_one": "[{ type: String }]",
"django_foreign_key": "{ type: STRING, allowNull: false, references: __DQModelRujukan__DQ, }",
"double": "DOUBLE",
"enum": "ENUM",
"float": "FLOAT",
"image": "STRING",
"integer": "INTEGER",
"number": "DECIMAL",
"serial": "String",
"slug": "STRING",
"string": "STRING",
"text": "TEXT",
"timestamp": '"TIMESTAMP"',
"uuid": "UUID",
"uuidv1": "UUIDV1",
"uuidv4": "UUIDV4",
"varchar": "STRING",
},
"struct_go": {
# 'array_of' : 'ARRAY(__SUBTYPE__)',
"array_of": "__SUBTYPE__[]",
"empty_array": "[]",
"auto": "{ type: Number, required: true, }",
"bigint": "BIGINT",
"blob": "String",
"boolean": "BOOLEAN",
"date": "DATE",
"decimal": "DECIMAL",
"django_many_to_many": "[{ type: String }]",
"django_one_to_many": "String",
"django_one_to_one": "[{ type: String }]",
"django_foreign_key": "{ type: STRING, allowNull: false, references: __DQModelRujukan__DQ, }",
"double": "DOUBLE",
"enum": "ENUM",
"float": "FLOAT",
"image": "STRING",
"integer": "INTEGER",
"number": "DECIMAL",
"serial": "String",
"slug": "STRING",
"string": "STRING",
"text": "TEXT",
"timestamp": '"TIMESTAMP"',
"uuid": "UUID",
"uuidv1": "UUIDV1",
"uuidv4": "UUIDV4",
"varchar": "STRING",
},
"struct_kt": {
# 'array_of' : 'ARRAY(__SUBTYPE__)',
"array_of": "__SUBTYPE__[]",
"empty_array": "[]",
"auto": "{ type: Number, required: true, }",
"bigint": "BIGINT",
"blob": "String",
"boolean": "BOOLEAN",
"date": "DATE",
"decimal": "DECIMAL",
"django_many_to_many": "[{ type: String }]",
"django_one_to_many": "String",
"django_one_to_one": "[{ type: String }]",
"django_foreign_key": "{ type: STRING, allowNull: false, references: __DQModelRujukan__DQ, }",
"double": "DOUBLE",
"enum": "ENUM",
"float": "FLOAT",
"image": "STRING",
"integer": "INTEGER",
"number": "DECIMAL",
"serial": "String",
"slug": "STRING",
"string": "STRING",
"text": "TEXT",
"timestamp": '"TIMESTAMP"',
"uuid": "UUID",
"uuidv1": "UUIDV1",
"uuidv4": "UUIDV4",
"varchar": "STRING",
},
"struct_rs": {
# 'array_of' : 'ARRAY(__SUBTYPE__)',
"array_of": "__SUBTYPE__[]",
"empty_array": "[]",
"auto": "{ type: Number, required: true, }",
"bigint": "BIGINT",
"blob": "String",
"boolean": "BOOLEAN",
"date": "DATE",
"decimal": "DECIMAL",
"django_many_to_many": "[{ type: String }]",
"django_one_to_many": "String",
"django_one_to_one": "[{ type: String }]",
"django_foreign_key": "{ type: STRING, allowNull: false, references: __DQModelRujukan__DQ, }",
"double": "DOUBLE",
"enum": "ENUM",
"float": "FLOAT",
"image": "STRING",
"integer": "INTEGER",
"number": "DECIMAL",
"serial": "String",
"slug": "STRING",
"string": "STRING",
"text": "TEXT",
"timestamp": '"TIMESTAMP"',
"uuid": "UUID",
"uuidv1": "UUIDV1",
"uuidv4": "UUIDV4",
"varchar": "STRING",
},
"struct_ts": {
# 'array_of' : 'ARRAY(__SUBTYPE__)',
"array_of": "__SUBTYPE__[]",
"empty_array": "[]",
"auto": "{ type: number, required: true, }",
"bigint": "number",
"blob": "string",
"boolean": "boolean",
"date": "string",
"decimal": "number",
"django_many_to_many": "[{ type: string }]",
"django_one_to_many": "string",
"django_one_to_one": "[{ type: string }]",
"django_foreign_key": "{ type: string, allowNull: false, references: __DQModelRujukan__DQ, }",
"double": "number",
"enum": "string",
"float": "number",
"image": "string",
"integer": "number",
"number": "number",
"serial": "string",
"slug": "string",
"string": "string",
"text": "string",
"timestamp": "string",
"uuid": "string",
"uuidv1": "string",
"uuidv4": "string",
"varchar": "string",
},
}
type_mapper = {
"empty_array": {
"mongoose": "[]",
"sequelize": "ARRAY",
},
"array_of": {
"mongoose": "Array",
"sequelize": "ARRAY(__SUBTYPE__)",
},
"auto": {
"django": "models.AutoField",
"flask": "sqlalchemy.Integer",
"graphene": "graphene.ID",
"pydantic": "int",
"sequelize": "{ type: Number, required: true, }",
"sqlalchemy": "Integer",
},
"bigint": {
"django": "BIGINT",
"flask": "sqlalchemy.Integer",
"graphene": "graphene.Int",
"pydantic": "int",
"sequelize": "BIGINT",
"sqlalchemy": "Integer",
},
"blob": {},
"boolean": {
"django": "models.BooleanField",
"flask": "sqlalchemy.Boolean",
"graphene": "graphene.Boolean",
"mongoose": "Boolean",
"pydantic": "bool",
"sboot": "Boolean",
"sequelize": "BOOLEAN",
"sqlalchemy": "Boolean",
},
"date": {
"django": "models.DateField",
"flask": "sqlalchemy.DateTime",
"graphene": "graphene.types.datetime.DateTime",
"mongoose": "Date",
"pydantic": "datetime.date",
"sequelize": "DATE",
"sqlalchemy": "DateTime",
},
"decimal": {
"django": "models.DecimalField",
"sequelize": "DECIMAL",
},
"double": {
"django": "DOUBLE",
"sequelize": "DOUBLE",
},
"email": {
"django": "models.EmailField",
},
"enum": {
"django": "ENUM",
"sequelize": "ENUM",
},
"float": {
"django": "FLOAT",
"flask": "sqlalchemy.Float",
"graphene": "graphene.Float",
"nest": "number",
"pydantic": "float",
"sboot": "Float",
"sequelize": "FLOAT",
"sqlalchemy": "Float",
},
"id": {
"graphene": "graphene.ID",
},
"image": {
"django": "models.ImageField",
"sequelize": "STRING",
},
"integer": {
"django": "models.IntegerField",
"flask": "sqlalchemy.Integer",
"graphene": "graphene.Int",
"pydantic": "int",
"sboot": "Int",
"sequelize": "INTEGER",
"sqlalchemy": "Integer",
},
"number": {
"django": "NUMBER",
"mongoose": "Number",
"sequelize": "DECIMAL",
},
"serial": {
"flask": "sqlalchemy.Integer",
},
"slug": {
"django": "models.SlugField",
"sequelize": "STRING",
},
"string": {
"django": "models.CharField",
"djongo": "models.CharField",
"flask": "sqlalchemy.String",
"graphene": "graphene.String",
"mongoose": "String",
"nest": "string",
"pydantic": "str",
"sboot": "String",
"sequelize": "STRING",
"sqlalchemy": "String",
},
"text": {
"django": "models.TextField",
"djongo": "models.CharField",
"flask": "sqlalchemy.Text",
"graphene": "graphene.String",
"pydantic": "str",
"sequelize": "TEXT",
"sqlalchemy": "Text",
},
"timestamp": {
# what is the difference between auto_now=True and auto_now_add=True?
"django": "models.DateTimeField",
"djongo": "models.DateTimeField",
"flask": "sqlalchemy.TimeStamp",
"graphene": "graphene.types.datetime.DateTime",
"pydantic": "datetime.datetime",
"sequelize": "TEXT",
"sequelize": "__DQTIMESTAMP__DQ",
"sqlalchemy": "TimeStamp",
},
"url": {
"djongo": "models.URLField",
},
"uuid": {
"sequelize": "UUID",
},
"uuid1": {
"sequelize": "UUIDV1",
},
"uuid4": {
"sequelize": "UUIDV4",
},
"varchar": {
"django": "models.CharField",
"flask": "sqlalchemy.String",
"graphene": "graphene.String",
"pydantic": "str",
"sequelize": "STRING",
"sqlalchemy": "String",
},
# django specific
"django_foreign_key": {
"django": "models.ForeignKey",
"djongo": "models.ForeignKey",
"flask": "sqlalchemy.ForeignKey",
"mongoose": "Schema.ObjectId",
"sequelize": "{ type: STRING, allowNull: false, references: __DQModelRujukan__DQ, }",
},
"django_one_to_one": {
"django": "models.OneToOneField",
"sequelize": "[{ type: String }]",
},
"django_one_to_many": {
"django": "models.OneToManyField",
},
"django_many_to_many": {
"django": "models.ManyToManyField",
"sequelize": "[{ type: String }]",
},
}
def columnify(tables, provider="django"):
tables_with_columns = {}
table_attributes = {}
for tblidx, tbl in enumerate(tables, 1):
tablename_lower = tbl.model.lower()
tablename_case = tbl.model
columns_with_types = []
columns_names = []
for colidx, column in enumerate(tbl.children):
mapper = type_mapper.get(column.type, {})
tipe_kolom = mapper.get(provider, column.type)
nama_kolom = column.label
coltype = f"{nama_kolom}: {tipe_kolom}"
colname = nama_kolom
if hasattr(column, "allowNull"):
pass
if hasattr(column, "auto_increment"):
pass
if hasattr(column, "auto_now"):
pass
if hasattr(column, "auto_now_add"):
pass
if hasattr(column, "blank"):
pass
if hasattr(column, "db_index"):
pass
if hasattr(column, "decimal_places"):
pass
if hasattr(column, "default"):
pass
if hasattr(column, "defaultValue"):
pass
if hasattr(column, "editable"):
pass
if hasattr(column, "foreignKeyOnDelete"):
pass
if hasattr(column, "max_length"):
pass
if hasattr(column, "max_digits"):
pass
if hasattr(column, "primaryKey"):
pass
if hasattr(column, "references"):
pass
if hasattr(column, "referencesKey"):
pass
if hasattr(column, "related_name"):
pass
if hasattr(column, "relTo"):
pass
if hasattr(column, "unique"):
pass
if hasattr(column, "values"):
pass
if hasattr(column, "verbose_name"):
pass
columns_with_types.append(coltype)
columns_names.append(colname)
tables_with_columns[tablename_case] = {
"columns_with_types": columns_with_types,
"columns_names": columns_names,
"table_attributes": table_attributes,
}
return tables_with_columns
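# Illustrative sketch, not from the original source (assumes anytree.AnyNode nodes
# carrying the model/label/type attributes used above):
#   from anytree import AnyNode
#   user = AnyNode(model="User")
#   AnyNode(parent=user, label="name", type="string")
#   AnyNode(parent=user, label="age", type="integer")
#   columnify([user], provider="django")
#   -> {"User": {"columns_with_types": ["name: models.CharField", "age: models.IntegerField"],
#                "columns_names": ["name", "age"], "table_attributes": {}}}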
def kolom_as_params(table, exclude=""):
"""
exclude: column label to leave out of the parameter list
"""
columns = [f"{col.label}: {col.type}" for col in table.children]
if exclude:
columns = [
f"{col.label}: {col.type}" for col in table.children if col.label != exclude
]
return ", ".join(columns)
def kolom_as_args(table, exclude=None):
columns = [col.label for col in table.children]
if exclude:
columns = [col.label for col in table.children if col.label != exclude]
return ", ".join(columns)
def jenis_kolom(jenis, backend="django"):
if jenis not in type_mapper:
return jenis
pertama = type_mapper.get(jenis)
if backend not in pertama:
return jenis
return pertama.get(backend, jenis)
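# Examples based on the type_mapper table above:
#   jenis_kolom("string", "django")   -> "models.CharField"
#   jenis_kolom("uuid", "sequelize")  -> "UUID"
#   jenis_kolom("jsonb", "django")    -> "jsonb"   (unknown types fall through unchanged)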
def tab_real(num=1, tab="\t"):
return num * tab
def tab(num=1, space=" ", tab="\t", use_space=True, space_size=TAB_SPACE_MULT):
"""
for spaces:
use_space=True (default)
for real tabs:
use_space=False
"""
if use_space:
return num * space * space_size
else:
return num * tab
def tabber(num_tab=1, use_tab=True, space_size=2):
tabber = (
tab(num=num_tab, use_space=False)
if use_tab
else tab(num=num_tab, space=space_size * " ", use_space=True)
)
return tabber
def tab_tab(num=1):
return tab(num=num, use_space=False)
def tab_space(num=1, space_size=2):
return tab(num=num, use_space=True, space_size=space_size)
def tabify_content(content, self_tab=tab(), num_tab=1):
tabify = [num_tab * self_tab + item for item in content.splitlines()]
return "\n".join(tabify)
def tabify_contentlist(content, self_tab=tab(), num_tab=1):
tabify = [num_tab * self_tab + item for item in content]
return "\n".join(tabify)
def append_entry(filepath_output, header, body):
"""
/apps/{tablename}/models.py
"""
start = "--%"
end = "--#"
# header = f'/apps/{tablename}/models.py'
entry_model = f"\n{start} {header}\n" + body + f"\n{end}\n"
append_file(filepath_output, entry_model)
return entry_model
def gen_template_db_init(
RootNode, return_backend=False, print_info=False, use_real_tab=False
):
dbvalues = {}
dblines = []
dbinfo = RootNode
if hasattr(dbinfo, "username"):
dbvalues["username"] = dbinfo.username
dblines.append(f"%__TEMPLATE_DBUSER={dbinfo.username}")
if hasattr(dbinfo, "password"):
dbvalues["password"] = dbinfo.password
dblines.append(f"%__TEMPLATE_DBPASS={dbinfo.password}")
if hasattr(dbinfo, "host"):
dbvalues["host"] = dbinfo.host
dblines.append(f"%__TEMPLATE_DBHOST={dbinfo.host}")
if hasattr(dbinfo, "port"):
dbvalues["port"] = dbinfo.port
dblines.append(f"%__TEMPLATE_DBPORT={dbinfo.port}")
if hasattr(dbinfo, "dbname"):
dbvalues["dbname"] = dbinfo.dbname
dblines.append(f"%__TEMPLATE_DBNAME={dbinfo.dbname}")
db_backend = "sqlite"
if (
"host" in dbvalues
or "port" in dbvalues
or "username" in dbvalues
or "password" in dbvalues
):
db_backend = "postgres"
# %__TEMPLATE_DBUSER=usef
# %__TEMPLATE_DBPASS=rahasia
# %__TEMPLATE_DBHOST=gisel.ddns.net
# %__TEMPLATE_DBPORT=9022
# %__TEMPLATE_DBNAME=ecomm
if use_real_tab:
template_db_init = "\n".join([tab_real(1) + item for item in dblines])
else:
template_db_init = "\n".join([tab(1) + item for item in dblines])
if print_info:
print("=" * 20, "dblines")
print(template_db_init)
if return_backend:
return template_db_init, db_backend
return template_db_init
def gen_template_app_init(tables, print_info=False, use_real_tab=False):
applines = []
for index, tbl in enumerate(tables, 1):
appidx = str(index).zfill(2)
try:
tablename = tbl.model
except AttributeError as err:
# AttributeError: 'AnyNode' object has no attribute 'model'
print("Ketemu error:", err)
print(
'Cek apakah semua kolom kecuali kolom akhir terpisah dg terminator ";".'
)
print(traceback.format_exc())
# .lower() is needed; this becomes the directory name for each app
applines.append(f"%__TEMPLATE_APP{appidx}={tablename.lower()}")
if use_real_tab:
template_app_init = "\n".join([tab_real(1) + item for item in applines])
else:
template_app_init = "\n".join([tab(1) + item for item in applines])
if print_info:
print("=" * 20, "applines")
print(template_app_init)
return template_app_init
def generate_app_content(tables, app_content_template, print_info=False):
contentlines = []
for index, tbl in enumerate(tables, 1):
appidx = str(index).zfill(2)
tablename = tbl.model
content = app_content_template(appidx, tablename)
contentlines.append(content)
# template_app_content = '\n'.join([tab(2)+item for item in contentlines])
template_app_content = "\n".join(contentlines)
if print_info:
print("=" * 20, "contentlines")
print(template_app_content)
return template_app_content
def write_mkfile(mkfile_content, filepath_output):
file_write(filepath_output, mkfile_content) | yulibrary | /yulibrary-0.0.2-py3-none-any.whl/langutils/app/usutils.py | usutils.py |
import msgpack
import base64
import json
from typing import Dict, Union, Sequence, Mapping
from uuid import uuid4
from datetime import datetime
from .errors import YumError
wtype = Union[int, float, bool, None, Sequence, Mapping, datetime]
_DTFORMAT = "%Y%m%d%H%M%S%f"
def decode_datetime(obj):
if "_dt_" in obj:
obj = datetime.strptime(obj["as_str"], _DTFORMAT)
return obj
def encode_datetime(obj):
if isinstance(obj, datetime):
return {"_dt_": True, "as_str": obj.strftime(_DTFORMAT)}
return obj
def _encode(obj) -> str:
kvs = {}
if isinstance(obj, Ticket):
kvs["id"] = obj.id
kvs["code"] = obj.code
kvs["birth_time"] = obj.birth_time
kvs["body"] = (obj.body,)
kvs["status"] = obj.status
if isinstance(obj, Feedback):
kvs["for_id"] = obj.for_id
bs = msgpack.packb(kvs, default=encode_datetime)
if isinstance(bs, bytes):
return base64.urlsafe_b64encode(bs).decode()
raise YumError(reason="encode error")
def _decode(obj, msg: str) -> None:
bs = base64.urlsafe_b64decode(msg)
t = msgpack.unpackb(bs, object_hook=decode_datetime)
if isinstance(obj, Ticket):
obj.id = t.get("id")
obj.code = t.get("code")
obj.birth_time = t.get("birth_time")
b = t.get("body")
if isinstance(b, list):
obj.body = b[0]
elif isinstance(b, dict):
obj.body = b
obj.status = t.get("status")
if isinstance(obj, Feedback):
obj.for_id = t.get("for_id")
class Ticket(object):
def __init__(self, code: str, status: int = 200) -> None:
super(Ticket, self).__init__()
self.id = str(uuid4()).replace("-", "")
self.birth_time = datetime.now()
self.code = code
self.status = status
self.body = {}
def __repr__(self) -> str:
j = self.__dict__.copy()
j["body"] = self.body.copy()
j["birth_time"] = self.birth_time.strftime("%Y-%m-%d %H:%M:%S")
return json.dumps(j, ensure_ascii=False)
def put(self, name: str, value: wtype) -> None:
self.body[name] = value
def put_all(self, values: Dict) -> None:
self.body.update(values)
def take(self, name: str, default_value: wtype = None) -> wtype:
return self.body.get(name, default_value)
def encode(self) -> str:
return _encode(self)
def decode(self, msg: str) -> None:
_decode(self, msg)
class Feedback(Ticket):
def __init__(self, ticket: Ticket, status: int = 200) -> None:
super(Feedback, self).__init__(ticket.code, status)
self.for_id = ticket.id | yum-hal | /yum-hal-0.1.3.tar.gz/yum-hal-0.1.3/yum_hal/ticket.py | ticket.py |
# Yumee
**Embed metadata into your music files, whatever the type**
Yumee stands for *Yet Unother MEtadata Embedder*
## Features
- Automatic type detection based on the file extension
- Currently supported: MP3, M4A, FLAC, OGG (Vorbis), OPUS
- Detection of badly formatted files
- Easy to use, straightforward interface
- Possible to use via DI integration
## Installation
### Pip
```
pip install yumee
```
### Poetry
[Poetry](https://python-poetry.org/) is a Python dependency management and packaging tool. I actually use it for this project.
```
poetry add yumee
```
## Usage
There are 2 ways to use this library: using the SongMetadataEmbedder object or via DI.
### Using SongMetadataEmbedder
The library exposes the SongMetadataEmbedder class. This class has 1 method: `open_file`.
`open_file` opens an audio file at a provided path and returns a `BaseSongFile` to manipulate its metadata.
Once you have a `BaseSongFile`, you have access to methods like `embed` or `extract`. `embed` modifies the metadata of the SongFile according to the data provided. `extract` returns the metadata that was embedded in the file.
**Example 1:**
```python
from pathlib import Path
from yumee import SongMetadataEmbedder
embedder = SongMetadataEmbedder()
path = Path("path/to/file.mp3")
with embedder.open_file(path) as song_file:
song_file.title = "New Title"
```
*It is recommended to use 'open_file' with the 'with' statement as it will ensure that the modifications are saved as you exit the block. Otherwise, you have to make sure to call 'save' to save the modifications.*
**Example 2:**
```python
from pathlib import Path
from yumee import SongMetadataEmbedder, SongMetadata
embedder = SongMetadataEmbedder()
path = Path("path/to/file.mp3")
metadata = SongMetadata(title="New Title")
song_file = embedder.open_file(path)
song_file.embed(metadata)
```
*The 'embed' method automatically saves the modifications done. This is why I don't use 'open_file' with a 'with' statement.*
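**Example 3:**

Reading tags back out goes through the same `BaseSongFile` object, using the `extract` method described above. This is only a minimal sketch: it assumes `extract` returns a `SongMetadata` whose fields mirror the ones you can pass to its constructor (such as `title`).

```python
from pathlib import Path
from yumee import SongMetadataEmbedder

embedder = SongMetadataEmbedder()
path = Path("path/to/file.mp3")

with embedder.open_file(path) as song_file:
    metadata = song_file.extract()
    print(metadata.title)  # assumed field, mirroring SongMetadata(title=...)
```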
### Using DI
The library also exposes a `BaseSongFileProvider` interface and an `add_yumee` function for [Taipan-DI](https://github.com/Billuc/Taipan-DI).
In this function, SongFileProviders are registered as a Pipeline. Each SongFileProvider corresponds to a specific file type and generates a `BaseSongFile`. Resolve the pipeline and execute it to get a `BaseSongFile` you can then manipulate.
**Example:**
```python
from pathlib import Path
from yumee import BaseSongFileProvider, add_yumee
from taipan_di import DependencyCollection
services = DependencyCollection()
add_yumee(services)
provider = services.build()
song_file_provider = provider.resolve(BaseSongFileProvider)
path = Path("path/to/file.mp3")
with song_file_provider.exec(path) as song_file:
...
```
## Inspirations
This library is partially inspired by spotDL's [spotify-downloader](https://github.com/spotDL/spotify-downloader) and utilises [mutagen](https://mutagen.readthedocs.io/en/latest/).
## TODO
This library isn't stable yet and a lot of things can still be improved.
If there is something you want to see added or if something does not work as you want it to, feel free to open an issue.
Here is a list of features I have in mind and will be working on :
- ~~Generate SongMetadata from a SongFile~~
- Support Wav
- ISRC tag
- MP3 separator support
- Popularity tag (ID3)
| yumee | /yumee-0.0.3.tar.gz/yumee-0.0.3/README.md | README.md |
====
yuml
====
.. image:: https://travis-ci.org/wandernauta/yuml.png?branch=master
This is an unofficial command line client for the `yUML <http://yuml.me>`_ web
app. Use it to draw class diagrams, activity diagrams and usecase diagrams
from the command line, integrate yUML into your documentation workflow or what
have you.
``yuml`` (the tool) supports all the styles and formats that yUML (the service)
does, so you can take your pick between *scruffy* (the default), *nofunky* and
*plain* for the styles and *png*, *pdf*, *svg* or *jpg* for the format.
Have fun.
Options
-------
Only the -o option is required.
-i Read yuml from ``FILE`` instead of stdin
-o Store output in ``FILE``
-f Use specified format (one of ``png``, ``pdf``, ``svg``, or ``jpg``)
-t Use this diagram type (one of ``class``, ``activity``, or ``usecase``)
-s Use this diagram style (one of ``scruffy``, ``nofunky``, ``plain``)
--dir Lay out elements in this direction (one of ``LR``, ``RL``, ``TD``)
--scale Scale output to percentage
-v Print some debug info
Example
-------
echo "[This]-[That]" | ./yuml -s nofunky -o diagram.png
Installation
------------
To install ``yuml``, try:
sudo pip install https://github.com/wandernauta/yuml/zipball/master
...or the equivalent for your system.
License
-------
``yuml`` is distributed under the terms of the ISC license. See the COPYING.rst
file for more details.
| yuml | /yuml-0.2.tar.gz/yuml-0.2/README.rst | README.rst |
Copyright (c) 2012-2016 Wander Nauta
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
The software is provided "as is" and the author disclaims all warranties with
regard to this software including all implied warranties of merchantability and
fitness. In no event shall the author be liable for any special, direct,
indirect, or consequential damages or any damages whatsoever resulting from
loss of use, data or profits, whether in an action of contract, negligence or
other tortious action, arising out of or in connection with the use or
performance of this software.
| yuml | /yuml-0.2.tar.gz/yuml-0.2/COPYING.rst | COPYING.rst |
================================================
yumm - your universal mongo model
================================================
Python library with several tools to work with geometry
Usage
-----
Imports:
^^^^^^^^
.. code-block:: python
import uuid
import numpy as np
from itertools import count
from udbs import db_handler, BaseClass, ObserverDBMeta
from sqlalchemy import Integer, String
from udbs.db_types import (Pandas_DBType,
Universal_DBType,
List_DBType,
DB_List,
TrackedList,
TrackedDataFrame,
Integer_DBType,
TrackedInteger,
TrackedFloat,
DB_Tuple,
Tuple_DBType,
TrackedTuple,
DB_NPNDARRAY,
Numpy_DBType,
Tracked_NDARRAY
)
# do not echo sqlalchemy output
db_handler.echo = False
# do not profile execution
db_handler.profile = False
Create Databases:
^^^^^^^^^^^^^^^^^
- create or connect to a database:
.. code-block:: python
db = db_handler.create_db(name='database_1',
dialect='sqlite',
database=r'C:\udbs\db_test.db'
)
- create or connect to a second database:
.. code-block:: python
db2 = db_handler.create_db(name='database_2',
dialect='sqlite',
database=r'C:\udbs\db_test_2.db'
)
Create a class:
^^^^^^^^^^^^^^^
The tracked class must have the metaclass=ObserverDBMeta. The tracked attributes are defined in the *_mapped_attributes*
dictionary. The keys of the dictionary are the names of the attributes to be tracked. The values of the dictionary are
the types of the attributes.
Valid types are all sqlalchemy types and the types in sqlalchemy.sql.sqltypes and all types in udbs.db_types.
The type Universal_DBType can handle all common data types. Unknown types are pickled.
Additionally, the types in udbs.db_types return a tracked mutable object when queried, which can also be referenced.
Valid udbs.db_types:
* Bool_DBType
* Integer_DBType
* Float_DBType
* String_DBType
* Tuple_DBType
* List_DBType
* Pandas_DBType
* Numpy_DBType
* Universal_DBType
The attribute *id* is automatically added to the class as the unique primary key in the database:
(Column(id_type, primary_key=True)). The primary key has to be a UUID.
.. code-block:: python
class MyTest(metaclass=ObserverDBMeta):
"""
Define which attributes should be saved in the database as a dictionary.
        The keys are the names of the attributes, the values are the types of the values.
In this Example the attribute 'pid', 'name', 'df' and 'unknown' are saved in the database.
"""
_mapped_attributes = {'pid': Integer,
'name': String,
'df': Pandas_DBType,
'unknown': Universal_DBType}
new_id = count(0)
def __init__(self, *args, **kwargs):
# the attribute 'id' is automatically added to the class as the unique primary key in the database:
# (Column(id_type, primary_key=True)).The primary key has to be a UUID.
self.id = kwargs.get('id', uuid.uuid4())
self.pid = kwargs.get('pid', next(self.new_id))
self.name = kwargs.get('name', f'MyTest {self.pid}')
self.df = kwargs.get('df', None)
self.unknown = kwargs.get('unknown', [1, 2, 3])
Inheritance:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Inheritance is supported.
.. code-block:: python
class MyTest2(MyTest):
_mapped_attributes = {'test_attr': Integer}
def __init__(self, *args, **kwargs):
MyTest.__init__(self, *args, **kwargs)
self.test_attr = kwargs.get('test_attr', None)
my_test2 = MyTest2(test_attr=7, df=df)
print(MyTest.unknown)
Create class instances:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
create instances of *MyTest*:
.. code-block:: python
    from faker import Faker
    fake = Faker()
def create_rows(num=100):
output = [{"name": fake.name(),
"address": fake.address(),
"name": fake.name(),
"email": fake.email(),
"bs": fake.bs(),
"address": fake.address(),
"city": fake.city(),
"state": fake.state(),
"date_time": fake.date_time(),
"paragraph": fake.paragraph(),
"Conrad": fake.catch_phrase(),
"randomdata": random.randint(1000, 2000)} for x in range(num)]
return output
df = create_rows(20)
my_new_test = MyTest(df=df, name='random_name', unknown=2.17)
my_new_test2 = MyTest(df=df, name='some_test_name', unknown=[18, 28.15, {'some_key': np.random.rand(5, 5)}])
Create another instance of *MyTest*. Note that the database in which the object should be stored is passed explicitly; if none is given, the default database is used.
.. code-block:: python
my_new_test_3 = MyTest(df=df, db=db2)
Reference instances:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Once an object is created, the assigned values can be tracked.
.. code-block:: python
    my_new_test_4 = MyTest(df=my_new_test.df, name=my_new_test2.name, db=db2)
In this case no new dataframe is saved in the database, since it already exists; instead, a reference to the existing
dataframe is stored. So if changes are made to my_new_test_4.df, they also affect my_new_test.df.
Load class instances:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The class instances can be loaded by:
.. code-block:: python
my_new_tests = db.query_all(MyTest)
my_new_tests_2 = db2.query_all(MyTest, return_type=list)
The default return type is a set containing all instances. With the keyword argument return_type=list this can be changed to a list.
If only one instance with a given id should be loaded, use:
.. code-block:: python
my_new_test = db.query_single_id(MyTest, id)
You can also do filtered queries:
.. code-block:: python
my_new_test = db.query_filtered(MyTest, 'name', ['some_test_name', 5])
Requirements
------------
Python 3.7+.
Windows Support
---------------
Summary: On Windows, use ``py`` instead of ``python3`` for many of the examples in this
documentation.
This package fully supports Windows, along with Linux and macOS, but Python is typically
`installed differently on Windows <https://docs.python.org/3/using/windows.html>`_.
Windows users typically access Python through the
`py <https://www.python.org/dev/peps/pep-0397/>`_ launcher rather than a ``python3``
link in their ``PATH``. Within a virtual environment, all platforms operate the same and use a
``python`` link to access the Python version used in that virtual environment.
Dependencies
------------
Dependencies are defined in:
- ``requirements.in``
- ``requirements.txt``
- ``dev-requirements.in``
- ``dev-requirements.txt``
Virtual Environments
^^^^^^^^^^^^^^^^^^^^
It is best practice during development to create an isolated
`Python virtual environment <https://docs.python.org/3/library/venv.html>`_ using the
``venv`` standard library module. This will keep dependant Python packages from interfering
with other Python projects on your system.
On \*Nix:
.. code-block:: bash
$ python3 -m venv venv
$ source venv/bin/activate
On Windows ``cmd``:
.. code-block:: bash
> py -m venv venv
> venv\Scripts\activate.bat
Once activated, it is good practice to update core packaging tools (``pip``, ``setuptools``, and
``wheel``) to the latest versions.
.. code-block:: bash
(venv) $ python -m pip install --upgrade pip setuptools wheel
Packaging
---------
This project is designed as a Python package, meaning that it can be bundled up and redistributed
as a single compressed file.
Packaging is configured by:
- ``pyproject.toml``
- ``setup.py``
- ``MANIFEST.in``
To package the project as both a
`source distribution <https://docs.python.org/3/distutils/sourcedist.html>`_ and a
`wheel <https://wheel.readthedocs.io/en/stable/>`_:
.. code-block:: bash
(venv) $ python setup.py sdist bdist_wheel
This will generate ``dist/fact-1.0.0.tar.gz`` and ``dist/fact-1.0.0-py3-none-any.whl``.
Read more about the `advantages of wheels <https://pythonwheels.com/>`_ to understand why
generating wheel distributions are important.
Upload Distributions to PyPI
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Source and wheel redistributable packages can be
`uploaded to PyPI <https://packaging.python.org/tutorials/packaging-projects/>`_ or installed
directly from the filesystem using ``pip``.
To upload to PyPI:
.. code-block:: bash
(venv) $ python -m pip install twine
(venv) $ twine upload dist/*
Testing
-------
Automated testing is performed using `tox <https://tox.readthedocs.io/en/latest/index.html>`_.
tox will automatically create virtual environments based on ``tox.ini`` for unit testing,
PEP8 style guide checking, and documentation generation.
.. code-block:: bash
# Run all environments.
# To only run a single environment, specify it like: -e pep8
# Note: tox is installed into the virtual environment automatically by pip-sync command above.
(venv) $ tox
Unit Testing
^^^^^^^^^^^^
Unit testing is performed with `pytest <https://pytest.org/>`_. pytest has become the defacto
Python unit testing framework. Some key advantages over the built in
`unittest <https://docs.python.org/3/library/unittest.html>`_ module are:
#. Significantly less boilerplate needed for tests.
#. PEP8 compliant names (e.g. ``pytest.raises()`` instead of ``self.assertRaises()``).
#. Vibrant ecosystem of plugins.
pytest will automatically discover and run tests by recursively searching for folders and ``.py``
files prefixed with ``test`` for any functions prefixed by ``test``.
The ``tests`` folder is created as a Python package (i.e. there is an ``__init__.py`` file
within it) because this helps ``pytest`` uniquely namespace the test files. Without this,
two test files cannot be named the same, even if they are in different sub-directories.
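As an illustration of these conventions, a hypothetical test module for the ``fact``
example project used throughout this document could look like the following sketch
(the import path ``fact.lib`` and the ``InvalidFactorialError`` exception are
assumptions based on the project structure and docstring example shown later):

.. code-block:: python

    # tests/test_fact.py
    import pytest

    from fact.lib import InvalidFactorialError, factorial

    def test_factorial_of_small_numbers():
        assert factorial(0) == 1
        assert factorial(5) == 120

    def test_negative_input_is_rejected():
        with pytest.raises(InvalidFactorialError):
            factorial(-1)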
Code coverage is provided by the `pytest-cov <https://pytest-cov.readthedocs.io/en/latest/>`_
plugin.
When running a unit test tox environment (e.g. ``tox``, ``tox -e py37``, etc.), a data file
(e.g. ``.coverage.py37``) containing the coverage data is generated. This file is not readable on
its own, but when the ``coverage`` tox environment is run (e.g. ``tox`` or ``tox -e coverage``),
coverage from all unit test environments is combined into a single data file and an HTML report is
generated in the ``htmlcov`` folder showing each source file and which lines were executed during
unit testing. Open ``htmlcov/index.html`` in a web browser to view the report. Code coverage
reports help identify areas of the project that are currently not tested.
Code coverage is configured in ``pyproject.toml``.
To pass arguments to ``pytest`` through ``tox``:
.. code-block:: bash
(venv) $ tox -e py37 -- -k invalid_factorial
Code Style Checking
^^^^^^^^^^^^^^^^^^^
`PEP8 <https://www.python.org/dev/peps/pep-0008/>`_ is the universally accepted style
guide for Python code. PEP8 code compliance is verified using `flake8 <http://flake8.pycqa.org/>`_.
flake8 is configured in the ``[flake8]`` section of ``tox.ini``. Extra flake8 plugins
are also included:
- ``pep8-naming``: Ensure functions, classes, and variables are named with correct casing.
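To run only the style checks, use the single-environment form shown at the start of this section (the environment name ``pep8`` is taken from that earlier example; the exact names are defined in ``tox.ini``):

.. code-block:: bash

    (venv) $ tox -e pep8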
Automated Code Formatting
^^^^^^^^^^^^^^^^^^^^^^^^^
Code is automatically formatted using `black <https://github.com/psf/black>`_. Imports are
automatically sorted and grouped using `isort <https://github.com/timothycrosley/isort/>`_.
These tools are configured by:
- ``pyproject.toml``
To automatically format code, run:
.. code-block:: bash
(venv) $ tox -e fmt
To verify code has been formatted, such as in a CI job:
.. code-block:: bash
(venv) $ tox -e fmt-check
Generated Documentation
^^^^^^^^^^^^^^^^^^^^^^^
Documentation that includes the ``README.rst`` and the Python project modules is automatically
generated using a `Sphinx <http://sphinx-doc.org/>`_ tox environment. Sphinx is a documentation
generation tool that is the defacto tool for Python documentation. Sphinx uses the
`RST <https://www.sphinx-doc.org/en/latest/usage/restructuredtext/basics.html>`_ markup language.
This project uses the
`napoleon <http://www.sphinx-doc.org/en/master/usage/extensions/napoleon.html>`_ plugin for
Sphinx, which renders Google-style docstrings. Google-style docstrings provide a good mix
of easy-to-read docstrings in code as well as nicely-rendered output.
.. code-block:: python
"""Computes the factorial through a recursive algorithm.
Args:
n: A positive input value.
Raises:
InvalidFactorialError: If n is less than 0.
Returns:
Computed factorial.
"""
The Sphinx project is configured in ``docs/conf.py``.
Build the docs using the ``docs`` tox environment (e.g. ``tox`` or ``tox -e docs``). Once built,
open ``docs/_build/index.html`` in a web browser.
Generate a New Sphinx Project
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
To generate the Sphinx project shown in this project:
.. code-block:: bash
# Note: Sphinx is installed into the virtual environment automatically by pip-sync command
# above.
(venv) $ mkdir docs
(venv) $ cd docs
(venv) $ sphinx-quickstart --no-makefile --no-batchfile --extensions sphinx.ext.napoleon
# When prompted, select all defaults.
Modify ``conf.py`` appropriately:
.. code-block:: python
# Add the project's Python package to the path so that autodoc can find it.
import os
import sys
sys.path.insert(0, os.path.abspath('../src'))
...
html_theme_options = {
# Override the default alabaster line wrap, which wraps tightly at 940px.
'page_width': 'auto',
}
Modify ``index.rst`` appropriately:
::
.. include:: ../README.rst
apidoc/modules.rst
Project Structure
-----------------
Traditionally, Python projects place the source for their packages in the root of the project
structure, like:
.. code-block::
fact
├── fact
│ ├── __init__.py
│ ├── cli.py
│ └── lib.py
├── tests
│ ├── __init__.py
│ └── test_fact.py
├── tox.ini
└── setup.py
However, this structure is `known
<https://docs.pytest.org/en/latest/goodpractices.html#tests-outside-application-code>`_ to have bad
interactions with ``pytest`` and ``tox``, two standard tools maintaining Python projects. The
fundamental issue is that tox creates an isolated virtual environment for testing. By installing
the distribution into the virtual environment, ``tox`` ensures that the tests pass even after the
distribution has been packaged and installed, thereby catching any errors in packaging and
installation scripts, which are common. Having the Python packages in the project root subverts
this isolation for two reasons:
#. Calling ``python`` in the project root (for example, ``python -m pytest tests/``) `causes Python
to add the current working directory
<https://docs.pytest.org/en/latest/pythonpath.html#invoking-pytest-versus-python-m-pytest>`_
(the project root) to ``sys.path``, which Python uses to find modules. Because the source
package ``fact`` is in the project root, it shadows the ``fact`` package installed in the tox
environment.
#. Calling ``pytest`` directly anywhere that it can find the tests will also add the project root
   to ``sys.path`` if the ``tests`` folder is a Python package (that is, it contains a
``__init__.py`` file). `pytest adds all folders containing packages
<https://docs.pytest.org/en/latest/goodpractices.html#conventions-for-python-test-discovery>`_
to ``sys.path`` because it imports the tests like regular Python modules.
In order to properly test the project, the source packages must not be on the Python path. To
prevent this, there are three possible solutions:
#. Remove the ``__init__.py`` file from ``tests`` and run ``pytest`` directly as a tox command.
#. Remove the ``__init__.py`` file from tests and change the working directory of
``python -m pytest`` to ``tests``.
#. Move the source packages to a dedicated ``src`` folder.
The dedicated ``src`` directory is the `recommended solution
<https://docs.pytest.org/en/latest/pythonpath.html#test-modules-conftest-py-files-inside-packages>`_
by ``pytest`` when using tox and the solution this blueprint promotes because it is the least
brittle even though it deviates from the traditional Python project structure. It results in a
directory structure like:
.. code-block::
fact
├── src
│ └── fact
│ ├── __init__.py
│ ├── cli.py
│ └── lib.py
├── tests
│ ├── __init__.py
│ └── test_fact.py
├── tox.ini
└── setup.py
Type Hinting
------------
`Type hinting <https://docs.python.org/3/library/typing.html>`_ allows developers to include
optional static typing information to Python source code. This allows static analyzers such
as `PyCharm <https://www.jetbrains.com/pycharm/>`_, `mypy <http://mypy-lang.org/>`_, or
`pytype <https://github.com/google/pytype>`_ to check that functions are used with the correct
types before runtime.
For
`PyCharm in particular <https://www.jetbrains.com/help/pycharm/type-hinting-in-product.html>`_,
the IDE is able to provide much richer auto-completion, refactoring, and type checking while
the user types, resulting in increased productivity and correctness.
This project uses the type hinting syntax introduced in Python 3:
.. code-block:: python
def factorial(n: int) -> int:
Type checking is performed by mypy via ``tox -e mypy``. mypy is configured in ``setup.cfg``.
Licensing
---------
Licensing for the project is defined in:
- ``LICENSE.txt``
- ``setup.py``
This project uses a common permissive license, the MIT license.
You may also want to list the licenses of all of the packages that your Python project depends on.
To automatically list the licenses for all dependencies in ``requirements.txt`` (and their
transitive dependencies) using
`pip-licenses <https://github.com/raimon49/pip-licenses>`_:
.. code-block:: bash
(venv) $ tox -e licenses
...
Name Version License
colorama 0.4.3 BSD License
exitstatus 1.3.0 MIT License
PyCharm Configuration
---------------------
To configure PyCharm 2018.3 and newer to align to the code style used in this project:
- Settings | Search "Hard wrap at"
- Editor | Code Style | General | Hard wrap at: 99
- Settings | Search "Optimize Imports"
- Editor | Code Style | Python | Imports
- ☑ Sort import statements
- ☑ Sort imported names in "from" imports
- ☐ Sort plain and "from" imports separately within a group
- ☐ Sort case-insensitively
- Structure of "from" imports
- ◎ Leave as is
- ◉ Join imports with the same source
- ◎ Always split imports
- Settings | Search "Docstrings"
- Tools | Python Integrated Tools | Docstrings | Docstring Format: Google
- Settings | Search "Force parentheses"
- Editor | Code Style | Python | Wrapping and Braces | "From" Import Statements
- ☑ Force parentheses if multiline
Integrate Code Formatters
^^^^^^^^^^^^^^^^^^^^^^^^^
To integrate automatic code formatters into PyCharm, reference the following instructions:
- `black integration <https://black.readthedocs.io/en/stable/editor_integration.html#pycharm-intellij-idea>`_
- The File Watchers method (step 3) is recommended. This will run ``black`` on every save.
- `isort integration <https://github.com/timothycrosley/isort/wiki/isort-Plugins>`_
- The File Watchers method (option 1) is recommended. This will run ``isort`` on every save.
| yumm | /yumm-0.0.1.tar.gz/yumm-0.0.1/README.rst | README.rst |
from mongoengine import Document, StringField, DateTimeField, FileField, ListField, GenericLazyReferenceField
from datetime import datetime
from .fields import DataFrameField, NPArrayField
class Dataframe(Document):
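    """Document storing a pandas DataFrame (via DataFrameField) together with a name, timestamps and back-references."""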
name = StringField()
type = StringField()
data = DataFrameField()
date_created = DateTimeField(default=datetime.utcnow)
date_modified = DateTimeField(default=datetime.utcnow)
referenced_by = ListField(GenericLazyReferenceField(null=True))
meta = {'allow_inheritance': True}
def __init__(self, *args, **kwargs):
super(Dataframe, self).__init__(*args, **kwargs)
self.name = kwargs.get('name', None)
        self.type = kwargs.get('type', 'Dataframe')
self.data = kwargs.get('data', None)
self.referenced_by = kwargs.get('comp_refs', [])
self._c_refs = set()
def save(
self,
force_insert=False,
validate=True,
clean=True,
write_concern=None,
cascade=None,
cascade_kwargs=None,
_refs=None,
save_condition=None,
signal_kwargs=None,
**kwargs,
):
        self.date_modified = datetime.utcnow()
return Document.save(self,
force_insert=force_insert,
validate=validate,
clean=clean,
write_concern=write_concern,
cascade=cascade,
cascade_kwargs=cascade_kwargs,
_refs=_refs,
save_condition=save_condition,
signal_kwargs=signal_kwargs,
**kwargs)
class NumpyArray(Document):
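    """Document storing a numpy array (via NPArrayField) together with a name, timestamps and back-references."""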
name = StringField()
type = StringField()
data = NPArrayField()
date_created = DateTimeField(default=datetime.utcnow)
date_modified = DateTimeField(default=datetime.utcnow)
referenced_by = ListField(GenericLazyReferenceField(null=True))
meta = {'allow_inheritance': True}
def __init__(self, *args, **kwargs):
super(NumpyArray, self).__init__(*args, **kwargs)
self.name = kwargs.get('name', None)
        self.type = kwargs.get('type', 'NumpyArray')
self.data = kwargs.get('data', None)
self.referenced_by = kwargs.get('comp_refs', [])
self._c_refs = set()
def save(
self,
force_insert=False,
validate=True,
clean=True,
write_concern=None,
cascade=None,
cascade_kwargs=None,
_refs=None,
save_condition=None,
signal_kwargs=None,
**kwargs,
):
        self.date_modified = datetime.utcnow()
return Document.save(self,
force_insert=force_insert,
validate=validate,
clean=clean,
write_concern=write_concern,
cascade=cascade,
cascade_kwargs=cascade_kwargs,
_refs=_refs,
save_condition=save_condition,
signal_kwargs=signal_kwargs,
**kwargs)
class File(Document):
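    """Document storing a file (via FileField) together with its format, timestamps and back-references."""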
name = StringField()
type = StringField()
data = FileField()
file_format = StringField()
date_created = DateTimeField(default=datetime.utcnow)
date_modified = DateTimeField(default=datetime.utcnow)
referenced_by = ListField(GenericLazyReferenceField(null=True))
meta = {'allow_inheritance': True}
def __init__(self, *args, **kwargs):
super(File, self).__init__(*args, **kwargs)
self.name = kwargs.get('name', None)
        self.type = kwargs.get('type', 'File')
self.data = kwargs.get('data', None)
self.file_format = kwargs.get('file_format')
self._c_refs = set()
self.referenced_by = kwargs.get('comp_refs', [])
def save(
self,
force_insert=False,
validate=True,
clean=True,
write_concern=None,
cascade=None,
cascade_kwargs=None,
_refs=None,
save_condition=None,
signal_kwargs=None,
**kwargs,
):
        self.date_modified = datetime.utcnow()
return Document.save(self,
force_insert=force_insert,
validate=validate,
clean=clean,
write_concern=write_concern,
cascade=cascade,
cascade_kwargs=cascade_kwargs,
_refs=_refs,
save_condition=save_condition,
signal_kwargs=signal_kwargs,
**kwargs) | yumm | /yumm-0.0.1.tar.gz/yumm-0.0.1/src/mongoSimultan/base_types.py | base_types.py |
from mongoengine import StringField, DictField, DateTimeField, DynamicField, DynamicDocument, Document, ListField
from datetime import datetime
class Component(Document):
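    """Document holding a dict of dynamic content values; saving it refreshes the referenced_by lists of documents stored in that content."""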
name = StringField()
type = StringField()
content = DictField(DynamicField())
referenced_by = ListField(DynamicField())
date_created = DateTimeField(default=datetime.utcnow)
date_modified = DateTimeField(default=datetime.utcnow)
meta = {'allow_inheritance': True}
def __init__(self, *args, **kwargs):
super(Component, self).__init__(*args, **kwargs)
self._update_refs = False
self.name = kwargs.get('name', None)
        self.type = kwargs.get('type', None)
self.content = kwargs.get('content', None)
        self.referenced_by = kwargs.get('referenced_by', [])
self._c_refs = set()
def save(
self,
force_insert=False,
validate=True,
clean=True,
write_concern=None,
cascade=None,
cascade_kwargs=None,
_refs=None,
save_condition=None,
signal_kwargs=None,
**kwargs,
):
        self.date_modified = datetime.utcnow()
ret = DynamicDocument.save(self,
force_insert=force_insert,
validate=validate,
clean=clean,
write_concern=write_concern,
cascade=cascade,
cascade_kwargs=cascade_kwargs,
_refs=_refs,
save_condition=save_condition,
signal_kwargs=signal_kwargs,
**kwargs)
if self._update_refs:
for value in self.content.values():
if isinstance(value, DynamicDocument) or isinstance(value, Document):
value.referenced_by = list(value._c_refs)
value.save()
value._changed_fields = []
setattr(self, '_update_refs', False)
ret._update_refs = self._update_refs
return ret
def __setattr__(self, key, value):
DynamicDocument.__setattr__(self, key, value)
if key == 'content':
print('content')
if value is not None:
for c_value in value.values():
if isinstance(c_value, DynamicDocument) or isinstance(c_value, Document):
c_value._c_refs.add(self)
object.__setattr__(self, '_update_refs', True)
return self
# if isinstance(self.__getattribute__(key), )
def delete(self, signal_kwargs=None, **write_concern):
if self.r | yumm | /yumm-0.0.1.tar.gz/yumm-0.0.1/src/mongoSimultan/component.py | component.py |
from mongoengine import fields
from numpy import generic
from numpy import ndarray
from numpy import array as np_array
from pandas import DataFrame, MultiIndex
def _as_native(x):
if isinstance(x, dict):
return dict([(k, _as_native(v)) for k, v in x.items()])
elif isinstance(x, list):
return [_as_native(v) for v in x]
else:
return x
class NPArrayField(fields.ListField):
"""A pandas DataFrame field.
Looks to the outside world like a Pandas.DataFrame, but stores
in the database as an using Pandas.DataFrame.to_dict("list").
"""
def __init__(self, orient="list", *args, **kwargs):
if orient not in ('dict', 'list', 'series', 'split', 'records', 'index'):
raise ValueError(u"orient must be one of ('dict', 'list', 'series', 'split', 'records', 'index') but got: %s")
self.orient = orient
super(NPArrayField, self).__init__(*args, **kwargs)
def __get__(self, instance, owner):
print("__get__:", instance, owner)
# df = pd.DataFrame.from_dict(_as_native(super(DataFrameField, self).__get__(instance, owner)))
array = np_array(_as_native(super(NPArrayField, self).__get__(instance, owner)))
return array
def __set__(self, instance, value):
if value is None or isinstance(value, list):
return super(NPArrayField, self).__set__(instance, value)
if not isinstance(value, ndarray):
raise ValueError("value is not a np.ndarray instance")
obj = value.tolist()
# coerce numpy objects into python objects for lack of the BSON-numpy package on windows
return super(NPArrayField, self).__set__(instance, obj)
def to_python(self, value):
return value
class MongoDF(DataFrame):
_attributes_ = "instance, owner"
def __init__(self, *args, **kw):
super(MongoDF, self).__init__(*args, **kw)
if len(args) == 1 and isinstance(args[0], MongoDF):
args[0]._copy_attrs(self)
def _copy_attrs(self, df):
for attr in self._attributes_.split(","):
df.__dict__[attr] = getattr(self, attr, None)
@property
def _constructor(self):
def f(*args, **kw):
df = MongoDF(*args, **kw)
self._copy_attrs(df)
return df
return f
class DataFrameField(fields.DictField):
"""A pandas DataFrame field.
    Looks to the outside world like a pandas.DataFrame, but is stored
    in the database as a dict created with DataFrame.to_dict("list").
"""
def __init__(self, orient="list", *args, **kwargs):
if orient not in ('dict', 'list', 'series', 'split', 'records', 'index'):
raise ValueError(u"orient must be one of ('dict', 'list', 'series', 'split', 'records', 'index') but got: %s")
self.orient = orient
super(DataFrameField, self).__init__(*args, **kwargs)
def __get__(self, instance, owner):
print("__get__:", instance, owner)
# df = pd.DataFrame.from_dict(_as_native(super(DataFrameField, self).__get__(instance, owner)))
df = MongoDF.from_dict(_as_native(super(DataFrameField, self).__get__(instance, owner)))
df.instance = instance
df.owner = owner
return df
def __set__(self, instance, value):
if value is None or isinstance(value, dict):
return super(DataFrameField, self).__set__(instance, value)
if not isinstance(value, DataFrame):
raise ValueError("value is not a pandas.DataFrame instance")
if isinstance(value.index, MultiIndex):
self.error(u'value.index is a MultiIndex; MultiIndex objects may not be stored in MongoDB. Consider using `value.reset_index()` to flatten')
if isinstance(value.keys(), MultiIndex):
self.error(u'value.keys() is a MultiIndex; MultiIndex objects may not be stored in MongoDB. Consider using `value.unstack().reset_index()` to flatten')
obj = value.to_dict(self.orient)
# coerce numpy objects into python objects for lack of the BSON-numpy package on windows
for col in obj.values():
if len(col) and isinstance(col[0], generic):
for i in range(len(col)):
col[i] = col[i].item()
return super(DataFrameField, self).__set__(instance, obj)
def to_python(self, value):
return value | yumm | /yumm-0.0.1.tar.gz/yumm-0.0.1/src/mongoSimultan/fields.py | fields.py |
yummly.py
*********
|version| |license|
Python library for the `Yummly API <https://developer.yummly.com>`_
**NOTE:** This library and its author are not affiliated with Yummly.
Installation
============
::
pip install yummly
Dependencies
============
- requests >= 1.1.0
Usage
=====
Use ``yummly.Client`` to create a client object to interact with the Yummly API.
The client accepts ``api_id``, ``api_key``, and ``timeout`` as init parameters:
.. code-block:: python
from yummly import Client
# default option values
TIMEOUT = 5.0
RETRIES = 0
client = Client(api_id=YOUR_API_ID, api_key=YOUR_API_KEY, timeout=TIMEOUT, retries=RETRIES)
search = client.search('green eggs and ham')
match = search.matches[0]
recipe = client.recipe(match.id)
Search Recipes
--------------
API endpoint: ``api.yummly.com/v1/api/recipes?<params>``
Search for recipes meeting certain criteria:
.. code-block:: python
results = yummly.search('bacon')
print('Total Matches:', results.totalMatchCount)
for match in results.matches:
print('Recipe ID:', match.id)
print('Recipe:', match.recipeName)
print('Rating:', match.rating)
print('Total Time (mins):', match.totalTimeInSeconds / 60.0)
print('----------------------------------------------------')
Limit your results to a maximum:
.. code-block:: python
# return the first 10 results
results = yummly.search('chicken marsala', maxResults=10)
Offset the results for pagination:
.. code-block:: python
# return 2nd page of results
results = yummly.search('pulled pork', maxResults=10, start=10)
Provide search parameters:
.. code-block:: python
params = {
'q': 'pork chops',
'start': 0,
        'maxResults': 40,
        'requirePictures': True,
'allowedIngredient[]': ['salt', 'pepper'],
'excludedIngredient[]': ['cumin', 'paprika'],
'maxTotalTimeInSeconds': 3600,
'facetField[]': ['ingredient', 'diet'],
'flavor.meaty.min': 0.5,
'flavor.meaty.max': 1,
'flavor.sweet.min': 0,
'flavor.sweet.max': 0.5,
'nutrition.FAT.min': 0,
'nutrition.FAT.max': 15
}
results = yummly.search(**params)
For a full list of supported search parameters, see section *The Search Recipes Call* located at: https://developer.yummly.com/intro
Example search response: https://developer.yummly.com/wiki/search-recipes-response-sample
Get Recipe
----------
API endpoint: ``api.yummly.com/v1/api/recipe/<recipe_id>``
Fetch a recipe by its recipe ID:
.. code-block:: python
recipe = yummly.recipe(recipe_id)
print('Recipe ID:', recipe.id)
print('Recipe:', recipe.name)
print('Rating:', recipe.rating)
print('Total Time:', recipe.totalTime)
print('Yields:', recipe.yields)
print('Ingredients:')
for ingred in recipe.ingredientLines:
print(ingred)
Example recipe response: https://developer.yummly.com/wiki/get-recipe-response-sample
**NOTE:** Yummly's Get-Recipe response includes ``yield`` as a field name. However, ``yield`` is a keyword in Python so this has been renamed to ``yields``.
Search metadata
---------------
API endpoint: ``api.yummly.com/v1/api/metadata/<metadata_key>``
Yummly provides a metadata endpoint that returns the possible values for allowed/excluded ingredient, diet, allergy, and other search parameters:
.. code-block:: python
METADATA_KEYS = [
'ingredient',
'holiday',
'diet',
'allergy',
'technique',
'cuisine',
'course',
'source',
'brand',
'restriction'
]
ingredients = client.metadata('ingredient')
diets = client.metadata('diet')
sources = client.metadata('source')
**NOTE:** Yummly's raw API returns this data as a JSONP response; ``yummly.py`` strips the JSONP wrapper and converts the payload to a ``list`` containing instances of the corresponding metadata class.
API Model Classes
=================
All underlying API model classes are in ``yummly/models.py``. The base class used for all models is a modified ``dict`` class with attribute-style access (i.e. both ``obj.foo`` and ``obj['foo']`` are valid accessor methods).
A derived ``dict`` class was chosen to accommodate painless conversion to JSON which is a fairly common requirement when using ``yummly.py`` as an API proxy to feed your applications (e.g. a web app with ``yummly.py`` running on your server instead of directly using the Yummly API on the frontend).
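For illustration, here is a short sketch reusing the ``recipe`` object from the Get Recipe example above; both access styles read the same underlying data, and because the models derive from ``dict`` they can be passed straight to ``json.dumps``:

.. code-block:: python

    import json

    # attribute-style and key-style access are equivalent
    assert recipe.name == recipe['name']

    # dict-derived models serialize to JSON directly
    recipe_json = json.dumps(recipe)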
Testing
=======
Tests are located in ``tests/``. They can be executed from the root directory, either via the ``makefile`` or by calling ``pytest`` directly.
::
# using makefile
make test
# using pytest directly
py.test yummly
**NOTE:** Running the test suite will use real API calls which will count against your call limit. Currently, 22 API calls are made when running the tests.
Test Config File
----------------
A test config file is required to run the tests. Create ``tests/config.json`` with the following properties:
.. code-block:: javascript
{
"api_id": "YOUR_API_ID",
"api_key": "YOUR_API_KEY"
}
This file will be loaded automatically when the tests are run.
License
=======
This software is licensed under the MIT License.
TODO
====
- Provide helpers for complex search parameters like nutrition, flavors, and metadata
.. |version| image:: http://img.shields.io/pypi/v/yummly.svg?style=flat
:target: https://pypi.python.org/pypi/yummly/
.. |license| image:: http://img.shields.io/pypi/l/yummly.svg?style=flat
:target: https://pypi.python.org/pypi/yummly/
| yummly | /yummly-0.5.0.tar.gz/yummly-0.5.0/README.rst | README.rst |
License
=======
Copyright (c) 2014 Derrick Gilland
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
| yummly | /yummly-0.5.0.tar.gz/yummly-0.5.0/LICENSE.rst | LICENSE.rst |
MIT License
Copyright (c) 2020 Joel Lefkowitz
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
| yummy-cereal | /yummy_cereal-2.0.3-py3-none-any.whl/yummy_cereal-2.0.3.dist-info/LICENSE.md | LICENSE.md |
from dataclasses import dataclass, field
from typing import Any, Dict, Generic, List, TypeVar
from typing_inspect import get_args
from ..exceptions import (
DictFieldSerializingError,
ListFieldSerializingError,
MissingFieldError,
)
from ..protocols import Factory, SerializerMap
from ..utils.annotations import (
field_is_generic_dict,
field_is_generic_list,
get_cls_annotations,
)
T = TypeVar("T")
@dataclass
class AnnotationsSerializer(Generic[T]):
cls: Factory[T]
field_defaults: Dict = field(default_factory=dict)
specified_serializers: SerializerMap = field(default_factory=dict)
def __call__(self, obj: T) -> Dict:
"""
Serializes an object based on its class annotations
Args:
obj (T): Object to serialize
Raises:
MissingFieldError: An annotated was not provided a value
Returns:
Dict: Serialized object
"""
annotations = get_cls_annotations(self.cls)
serialized_fields = self.field_defaults.copy()
for field_name, field_type in annotations.items():
if field_is_generic_list(self.cls, field_name) and hasattr(obj, field_name):
serialized_fields[field_name] = self.serialize_list_field(
obj, field_name, field_type
)
elif field_is_generic_dict(self.cls, field_name) and hasattr(
obj, field_name
):
serialized_fields[field_name] = self.serialize_dict_field(
obj, field_name, field_type
)
elif hasattr(obj, field_name):
field_data = getattr(obj, field_name)
field_serializer = self.select_field_serializer(field_type)
serialized_fields[field_name] = (
field_data
if field_serializer is Any
else field_serializer(field_data)
)
elif field_name in self.field_defaults:
serialized_fields[field_name] = self.field_defaults[field_name]
else:
raise MissingFieldError(obj, field_name)
return serialized_fields
def select_field_serializer(self, field_type: Any) -> Any:
"""
Selects which serializer to use for a given field type
Args:
field_type (Any): Type of the field to serialize
Returns:
Any: Selected serializer to use
"""
return (
self.specified_serializers[field_type]
if field_type in self.specified_serializers
else field_type
)
def serialize_list_field(self, obj: Any, field_name: str, field_type: Any) -> List:
"""
Serializes a list field with its inner type
Args:
obj (Any): Object to serialize
field_name (str): Name of the object's list field
field_type (Any): Type of the object's list field
Raises:
ListFieldSerializingError: The inner field data was not itself a list
Returns:
List: List of serialized inner objects
"""
field_data = getattr(obj, field_name)
inner_field_type = get_args(field_type)[0]
inner_field_serializer = self.select_field_serializer(inner_field_type)
if isinstance(field_data, list):
return [inner_field_serializer(i) for i in field_data]
else:
raise ListFieldSerializingError(field_data, inner_field_serializer)
def serialize_dict_field(self, obj: Any, field_name: str, field_type: Any) -> Dict:
"""
Serializes a dict field with its inner type
Args:
obj (Any): Object to serialize
field_name (str): Name of the object's dict field
field_type (Any): Type of the object's dict field
Raises:
DictFieldSerializingError: The inner field data was not itself a dict
Returns:
Dict: Dict of serialized inner objects
"""
field_data = getattr(obj, field_name)
inner_field_type = get_args(field_type)[0]
inner_field_serializer = self.select_field_serializer(inner_field_type)
if isinstance(field_data, dict):
return {k: inner_field_serializer(v) for k, v in field_data.items()}
else:
raise DictFieldSerializingError(
field_data, inner_field_serializer,
) | yummy-cereal | /yummy_cereal-2.0.3-py3-none-any.whl/yummy_cereal/serializers/annotations_serializer.py | annotations_serializer.py |
from dataclasses import dataclass, field
from typing import Any, Dict, Generic, List, TypeVar
from typing_inspect import get_args
from ..exceptions import DictFieldParsingError, FieldParsingError, ListFieldParsingError
from ..protocols import Factory, ParserMap
from ..utils.annotations import (
field_is_generic_dict,
field_is_generic_list,
get_cls_annotations,
)
T = TypeVar("T")
@dataclass
class AnnotationsParser(Generic[T]):
cls: Factory[T]
field_defaults: Dict = field(default_factory=dict)
specified_parsers: ParserMap = field(default_factory=dict)
def __call__(self, config: Dict) -> T:
"""
Parses an object based on its class annotations
Args:
config (Dict): Configuration to parse
Returns:
T: Parsed object
"""
parsed_fields = {k: self.parse_field(k, v) for k, v in config.items()}
parser_kwargs = {**self.field_defaults, **parsed_fields}
return self.cls(**parser_kwargs)
def select_field_parser(self, field_type: Any) -> Any:
"""
Selects which parser to use for a given field type
Args:
field_type (Any): Type of the field to parse
Returns:
Any: Selected parser to use
"""
return (
self.specified_parsers[field_type]
if field_type in self.specified_parsers
else field_type
)
def parse_field(self, field_name: Any, raw_field_value: Any) -> Any:
"""
Parses a field based on its class annotations
Args:
field_name (Any): Name of the object's field
raw_field_value (Any): Field data to parse
Raises:
FieldParsingError: The field data could not be parsed
Returns:
Any: Parsed field data
"""
annotations = get_cls_annotations(self.cls)
field_type = annotations[field_name]
if field_is_generic_list(self.cls, field_name):
inner_field_type = get_args(field_type)[0]
return self.parse_list_field(
raw_field_value,
self.select_field_parser(inner_field_type),
get_cls_annotations(inner_field_type),
)
elif field_is_generic_dict(self.cls, field_name):
inner_field_type = get_args(field_type)[0]
return self.parse_dict_field(
raw_field_value,
self.select_field_parser(inner_field_type),
get_cls_annotations(inner_field_type),
)
else:
field_parser = self.select_field_parser(field_type)
if field_parser is Any:
return raw_field_value
else:
try:
return field_parser(raw_field_value)
except TypeError:
raise FieldParsingError(field_parser, raw_field_value)
def parse_list_field(
self,
raw_field_value: Any,
inner_field_parser: Any,
inner_field_annotations: Dict,
) -> List:
"""
Parses an object's list field
Args:
raw_field_value (Any): List field data to parse
inner_field_parser (Any): Inner field parser type
inner_field_annotations (Dict): Annotations belonging to the inner field type
Raises:
            ListFieldParsingError: The list field data was neither a list nor a mapping that could be converted into a list of named items
Returns:
List: Parsed list field
"""
if isinstance(raw_field_value, list):
return [inner_field_parser(i) for i in raw_field_value]
elif (
isinstance(raw_field_value, dict)
and len(inner_field_annotations) == 2
and "name" in inner_field_annotations
):
inner_field_annotations.pop("name")
group_field, group_type = inner_field_annotations.popitem()
return [
inner_field_parser({"name": k, group_field: v})
for k, v in raw_field_value.items()
]
else:
raise ListFieldParsingError(inner_field_parser, raw_field_value)
def parse_dict_field(
self,
raw_field_value: Any,
inner_field_parser: Any,
inner_field_annotations: Dict,
) -> Dict:
"""
Parses an object's dict field
Args:
raw_field_value (Any): dict field data to parse
inner_field_parser (Any): Inner field parser type
inner_field_annotations (Dict): Annotations belonging to the inner field type
Raises:
            DictFieldParsingError: The dict field data was not itself a dict
Returns:
Dict: Parsed dict field
"""
if isinstance(raw_field_value, dict):
return {k: inner_field_parser(v) for k, v in raw_field_value.items()}
else:
raise DictFieldParsingError(inner_field_parser, raw_field_value) | yummy-cereal | /yummy_cereal-2.0.3-py3-none-any.whl/yummy_cereal/parsers/annotations_parser.py | annotations_parser.py |
==================
Yummy Sphinx Theme
==================
.. image:: https://img.shields.io/pypi/v/yummy_sphinx_theme.svg
:target: https://pypi.org/project/yummy_sphinx_theme/
.. image:: https://img.shields.io/github/release/TagnumElite/yummy_sphinx_theme/all.svg
:target: https://github.com/TagnumElite/yummy_sphinx_theme
.. image:: https://img.shields.io/pypi/l/yummy_sphinx_theme.svg
:target: https://pypi.org/project/yummy_sphinx_theme/
.. image:: https://img.shields.io/pypi/pyversions/yummy_sphinx_theme.svg
:target: https://pypi.org/project/yummy_sphinx_theme/
.. image:: https://img.shields.io/github/issues-raw/TagnumElite/yummy_sphinx_theme.svg
:target: https://github.com/TagnumElite/yummy_sphinx_theme
.. image:: https://img.shields.io/travis/TagnumElite/yummy_sphinx_theme/develop.svg
:target: https://travis-ci.org/TagnumElite/yummy_sphinx_theme
The yummy_sphinx_theme is created from `DONGChuan's Yummy-Jekyll <https://github.com/DONGChuan/Yummy-Jekyll>`_
The niftools_sphinx_theme is a modification of the yummy_sphinx_theme for the niftools team.
If either party has a complaint about what I have made, please get in touch: I did this without any legal knowledge or proper crediting.
.. contents::
`Demo <http://tagnumelite.com/yummy_sphinx_theme>`_
=============================================================
Installation
============
There are two ways to install these themes
Via Python Package Interface
----------------------------
Install the package and add ``yummy_sphinx_theme`` to your ``requirements.txt`` file:
.. code:: bash
pip install yummy_sphinx_theme
In your ``conf.py`` file:
.. code:: python
html_theme = "yummy_sphinx_theme"
Via git or download
-------------------
Download the ``yummy_sphinx_theme`` folder into your documentation at
``docs/_themes/`` then add the following two lines to your Sphinx
``conf.py`` file:
.. code:: python
html_theme = "yummy_sphinx_theme"
html_theme_path = ["_themes", ]
=========
Changelog
=========
Yummy
=====
0.1.0:
------
* Remove niftools_sphinx_theme
* Added option to disable geopattern
* Add background image fallback for jumbotron
0.0.6:
------
* Fix theme.css_t problem
* Add Home Name Config
* Remove Unwanted File
* Fix Version 0.0.5
* Add versions footholder
0.0.5:
------
* Fix Theme Problems
* Update Docs
0.0.4:
------
* Default Footer Icon
* Fix Table Of Contents
* Customizable Table Of Contents
0.0.3:
------
* More color customization
* `Customizable navbar icon <http://fontawesome.io/icons/>`_
0.0.2:
------
* Add Dedicated Documentation Page
* Add Disqus
* Add Content To Layout
* Made Navbar into Dictionary
* Made Navbar background color customisable in theme conf
* Use bower to manage dependencies
* Add GitHub, GitLab, BitBucket and Canonical Urls
* Disable Octicons and Primer-Markdown
* Clean Layout.html
* Source Code footer Icon changes to the specified url
* Add the missing end html tag
0.0.1:
------
* Turn jekyll theme into sphinx theme
* Renamed project from niftools_sphinx_theme to yummy_sphinx_theme
| yummy-sphinx-theme | /yummy_sphinx_theme-0.1.1.tar.gz/yummy_sphinx_theme-0.1.1/README.rst | README.rst |
if("undefined"==typeof jQuery)throw new Error("Bootstrap's JavaScript requires jQuery");+function(a){"use strict";var b=a.fn.jquery.split(" ")[0].split(".");if(b[0]<2&&b[1]<9||1==b[0]&&9==b[1]&&b[2]<1||b[0]>3)throw new Error("Bootstrap's JavaScript requires jQuery version 1.9.1 or higher, but lower than version 4")}(jQuery),+function(a){"use strict";function b(){var a=document.createElement("bootstrap"),b={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"};for(var c in b)if(void 0!==a.style[c])return{end:b[c]};return!1}a.fn.emulateTransitionEnd=function(b){var c=!1,d=this;a(this).one("bsTransitionEnd",function(){c=!0});var e=function(){c||a(d).trigger(a.support.transition.end)};return setTimeout(e,b),this},a(function(){a.support.transition=b(),a.support.transition&&(a.event.special.bsTransitionEnd={bindType:a.support.transition.end,delegateType:a.support.transition.end,handle:function(b){if(a(b.target).is(this))return b.handleObj.handler.apply(this,arguments)}})})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var c=a(this),e=c.data("bs.alert");e||c.data("bs.alert",e=new d(this)),"string"==typeof b&&e[b].call(c)})}var c='[data-dismiss="alert"]',d=function(b){a(b).on("click",c,this.close)};d.VERSION="3.3.7",d.TRANSITION_DURATION=150,d.prototype.close=function(b){function c(){g.detach().trigger("closed.bs.alert").remove()}var e=a(this),f=e.attr("data-target");f||(f=e.attr("href"),f=f&&f.replace(/.*(?=#[^\s]*$)/,""));var g=a("#"===f?[]:f);b&&b.preventDefault(),g.length||(g=e.closest(".alert")),g.trigger(b=a.Event("close.bs.alert")),b.isDefaultPrevented()||(g.removeClass("in"),a.support.transition&&g.hasClass("fade")?g.one("bsTransitionEnd",c).emulateTransitionEnd(d.TRANSITION_DURATION):c())};var e=a.fn.alert;a.fn.alert=b,a.fn.alert.Constructor=d,a.fn.alert.noConflict=function(){return a.fn.alert=e,this},a(document).on("click.bs.alert.data-api",c,d.prototype.close)}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.button"),f="object"==typeof b&&b;e||d.data("bs.button",e=new c(this,f)),"toggle"==b?e.toggle():b&&e.setState(b)})}var c=function(b,d){this.$element=a(b),this.options=a.extend({},c.DEFAULTS,d),this.isLoading=!1};c.VERSION="3.3.7",c.DEFAULTS={loadingText:"loading..."},c.prototype.setState=function(b){var c="disabled",d=this.$element,e=d.is("input")?"val":"html",f=d.data();b+="Text",null==f.resetText&&d.data("resetText",d[e]()),setTimeout(a.proxy(function(){d[e](null==f[b]?this.options[b]:f[b]),"loadingText"==b?(this.isLoading=!0,d.addClass(c).attr(c,c).prop(c,!0)):this.isLoading&&(this.isLoading=!1,d.removeClass(c).removeAttr(c).prop(c,!1))},this),0)},c.prototype.toggle=function(){var a=!0,b=this.$element.closest('[data-toggle="buttons"]');if(b.length){var c=this.$element.find("input");"radio"==c.prop("type")?(c.prop("checked")&&(a=!1),b.find(".active").removeClass("active"),this.$element.addClass("active")):"checkbox"==c.prop("type")&&(c.prop("checked")!==this.$element.hasClass("active")&&(a=!1),this.$element.toggleClass("active")),c.prop("checked",this.$element.hasClass("active")),a&&c.trigger("change")}else this.$element.attr("aria-pressed",!this.$element.hasClass("active")),this.$element.toggleClass("active")};var d=a.fn.button;a.fn.button=b,a.fn.button.Constructor=c,a.fn.button.noConflict=function(){return a.fn.button=d,this},a(document).on("click.bs.button.data-api",'[data-toggle^="button"]',function(c){var 
d=a(c.target).closest(".btn");b.call(d,"toggle"),a(c.target).is('input[type="radio"], input[type="checkbox"]')||(c.preventDefault(),d.is("input,button")?d.trigger("focus"):d.find("input:visible,button:visible").first().trigger("focus"))}).on("focus.bs.button.data-api blur.bs.button.data-api",'[data-toggle^="button"]',function(b){a(b.target).closest(".btn").toggleClass("focus",/^focus(in)?$/.test(b.type))})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.carousel"),f=a.extend({},c.DEFAULTS,d.data(),"object"==typeof b&&b),g="string"==typeof b?b:f.slide;e||d.data("bs.carousel",e=new c(this,f)),"number"==typeof b?e.to(b):g?e[g]():f.interval&&e.pause().cycle()})}var c=function(b,c){this.$element=a(b),this.$indicators=this.$element.find(".carousel-indicators"),this.options=c,this.paused=null,this.sliding=null,this.interval=null,this.$active=null,this.$items=null,this.options.keyboard&&this.$element.on("keydown.bs.carousel",a.proxy(this.keydown,this)),"hover"==this.options.pause&&!("ontouchstart"in document.documentElement)&&this.$element.on("mouseenter.bs.carousel",a.proxy(this.pause,this)).on("mouseleave.bs.carousel",a.proxy(this.cycle,this))};c.VERSION="3.3.7",c.TRANSITION_DURATION=600,c.DEFAULTS={interval:5e3,pause:"hover",wrap:!0,keyboard:!0},c.prototype.keydown=function(a){if(!/input|textarea/i.test(a.target.tagName)){switch(a.which){case 37:this.prev();break;case 39:this.next();break;default:return}a.preventDefault()}},c.prototype.cycle=function(b){return b||(this.paused=!1),this.interval&&clearInterval(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(a.proxy(this.next,this),this.options.interval)),this},c.prototype.getItemIndex=function(a){return this.$items=a.parent().children(".item"),this.$items.index(a||this.$active)},c.prototype.getItemForDirection=function(a,b){var c=this.getItemIndex(b),d="prev"==a&&0===c||"next"==a&&c==this.$items.length-1;if(d&&!this.options.wrap)return b;var e="prev"==a?-1:1,f=(c+e)%this.$items.length;return this.$items.eq(f)},c.prototype.to=function(a){var b=this,c=this.getItemIndex(this.$active=this.$element.find(".item.active"));if(!(a>this.$items.length-1||a<0))return this.sliding?this.$element.one("slid.bs.carousel",function(){b.to(a)}):c==a?this.pause().cycle():this.slide(a>c?"next":"prev",this.$items.eq(a))},c.prototype.pause=function(b){return b||(this.paused=!0),this.$element.find(".next, .prev").length&&a.support.transition&&(this.$element.trigger(a.support.transition.end),this.cycle(!0)),this.interval=clearInterval(this.interval),this},c.prototype.next=function(){if(!this.sliding)return this.slide("next")},c.prototype.prev=function(){if(!this.sliding)return this.slide("prev")},c.prototype.slide=function(b,d){var e=this.$element.find(".item.active"),f=d||this.getItemForDirection(b,e),g=this.interval,h="next"==b?"left":"right",i=this;if(f.hasClass("active"))return this.sliding=!1;var j=f[0],k=a.Event("slide.bs.carousel",{relatedTarget:j,direction:h});if(this.$element.trigger(k),!k.isDefaultPrevented()){if(this.sliding=!0,g&&this.pause(),this.$indicators.length){this.$indicators.find(".active").removeClass("active");var l=a(this.$indicators.children()[this.getItemIndex(f)]);l&&l.addClass("active")}var m=a.Event("slid.bs.carousel",{relatedTarget:j,direction:h});return a.support.transition&&this.$element.hasClass("slide")?(f.addClass(b),f[0].offsetWidth,e.addClass(h),f.addClass(h),e.one("bsTransitionEnd",function(){f.removeClass([b,h].join(" 
")).addClass("active"),e.removeClass(["active",h].join(" ")),i.sliding=!1,setTimeout(function(){i.$element.trigger(m)},0)}).emulateTransitionEnd(c.TRANSITION_DURATION)):(e.removeClass("active"),f.addClass("active"),this.sliding=!1,this.$element.trigger(m)),g&&this.cycle(),this}};var d=a.fn.carousel;a.fn.carousel=b,a.fn.carousel.Constructor=c,a.fn.carousel.noConflict=function(){return a.fn.carousel=d,this};var e=function(c){var d,e=a(this),f=a(e.attr("data-target")||(d=e.attr("href"))&&d.replace(/.*(?=#[^\s]+$)/,""));if(f.hasClass("carousel")){var g=a.extend({},f.data(),e.data()),h=e.attr("data-slide-to");h&&(g.interval=!1),b.call(f,g),h&&f.data("bs.carousel").to(h),c.preventDefault()}};a(document).on("click.bs.carousel.data-api","[data-slide]",e).on("click.bs.carousel.data-api","[data-slide-to]",e),a(window).on("load",function(){a('[data-ride="carousel"]').each(function(){var c=a(this);b.call(c,c.data())})})}(jQuery),+function(a){"use strict";function b(b){var c,d=b.attr("data-target")||(c=b.attr("href"))&&c.replace(/.*(?=#[^\s]+$)/,"");return a(d)}function c(b){return this.each(function(){var c=a(this),e=c.data("bs.collapse"),f=a.extend({},d.DEFAULTS,c.data(),"object"==typeof b&&b);!e&&f.toggle&&/show|hide/.test(b)&&(f.toggle=!1),e||c.data("bs.collapse",e=new d(this,f)),"string"==typeof b&&e[b]()})}var d=function(b,c){this.$element=a(b),this.options=a.extend({},d.DEFAULTS,c),this.$trigger=a('[data-toggle="collapse"][href="#'+b.id+'"],[data-toggle="collapse"][data-target="#'+b.id+'"]'),this.transitioning=null,this.options.parent?this.$parent=this.getParent():this.addAriaAndCollapsedClass(this.$element,this.$trigger),this.options.toggle&&this.toggle()};d.VERSION="3.3.7",d.TRANSITION_DURATION=350,d.DEFAULTS={toggle:!0},d.prototype.dimension=function(){var a=this.$element.hasClass("width");return a?"width":"height"},d.prototype.show=function(){if(!this.transitioning&&!this.$element.hasClass("in")){var b,e=this.$parent&&this.$parent.children(".panel").children(".in, .collapsing");if(!(e&&e.length&&(b=e.data("bs.collapse"),b&&b.transitioning))){var f=a.Event("show.bs.collapse");if(this.$element.trigger(f),!f.isDefaultPrevented()){e&&e.length&&(c.call(e,"hide"),b||e.data("bs.collapse",null));var g=this.dimension();this.$element.removeClass("collapse").addClass("collapsing")[g](0).attr("aria-expanded",!0),this.$trigger.removeClass("collapsed").attr("aria-expanded",!0),this.transitioning=1;var h=function(){this.$element.removeClass("collapsing").addClass("collapse in")[g](""),this.transitioning=0,this.$element.trigger("shown.bs.collapse")};if(!a.support.transition)return h.call(this);var i=a.camelCase(["scroll",g].join("-"));this.$element.one("bsTransitionEnd",a.proxy(h,this)).emulateTransitionEnd(d.TRANSITION_DURATION)[g](this.$element[0][i])}}}},d.prototype.hide=function(){if(!this.transitioning&&this.$element.hasClass("in")){var b=a.Event("hide.bs.collapse");if(this.$element.trigger(b),!b.isDefaultPrevented()){var c=this.dimension();this.$element[c](this.$element[c]())[0].offsetHeight,this.$element.addClass("collapsing").removeClass("collapse in").attr("aria-expanded",!1),this.$trigger.addClass("collapsed").attr("aria-expanded",!1),this.transitioning=1;var e=function(){this.transitioning=0,this.$element.removeClass("collapsing").addClass("collapse").trigger("hidden.bs.collapse")};return a.support.transition?void 
this.$element[c](0).one("bsTransitionEnd",a.proxy(e,this)).emulateTransitionEnd(d.TRANSITION_DURATION):e.call(this)}}},d.prototype.toggle=function(){this[this.$element.hasClass("in")?"hide":"show"]()},d.prototype.getParent=function(){return a(this.options.parent).find('[data-toggle="collapse"][data-parent="'+this.options.parent+'"]').each(a.proxy(function(c,d){var e=a(d);this.addAriaAndCollapsedClass(b(e),e)},this)).end()},d.prototype.addAriaAndCollapsedClass=function(a,b){var c=a.hasClass("in");a.attr("aria-expanded",c),b.toggleClass("collapsed",!c).attr("aria-expanded",c)};var e=a.fn.collapse;a.fn.collapse=c,a.fn.collapse.Constructor=d,a.fn.collapse.noConflict=function(){return a.fn.collapse=e,this},a(document).on("click.bs.collapse.data-api",'[data-toggle="collapse"]',function(d){var e=a(this);e.attr("data-target")||d.preventDefault();var f=b(e),g=f.data("bs.collapse"),h=g?"toggle":e.data();c.call(f,h)})}(jQuery),+function(a){"use strict";function b(b){var c=b.attr("data-target");c||(c=b.attr("href"),c=c&&/#[A-Za-z]/.test(c)&&c.replace(/.*(?=#[^\s]*$)/,""));var d=c&&a(c);return d&&d.length?d:b.parent()}function c(c){c&&3===c.which||(a(e).remove(),a(f).each(function(){var d=a(this),e=b(d),f={relatedTarget:this};e.hasClass("open")&&(c&&"click"==c.type&&/input|textarea/i.test(c.target.tagName)&&a.contains(e[0],c.target)||(e.trigger(c=a.Event("hide.bs.dropdown",f)),c.isDefaultPrevented()||(d.attr("aria-expanded","false"),e.removeClass("open").trigger(a.Event("hidden.bs.dropdown",f)))))}))}function d(b){return this.each(function(){var c=a(this),d=c.data("bs.dropdown");d||c.data("bs.dropdown",d=new g(this)),"string"==typeof b&&d[b].call(c)})}var e=".dropdown-backdrop",f='[data-toggle="dropdown"]',g=function(b){a(b).on("click.bs.dropdown",this.toggle)};g.VERSION="3.3.7",g.prototype.toggle=function(d){var e=a(this);if(!e.is(".disabled, :disabled")){var f=b(e),g=f.hasClass("open");if(c(),!g){"ontouchstart"in document.documentElement&&!f.closest(".navbar-nav").length&&a(document.createElement("div")).addClass("dropdown-backdrop").insertAfter(a(this)).on("click",c);var h={relatedTarget:this};if(f.trigger(d=a.Event("show.bs.dropdown",h)),d.isDefaultPrevented())return;e.trigger("focus").attr("aria-expanded","true"),f.toggleClass("open").trigger(a.Event("shown.bs.dropdown",h))}return!1}},g.prototype.keydown=function(c){if(/(38|40|27|32)/.test(c.which)&&!/input|textarea/i.test(c.target.tagName)){var d=a(this);if(c.preventDefault(),c.stopPropagation(),!d.is(".disabled, :disabled")){var e=b(d),g=e.hasClass("open");if(!g&&27!=c.which||g&&27==c.which)return 27==c.which&&e.find(f).trigger("focus"),d.trigger("click");var h=" li:not(.disabled):visible a",i=e.find(".dropdown-menu"+h);if(i.length){var j=i.index(c.target);38==c.which&&j>0&&j--,40==c.which&&j<i.length-1&&j++,~j||(j=0),i.eq(j).trigger("focus")}}}};var h=a.fn.dropdown;a.fn.dropdown=d,a.fn.dropdown.Constructor=g,a.fn.dropdown.noConflict=function(){return a.fn.dropdown=h,this},a(document).on("click.bs.dropdown.data-api",c).on("click.bs.dropdown.data-api",".dropdown form",function(a){a.stopPropagation()}).on("click.bs.dropdown.data-api",f,g.prototype.toggle).on("keydown.bs.dropdown.data-api",f,g.prototype.keydown).on("keydown.bs.dropdown.data-api",".dropdown-menu",g.prototype.keydown)}(jQuery),+function(a){"use strict";function b(b,d){return this.each(function(){var e=a(this),f=e.data("bs.modal"),g=a.extend({},c.DEFAULTS,e.data(),"object"==typeof b&&b);f||e.data("bs.modal",f=new c(this,g)),"string"==typeof b?f[b](d):g.show&&f.show(d)})}var 
c=function(b,c){this.options=c,this.$body=a(document.body),this.$element=a(b),this.$dialog=this.$element.find(".modal-dialog"),this.$backdrop=null,this.isShown=null,this.originalBodyPad=null,this.scrollbarWidth=0,this.ignoreBackdropClick=!1,this.options.remote&&this.$element.find(".modal-content").load(this.options.remote,a.proxy(function(){this.$element.trigger("loaded.bs.modal")},this))};c.VERSION="3.3.7",c.TRANSITION_DURATION=300,c.BACKDROP_TRANSITION_DURATION=150,c.DEFAULTS={backdrop:!0,keyboard:!0,show:!0},c.prototype.toggle=function(a){return this.isShown?this.hide():this.show(a)},c.prototype.show=function(b){var d=this,e=a.Event("show.bs.modal",{relatedTarget:b});this.$element.trigger(e),this.isShown||e.isDefaultPrevented()||(this.isShown=!0,this.checkScrollbar(),this.setScrollbar(),this.$body.addClass("modal-open"),this.escape(),this.resize(),this.$element.on("click.dismiss.bs.modal",'[data-dismiss="modal"]',a.proxy(this.hide,this)),this.$dialog.on("mousedown.dismiss.bs.modal",function(){d.$element.one("mouseup.dismiss.bs.modal",function(b){a(b.target).is(d.$element)&&(d.ignoreBackdropClick=!0)})}),this.backdrop(function(){var e=a.support.transition&&d.$element.hasClass("fade");d.$element.parent().length||d.$element.appendTo(d.$body),d.$element.show().scrollTop(0),d.adjustDialog(),e&&d.$element[0].offsetWidth,d.$element.addClass("in"),d.enforceFocus();var f=a.Event("shown.bs.modal",{relatedTarget:b});e?d.$dialog.one("bsTransitionEnd",function(){d.$element.trigger("focus").trigger(f)}).emulateTransitionEnd(c.TRANSITION_DURATION):d.$element.trigger("focus").trigger(f)}))},c.prototype.hide=function(b){b&&b.preventDefault(),b=a.Event("hide.bs.modal"),this.$element.trigger(b),this.isShown&&!b.isDefaultPrevented()&&(this.isShown=!1,this.escape(),this.resize(),a(document).off("focusin.bs.modal"),this.$element.removeClass("in").off("click.dismiss.bs.modal").off("mouseup.dismiss.bs.modal"),this.$dialog.off("mousedown.dismiss.bs.modal"),a.support.transition&&this.$element.hasClass("fade")?this.$element.one("bsTransitionEnd",a.proxy(this.hideModal,this)).emulateTransitionEnd(c.TRANSITION_DURATION):this.hideModal())},c.prototype.enforceFocus=function(){a(document).off("focusin.bs.modal").on("focusin.bs.modal",a.proxy(function(a){document===a.target||this.$element[0]===a.target||this.$element.has(a.target).length||this.$element.trigger("focus")},this))},c.prototype.escape=function(){this.isShown&&this.options.keyboard?this.$element.on("keydown.dismiss.bs.modal",a.proxy(function(a){27==a.which&&this.hide()},this)):this.isShown||this.$element.off("keydown.dismiss.bs.modal")},c.prototype.resize=function(){this.isShown?a(window).on("resize.bs.modal",a.proxy(this.handleUpdate,this)):a(window).off("resize.bs.modal")},c.prototype.hideModal=function(){var a=this;this.$element.hide(),this.backdrop(function(){a.$body.removeClass("modal-open"),a.resetAdjustments(),a.resetScrollbar(),a.$element.trigger("hidden.bs.modal")})},c.prototype.removeBackdrop=function(){this.$backdrop&&this.$backdrop.remove(),this.$backdrop=null},c.prototype.backdrop=function(b){var d=this,e=this.$element.hasClass("fade")?"fade":"";if(this.isShown&&this.options.backdrop){var f=a.support.transition&&e;if(this.$backdrop=a(document.createElement("div")).addClass("modal-backdrop "+e).appendTo(this.$body),this.$element.on("click.dismiss.bs.modal",a.proxy(function(a){return 
this.ignoreBackdropClick?void(this.ignoreBackdropClick=!1):void(a.target===a.currentTarget&&("static"==this.options.backdrop?this.$element[0].focus():this.hide()))},this)),f&&this.$backdrop[0].offsetWidth,this.$backdrop.addClass("in"),!b)return;f?this.$backdrop.one("bsTransitionEnd",b).emulateTransitionEnd(c.BACKDROP_TRANSITION_DURATION):b()}else if(!this.isShown&&this.$backdrop){this.$backdrop.removeClass("in");var g=function(){d.removeBackdrop(),b&&b()};a.support.transition&&this.$element.hasClass("fade")?this.$backdrop.one("bsTransitionEnd",g).emulateTransitionEnd(c.BACKDROP_TRANSITION_DURATION):g()}else b&&b()},c.prototype.handleUpdate=function(){this.adjustDialog()},c.prototype.adjustDialog=function(){var a=this.$element[0].scrollHeight>document.documentElement.clientHeight;this.$element.css({paddingLeft:!this.bodyIsOverflowing&&a?this.scrollbarWidth:"",paddingRight:this.bodyIsOverflowing&&!a?this.scrollbarWidth:""})},c.prototype.resetAdjustments=function(){this.$element.css({paddingLeft:"",paddingRight:""})},c.prototype.checkScrollbar=function(){var a=window.innerWidth;if(!a){var b=document.documentElement.getBoundingClientRect();a=b.right-Math.abs(b.left)}this.bodyIsOverflowing=document.body.clientWidth<a,this.scrollbarWidth=this.measureScrollbar()},c.prototype.setScrollbar=function(){var a=parseInt(this.$body.css("padding-right")||0,10);this.originalBodyPad=document.body.style.paddingRight||"",this.bodyIsOverflowing&&this.$body.css("padding-right",a+this.scrollbarWidth)},c.prototype.resetScrollbar=function(){this.$body.css("padding-right",this.originalBodyPad)},c.prototype.measureScrollbar=function(){var a=document.createElement("div");a.className="modal-scrollbar-measure",this.$body.append(a);var b=a.offsetWidth-a.clientWidth;return this.$body[0].removeChild(a),b};var d=a.fn.modal;a.fn.modal=b,a.fn.modal.Constructor=c,a.fn.modal.noConflict=function(){return a.fn.modal=d,this},a(document).on("click.bs.modal.data-api",'[data-toggle="modal"]',function(c){var d=a(this),e=d.attr("href"),f=a(d.attr("data-target")||e&&e.replace(/.*(?=#[^\s]+$)/,"")),g=f.data("bs.modal")?"toggle":a.extend({remote:!/#/.test(e)&&e},f.data(),d.data());d.is("a")&&c.preventDefault(),f.one("show.bs.modal",function(a){a.isDefaultPrevented()||f.one("hidden.bs.modal",function(){d.is(":visible")&&d.trigger("focus")})}),b.call(f,g,this)})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.tooltip"),f="object"==typeof b&&b;!e&&/destroy|hide/.test(b)||(e||d.data("bs.tooltip",e=new c(this,f)),"string"==typeof b&&e[b]())})}var c=function(a,b){this.type=null,this.options=null,this.enabled=null,this.timeout=null,this.hoverState=null,this.$element=null,this.inState=null,this.init("tooltip",a,b)};c.VERSION="3.3.7",c.TRANSITION_DURATION=150,c.DEFAULTS={animation:!0,placement:"top",selector:!1,template:'<div class="tooltip" role="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>',trigger:"hover focus",title:"",delay:0,html:!1,container:!1,viewport:{selector:"body",padding:0}},c.prototype.init=function(b,c,d){if(this.enabled=!0,this.type=b,this.$element=a(c),this.options=this.getOptions(d),this.$viewport=this.options.viewport&&a(a.isFunction(this.options.viewport)?this.options.viewport.call(this,this.$element):this.options.viewport.selector||this.options.viewport),this.inState={click:!1,hover:!1,focus:!1},this.$element[0]instanceof document.constructor&&!this.options.selector)throw new Error("`selector` option must be specified when 
initializing "+this.type+" on the window.document object!");for(var e=this.options.trigger.split(" "),f=e.length;f--;){var g=e[f];if("click"==g)this.$element.on("click."+this.type,this.options.selector,a.proxy(this.toggle,this));else if("manual"!=g){var h="hover"==g?"mouseenter":"focusin",i="hover"==g?"mouseleave":"focusout";this.$element.on(h+"."+this.type,this.options.selector,a.proxy(this.enter,this)),this.$element.on(i+"."+this.type,this.options.selector,a.proxy(this.leave,this))}}this.options.selector?this._options=a.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},c.prototype.getDefaults=function(){return c.DEFAULTS},c.prototype.getOptions=function(b){return b=a.extend({},this.getDefaults(),this.$element.data(),b),b.delay&&"number"==typeof b.delay&&(b.delay={show:b.delay,hide:b.delay}),b},c.prototype.getDelegateOptions=function(){var b={},c=this.getDefaults();return this._options&&a.each(this._options,function(a,d){c[a]!=d&&(b[a]=d)}),b},c.prototype.enter=function(b){var c=b instanceof this.constructor?b:a(b.currentTarget).data("bs."+this.type);return c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c)),b instanceof a.Event&&(c.inState["focusin"==b.type?"focus":"hover"]=!0),c.tip().hasClass("in")||"in"==c.hoverState?void(c.hoverState="in"):(clearTimeout(c.timeout),c.hoverState="in",c.options.delay&&c.options.delay.show?void(c.timeout=setTimeout(function(){"in"==c.hoverState&&c.show()},c.options.delay.show)):c.show())},c.prototype.isInStateTrue=function(){for(var a in this.inState)if(this.inState[a])return!0;return!1},c.prototype.leave=function(b){var c=b instanceof this.constructor?b:a(b.currentTarget).data("bs."+this.type);if(c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c)),b instanceof a.Event&&(c.inState["focusout"==b.type?"focus":"hover"]=!1),!c.isInStateTrue())return clearTimeout(c.timeout),c.hoverState="out",c.options.delay&&c.options.delay.hide?void(c.timeout=setTimeout(function(){"out"==c.hoverState&&c.hide()},c.options.delay.hide)):c.hide()},c.prototype.show=function(){var b=a.Event("show.bs."+this.type);if(this.hasContent()&&this.enabled){this.$element.trigger(b);var d=a.contains(this.$element[0].ownerDocument.documentElement,this.$element[0]);if(b.isDefaultPrevented()||!d)return;var e=this,f=this.tip(),g=this.getUID(this.type);this.setContent(),f.attr("id",g),this.$element.attr("aria-describedby",g),this.options.animation&&f.addClass("fade");var h="function"==typeof this.options.placement?this.options.placement.call(this,f[0],this.$element[0]):this.options.placement,i=/\s?auto?\s?/i,j=i.test(h);j&&(h=h.replace(i,"")||"top"),f.detach().css({top:0,left:0,display:"block"}).addClass(h).data("bs."+this.type,this),this.options.container?f.appendTo(this.options.container):f.insertAfter(this.$element),this.$element.trigger("inserted.bs."+this.type);var k=this.getPosition(),l=f[0].offsetWidth,m=f[0].offsetHeight;if(j){var n=h,o=this.getPosition(this.$viewport);h="bottom"==h&&k.bottom+m>o.bottom?"top":"top"==h&&k.top-m<o.top?"bottom":"right"==h&&k.right+l>o.width?"left":"left"==h&&k.left-l<o.left?"right":h,f.removeClass(n).addClass(h)}var p=this.getCalculatedOffset(h,k,l,m);this.applyPlacement(p,h);var q=function(){var 
a=e.hoverState;e.$element.trigger("shown.bs."+e.type),e.hoverState=null,"out"==a&&e.leave(e)};a.support.transition&&this.$tip.hasClass("fade")?f.one("bsTransitionEnd",q).emulateTransitionEnd(c.TRANSITION_DURATION):q()}},c.prototype.applyPlacement=function(b,c){var d=this.tip(),e=d[0].offsetWidth,f=d[0].offsetHeight,g=parseInt(d.css("margin-top"),10),h=parseInt(d.css("margin-left"),10);isNaN(g)&&(g=0),isNaN(h)&&(h=0),b.top+=g,b.left+=h,a.offset.setOffset(d[0],a.extend({using:function(a){d.css({top:Math.round(a.top),left:Math.round(a.left)})}},b),0),d.addClass("in");var i=d[0].offsetWidth,j=d[0].offsetHeight;"top"==c&&j!=f&&(b.top=b.top+f-j);var k=this.getViewportAdjustedDelta(c,b,i,j);k.left?b.left+=k.left:b.top+=k.top;var l=/top|bottom/.test(c),m=l?2*k.left-e+i:2*k.top-f+j,n=l?"offsetWidth":"offsetHeight";d.offset(b),this.replaceArrow(m,d[0][n],l)},c.prototype.replaceArrow=function(a,b,c){this.arrow().css(c?"left":"top",50*(1-a/b)+"%").css(c?"top":"left","")},c.prototype.setContent=function(){var a=this.tip(),b=this.getTitle();a.find(".tooltip-inner")[this.options.html?"html":"text"](b),a.removeClass("fade in top bottom left right")},c.prototype.hide=function(b){function d(){"in"!=e.hoverState&&f.detach(),e.$element&&e.$element.removeAttr("aria-describedby").trigger("hidden.bs."+e.type),b&&b()}var e=this,f=a(this.$tip),g=a.Event("hide.bs."+this.type);if(this.$element.trigger(g),!g.isDefaultPrevented())return f.removeClass("in"),a.support.transition&&f.hasClass("fade")?f.one("bsTransitionEnd",d).emulateTransitionEnd(c.TRANSITION_DURATION):d(),this.hoverState=null,this},c.prototype.fixTitle=function(){var a=this.$element;(a.attr("title")||"string"!=typeof a.attr("data-original-title"))&&a.attr("data-original-title",a.attr("title")||"").attr("title","")},c.prototype.hasContent=function(){return this.getTitle()},c.prototype.getPosition=function(b){b=b||this.$element;var c=b[0],d="BODY"==c.tagName,e=c.getBoundingClientRect();null==e.width&&(e=a.extend({},e,{width:e.right-e.left,height:e.bottom-e.top}));var f=window.SVGElement&&c instanceof window.SVGElement,g=d?{top:0,left:0}:f?null:b.offset(),h={scroll:d?document.documentElement.scrollTop||document.body.scrollTop:b.scrollTop()},i=d?{width:a(window).width(),height:a(window).height()}:null;return a.extend({},e,h,i,g)},c.prototype.getCalculatedOffset=function(a,b,c,d){return"bottom"==a?{top:b.top+b.height,left:b.left+b.width/2-c/2}:"top"==a?{top:b.top-d,left:b.left+b.width/2-c/2}:"left"==a?{top:b.top+b.height/2-d/2,left:b.left-c}:{top:b.top+b.height/2-d/2,left:b.left+b.width}},c.prototype.getViewportAdjustedDelta=function(a,b,c,d){var e={top:0,left:0};if(!this.$viewport)return e;var f=this.options.viewport&&this.options.viewport.padding||0,g=this.getPosition(this.$viewport);if(/right|left/.test(a)){var h=b.top-f-g.scroll,i=b.top+f-g.scroll+d;h<g.top?e.top=g.top-h:i>g.top+g.height&&(e.top=g.top+g.height-i)}else{var j=b.left-f,k=b.left+f+c;j<g.left?e.left=g.left-j:k>g.right&&(e.left=g.left+g.width-k)}return e},c.prototype.getTitle=function(){var a,b=this.$element,c=this.options;return a=b.attr("data-original-title")||("function"==typeof c.title?c.title.call(b[0]):c.title)},c.prototype.getUID=function(a){do a+=~~(1e6*Math.random());while(document.getElementById(a));return a},c.prototype.tip=function(){if(!this.$tip&&(this.$tip=a(this.options.template),1!=this.$tip.length))throw new Error(this.type+" `template` option must consist of exactly 1 top-level element!");return this.$tip},c.prototype.arrow=function(){return 
this.$arrow=this.$arrow||this.tip().find(".tooltip-arrow")},c.prototype.enable=function(){this.enabled=!0},c.prototype.disable=function(){this.enabled=!1},c.prototype.toggleEnabled=function(){this.enabled=!this.enabled},c.prototype.toggle=function(b){var c=this;b&&(c=a(b.currentTarget).data("bs."+this.type),c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c))),b?(c.inState.click=!c.inState.click,c.isInStateTrue()?c.enter(c):c.leave(c)):c.tip().hasClass("in")?c.leave(c):c.enter(c)},c.prototype.destroy=function(){var a=this;clearTimeout(this.timeout),this.hide(function(){a.$element.off("."+a.type).removeData("bs."+a.type),a.$tip&&a.$tip.detach(),a.$tip=null,a.$arrow=null,a.$viewport=null,a.$element=null})};var d=a.fn.tooltip;a.fn.tooltip=b,a.fn.tooltip.Constructor=c,a.fn.tooltip.noConflict=function(){return a.fn.tooltip=d,this}}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.popover"),f="object"==typeof b&&b;!e&&/destroy|hide/.test(b)||(e||d.data("bs.popover",e=new c(this,f)),"string"==typeof b&&e[b]())})}var c=function(a,b){this.init("popover",a,b)};if(!a.fn.tooltip)throw new Error("Popover requires tooltip.js");c.VERSION="3.3.7",c.DEFAULTS=a.extend({},a.fn.tooltip.Constructor.DEFAULTS,{placement:"right",trigger:"click",content:"",template:'<div class="popover" role="tooltip"><div class="arrow"></div><h3 class="popover-title"></h3><div class="popover-content"></div></div>'}),c.prototype=a.extend({},a.fn.tooltip.Constructor.prototype),c.prototype.constructor=c,c.prototype.getDefaults=function(){return c.DEFAULTS},c.prototype.setContent=function(){var a=this.tip(),b=this.getTitle(),c=this.getContent();a.find(".popover-title")[this.options.html?"html":"text"](b),a.find(".popover-content").children().detach().end()[this.options.html?"string"==typeof c?"html":"append":"text"](c),a.removeClass("fade top bottom left right in"),a.find(".popover-title").html()||a.find(".popover-title").hide()},c.prototype.hasContent=function(){return this.getTitle()||this.getContent()},c.prototype.getContent=function(){var a=this.$element,b=this.options;return a.attr("data-content")||("function"==typeof b.content?b.content.call(a[0]):b.content)},c.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".arrow")};var d=a.fn.popover;a.fn.popover=b,a.fn.popover.Constructor=c,a.fn.popover.noConflict=function(){return a.fn.popover=d,this}}(jQuery),+function(a){"use strict";function b(c,d){this.$body=a(document.body),this.$scrollElement=a(a(c).is(document.body)?window:c),this.options=a.extend({},b.DEFAULTS,d),this.selector=(this.options.target||"")+" .nav li > a",this.offsets=[],this.targets=[],this.activeTarget=null,this.scrollHeight=0,this.$scrollElement.on("scroll.bs.scrollspy",a.proxy(this.process,this)),this.refresh(),this.process()}function c(c){return this.each(function(){var d=a(this),e=d.data("bs.scrollspy"),f="object"==typeof c&&c;e||d.data("bs.scrollspy",e=new b(this,f)),"string"==typeof c&&e[c]()})}b.VERSION="3.3.7",b.DEFAULTS={offset:10},b.prototype.getScrollHeight=function(){return this.$scrollElement[0].scrollHeight||Math.max(this.$body[0].scrollHeight,document.documentElement.scrollHeight)},b.prototype.refresh=function(){var b=this,c="offset",d=0;this.offsets=[],this.targets=[],this.scrollHeight=this.getScrollHeight(),a.isWindow(this.$scrollElement[0])||(c="position",d=this.$scrollElement.scrollTop()),this.$body.find(this.selector).map(function(){var 
b=a(this),e=b.data("target")||b.attr("href"),f=/^#./.test(e)&&a(e);return f&&f.length&&f.is(":visible")&&[[f[c]().top+d,e]]||null}).sort(function(a,b){return a[0]-b[0]}).each(function(){b.offsets.push(this[0]),b.targets.push(this[1])})},b.prototype.process=function(){var a,b=this.$scrollElement.scrollTop()+this.options.offset,c=this.getScrollHeight(),d=this.options.offset+c-this.$scrollElement.height(),e=this.offsets,f=this.targets,g=this.activeTarget;if(this.scrollHeight!=c&&this.refresh(),b>=d)return g!=(a=f[f.length-1])&&this.activate(a);if(g&&b<e[0])return this.activeTarget=null,this.clear();for(a=e.length;a--;)g!=f[a]&&b>=e[a]&&(void 0===e[a+1]||b<e[a+1])&&this.activate(f[a])},b.prototype.activate=function(b){
this.activeTarget=b,this.clear();var c=this.selector+'[data-target="'+b+'"],'+this.selector+'[href="'+b+'"]',d=a(c).parents("li").addClass("active");d.parent(".dropdown-menu").length&&(d=d.closest("li.dropdown").addClass("active")),d.trigger("activate.bs.scrollspy")},b.prototype.clear=function(){a(this.selector).parentsUntil(this.options.target,".active").removeClass("active")};var d=a.fn.scrollspy;a.fn.scrollspy=c,a.fn.scrollspy.Constructor=b,a.fn.scrollspy.noConflict=function(){return a.fn.scrollspy=d,this},a(window).on("load.bs.scrollspy.data-api",function(){a('[data-spy="scroll"]').each(function(){var b=a(this);c.call(b,b.data())})})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.tab");e||d.data("bs.tab",e=new c(this)),"string"==typeof b&&e[b]()})}var c=function(b){this.element=a(b)};c.VERSION="3.3.7",c.TRANSITION_DURATION=150,c.prototype.show=function(){var b=this.element,c=b.closest("ul:not(.dropdown-menu)"),d=b.data("target");if(d||(d=b.attr("href"),d=d&&d.replace(/.*(?=#[^\s]*$)/,"")),!b.parent("li").hasClass("active")){var e=c.find(".active:last a"),f=a.Event("hide.bs.tab",{relatedTarget:b[0]}),g=a.Event("show.bs.tab",{relatedTarget:e[0]});if(e.trigger(f),b.trigger(g),!g.isDefaultPrevented()&&!f.isDefaultPrevented()){var h=a(d);this.activate(b.closest("li"),c),this.activate(h,h.parent(),function(){e.trigger({type:"hidden.bs.tab",relatedTarget:b[0]}),b.trigger({type:"shown.bs.tab",relatedTarget:e[0]})})}}},c.prototype.activate=function(b,d,e){function f(){g.removeClass("active").find("> .dropdown-menu > .active").removeClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!1),b.addClass("active").find('[data-toggle="tab"]').attr("aria-expanded",!0),h?(b[0].offsetWidth,b.addClass("in")):b.removeClass("fade"),b.parent(".dropdown-menu").length&&b.closest("li.dropdown").addClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!0),e&&e()}var g=d.find("> .active"),h=e&&a.support.transition&&(g.length&&g.hasClass("fade")||!!d.find("> .fade").length);g.length&&h?g.one("bsTransitionEnd",f).emulateTransitionEnd(c.TRANSITION_DURATION):f(),g.removeClass("in")};var d=a.fn.tab;a.fn.tab=b,a.fn.tab.Constructor=c,a.fn.tab.noConflict=function(){return a.fn.tab=d,this};var e=function(c){c.preventDefault(),b.call(a(this),"show")};a(document).on("click.bs.tab.data-api",'[data-toggle="tab"]',e).on("click.bs.tab.data-api",'[data-toggle="pill"]',e)}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.affix"),f="object"==typeof b&&b;e||d.data("bs.affix",e=new c(this,f)),"string"==typeof b&&e[b]()})}var c=function(b,d){this.options=a.extend({},c.DEFAULTS,d),this.$target=a(this.options.target).on("scroll.bs.affix.data-api",a.proxy(this.checkPosition,this)).on("click.bs.affix.data-api",a.proxy(this.checkPositionWithEventLoop,this)),this.$element=a(b),this.affixed=null,this.unpin=null,this.pinnedOffset=null,this.checkPosition()};c.VERSION="3.3.7",c.RESET="affix affix-top affix-bottom",c.DEFAULTS={offset:0,target:window},c.prototype.getState=function(a,b,c,d){var e=this.$target.scrollTop(),f=this.$element.offset(),g=this.$target.height();if(null!=c&&"top"==this.affixed)return e<c&&"top";if("bottom"==this.affixed)return null!=c?!(e+this.unpin<=f.top)&&"bottom":!(e+g<=a-d)&&"bottom";var h=null==this.affixed,i=h?e:f.top,j=h?g:b;return null!=c&&e<=c?"top":null!=d&&i+j>=a-d&&"bottom"},c.prototype.getPinnedOffset=function(){if(this.pinnedOffset)return 
this.pinnedOffset;this.$element.removeClass(c.RESET).addClass("affix");var a=this.$target.scrollTop(),b=this.$element.offset();return this.pinnedOffset=b.top-a},c.prototype.checkPositionWithEventLoop=function(){setTimeout(a.proxy(this.checkPosition,this),1)},c.prototype.checkPosition=function(){if(this.$element.is(":visible")){var b=this.$element.height(),d=this.options.offset,e=d.top,f=d.bottom,g=Math.max(a(document).height(),a(document.body).height());"object"!=typeof d&&(f=e=d),"function"==typeof e&&(e=d.top(this.$element)),"function"==typeof f&&(f=d.bottom(this.$element));var h=this.getState(g,b,e,f);if(this.affixed!=h){null!=this.unpin&&this.$element.css("top","");var i="affix"+(h?"-"+h:""),j=a.Event(i+".bs.affix");if(this.$element.trigger(j),j.isDefaultPrevented())return;this.affixed=h,this.unpin="bottom"==h?this.getPinnedOffset():null,this.$element.removeClass(c.RESET).addClass(i).trigger(i.replace("affix","affixed")+".bs.affix")}"bottom"==h&&this.$element.offset({top:g-b-f})}};var d=a.fn.affix;a.fn.affix=b,a.fn.affix.Constructor=c,a.fn.affix.noConflict=function(){return a.fn.affix=d,this},a(window).on("load",function(){a('[data-spy="affix"]').each(function(){var c=a(this),d=c.data();d.offset=d.offset||{},null!=d.offsetBottom&&(d.offset.bottom=d.offsetBottom),null!=d.offsetTop&&(d.offset.top=d.offsetTop),b.call(c,d)})})}(jQuery); | yummy-sphinx-theme | /yummy_sphinx_theme-0.1.1.tar.gz/yummy_sphinx_theme-0.1.1/yummy_sphinx_theme/static/js/bootstrap.min.js | bootstrap.min.js |
if (typeof jQuery === 'undefined') {
throw new Error('Bootstrap\'s JavaScript requires jQuery')
}
+function ($) {
'use strict';
var version = $.fn.jquery.split(' ')[0].split('.')
if ((version[0] < 2 && version[1] < 9) || (version[0] == 1 && version[1] == 9 && version[2] < 1) || (version[0] > 3)) {
throw new Error('Bootstrap\'s JavaScript requires jQuery version 1.9.1 or higher, but lower than version 4')
}
}(jQuery);
/* ========================================================================
* Bootstrap: transition.js v3.3.7
* http://getbootstrap.com/javascript/#transitions
* ========================================================================
* Copyright 2011-2016 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// CSS TRANSITION SUPPORT (Shoutout: http://www.modernizr.com/)
// ============================================================
function transitionEnd() {
var el = document.createElement('bootstrap')
var transEndEventNames = {
WebkitTransition : 'webkitTransitionEnd',
MozTransition : 'transitionend',
OTransition : 'oTransitionEnd otransitionend',
transition : 'transitionend'
}
for (var name in transEndEventNames) {
if (el.style[name] !== undefined) {
return { end: transEndEventNames[name] }
}
}
return false // explicit for ie8 ( ._.)
}
// http://blog.alexmaccaw.com/css-transitions
$.fn.emulateTransitionEnd = function (duration) {
var called = false
var $el = this
$(this).one('bsTransitionEnd', function () { called = true })
var callback = function () { if (!called) $($el).trigger($.support.transition.end) }
setTimeout(callback, duration)
return this
}
$(function () {
$.support.transition = transitionEnd()
if (!$.support.transition) return
$.event.special.bsTransitionEnd = {
bindType: $.support.transition.end,
delegateType: $.support.transition.end,
handle: function (e) {
if ($(e.target).is(this)) return e.handleObj.handler.apply(this, arguments)
}
}
})
}(jQuery);
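// USAGE SKETCH: TRANSITION HELPERS
// ================================
// A minimal sketch of how the helpers above are consumed elsewhere in this
// file, assuming jQuery is loaded and the DOM is ready ($.support.transition
// is only set on DOM ready); '#panel' and 350ms are illustrative placeholders:
//
//   if ($.support.transition) {
//     $('#panel')
//       .one('bsTransitionEnd', function () { console.log('transition finished') })
//       .emulateTransitionEnd(350) // fires the handler anyway if no transitionend arrives within 350ms
//   }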
/* ========================================================================
* Bootstrap: alert.js v3.3.7
* http://getbootstrap.com/javascript/#alerts
* ========================================================================
* Copyright 2011-2016 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// ALERT CLASS DEFINITION
// ======================
var dismiss = '[data-dismiss="alert"]'
var Alert = function (el) {
$(el).on('click', dismiss, this.close)
}
Alert.VERSION = '3.3.7'
Alert.TRANSITION_DURATION = 150
Alert.prototype.close = function (e) {
var $this = $(this)
var selector = $this.attr('data-target')
if (!selector) {
selector = $this.attr('href')
selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7
}
var $parent = $(selector === '#' ? [] : selector)
if (e) e.preventDefault()
if (!$parent.length) {
$parent = $this.closest('.alert')
}
$parent.trigger(e = $.Event('close.bs.alert'))
if (e.isDefaultPrevented()) return
$parent.removeClass('in')
function removeElement() {
// detach from parent, fire event then clean up data
$parent.detach().trigger('closed.bs.alert').remove()
}
$.support.transition && $parent.hasClass('fade') ?
$parent
.one('bsTransitionEnd', removeElement)
.emulateTransitionEnd(Alert.TRANSITION_DURATION) :
removeElement()
}
// ALERT PLUGIN DEFINITION
// =======================
function Plugin(option) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.alert')
if (!data) $this.data('bs.alert', (data = new Alert(this)))
if (typeof option == 'string') data[option].call($this)
})
}
var old = $.fn.alert
$.fn.alert = Plugin
$.fn.alert.Constructor = Alert
// ALERT NO CONFLICT
// =================
$.fn.alert.noConflict = function () {
$.fn.alert = old
return this
}
// ALERT DATA-API
// ==============
$(document).on('click.bs.alert.data-api', dismiss, Alert.prototype.close)
}(jQuery);
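// USAGE SKETCH: ALERT
// ===================
// A minimal sketch of driving the Alert plugin above programmatically,
// assuming jQuery and this file are loaded; '#my-alert' is a placeholder id:
//
//   $('#my-alert').alert('close')   // same code path as [data-dismiss="alert"]
//
//   // close.bs.alert fires first and is cancellable; closed.bs.alert fires
//   // after the alert has been detached and removed
//   $('#my-alert').on('close.bs.alert', function (e) {
//     // e.preventDefault() here would keep the alert in the DOM
//   })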
/* ========================================================================
* Bootstrap: button.js v3.3.7
* http://getbootstrap.com/javascript/#buttons
* ========================================================================
* Copyright 2011-2016 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// BUTTON PUBLIC CLASS DEFINITION
// ==============================
var Button = function (element, options) {
this.$element = $(element)
this.options = $.extend({}, Button.DEFAULTS, options)
this.isLoading = false
}
Button.VERSION = '3.3.7'
Button.DEFAULTS = {
loadingText: 'loading...'
}
Button.prototype.setState = function (state) {
var d = 'disabled'
var $el = this.$element
var val = $el.is('input') ? 'val' : 'html'
var data = $el.data()
state += 'Text'
if (data.resetText == null) $el.data('resetText', $el[val]())
// push to event loop to allow forms to submit
setTimeout($.proxy(function () {
$el[val](data[state] == null ? this.options[state] : data[state])
if (state == 'loadingText') {
this.isLoading = true
$el.addClass(d).attr(d, d).prop(d, true)
} else if (this.isLoading) {
this.isLoading = false
$el.removeClass(d).removeAttr(d).prop(d, false)
}
}, this), 0)
}
Button.prototype.toggle = function () {
var changed = true
var $parent = this.$element.closest('[data-toggle="buttons"]')
if ($parent.length) {
var $input = this.$element.find('input')
if ($input.prop('type') == 'radio') {
if ($input.prop('checked')) changed = false
$parent.find('.active').removeClass('active')
this.$element.addClass('active')
} else if ($input.prop('type') == 'checkbox') {
if (($input.prop('checked')) !== this.$element.hasClass('active')) changed = false
this.$element.toggleClass('active')
}
$input.prop('checked', this.$element.hasClass('active'))
if (changed) $input.trigger('change')
} else {
this.$element.attr('aria-pressed', !this.$element.hasClass('active'))
this.$element.toggleClass('active')
}
}
// BUTTON PLUGIN DEFINITION
// ========================
function Plugin(option) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.button')
var options = typeof option == 'object' && option
if (!data) $this.data('bs.button', (data = new Button(this, options)))
if (option == 'toggle') data.toggle()
else if (option) data.setState(option)
})
}
var old = $.fn.button
$.fn.button = Plugin
$.fn.button.Constructor = Button
// BUTTON NO CONFLICT
// ==================
$.fn.button.noConflict = function () {
$.fn.button = old
return this
}
// BUTTON DATA-API
// ===============
$(document)
.on('click.bs.button.data-api', '[data-toggle^="button"]', function (e) {
var $btn = $(e.target).closest('.btn')
Plugin.call($btn, 'toggle')
if (!($(e.target).is('input[type="radio"], input[type="checkbox"]'))) {
        // Prevent double click on radios, and double selection (which would cancel the toggle) on checkboxes
e.preventDefault()
        // The target component still receives the focus
if ($btn.is('input,button')) $btn.trigger('focus')
else $btn.find('input:visible,button:visible').first().trigger('focus')
}
})
.on('focus.bs.button.data-api blur.bs.button.data-api', '[data-toggle^="button"]', function (e) {
$(e.target).closest('.btn').toggleClass('focus', /^focus(in)?$/.test(e.type))
})
}(jQuery);
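// USAGE SKETCH: BUTTON
// ====================
// A minimal sketch of the Button plugin above, assuming jQuery and this file
// are loaded; '#load-btn' and its data-loading-text value are placeholders:
//
//   // <button id="load-btn" class="btn btn-primary" data-loading-text="Loading...">Submit</button>
//   $('#load-btn').button('loading')   // setState: swaps in loadingText and disables the button
//   $('#load-btn').button('reset')     // setState: restores the stored resetText and re-enables it
//   $('#load-btn').button('toggle')    // toggles the .active state (see the data API above)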
/* ========================================================================
* Bootstrap: carousel.js v3.3.7
* http://getbootstrap.com/javascript/#carousel
* ========================================================================
* Copyright 2011-2016 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// CAROUSEL CLASS DEFINITION
// =========================
var Carousel = function (element, options) {
this.$element = $(element)
this.$indicators = this.$element.find('.carousel-indicators')
this.options = options
this.paused = null
this.sliding = null
this.interval = null
this.$active = null
this.$items = null
this.options.keyboard && this.$element.on('keydown.bs.carousel', $.proxy(this.keydown, this))
this.options.pause == 'hover' && !('ontouchstart' in document.documentElement) && this.$element
.on('mouseenter.bs.carousel', $.proxy(this.pause, this))
.on('mouseleave.bs.carousel', $.proxy(this.cycle, this))
}
Carousel.VERSION = '3.3.7'
Carousel.TRANSITION_DURATION = 600
Carousel.DEFAULTS = {
interval: 5000,
pause: 'hover',
wrap: true,
keyboard: true
}
Carousel.prototype.keydown = function (e) {
if (/input|textarea/i.test(e.target.tagName)) return
switch (e.which) {
case 37: this.prev(); break
case 39: this.next(); break
default: return
}
e.preventDefault()
}
Carousel.prototype.cycle = function (e) {
e || (this.paused = false)
this.interval && clearInterval(this.interval)
this.options.interval
&& !this.paused
&& (this.interval = setInterval($.proxy(this.next, this), this.options.interval))
return this
}
Carousel.prototype.getItemIndex = function (item) {
this.$items = item.parent().children('.item')
return this.$items.index(item || this.$active)
}
Carousel.prototype.getItemForDirection = function (direction, active) {
var activeIndex = this.getItemIndex(active)
var willWrap = (direction == 'prev' && activeIndex === 0)
|| (direction == 'next' && activeIndex == (this.$items.length - 1))
if (willWrap && !this.options.wrap) return active
var delta = direction == 'prev' ? -1 : 1
var itemIndex = (activeIndex + delta) % this.$items.length
return this.$items.eq(itemIndex)
}
Carousel.prototype.to = function (pos) {
var that = this
var activeIndex = this.getItemIndex(this.$active = this.$element.find('.item.active'))
if (pos > (this.$items.length - 1) || pos < 0) return
if (this.sliding) return this.$element.one('slid.bs.carousel', function () { that.to(pos) }) // yes, "slid"
if (activeIndex == pos) return this.pause().cycle()
return this.slide(pos > activeIndex ? 'next' : 'prev', this.$items.eq(pos))
}
Carousel.prototype.pause = function (e) {
e || (this.paused = true)
if (this.$element.find('.next, .prev').length && $.support.transition) {
this.$element.trigger($.support.transition.end)
this.cycle(true)
}
this.interval = clearInterval(this.interval)
return this
}
Carousel.prototype.next = function () {
if (this.sliding) return
return this.slide('next')
}
Carousel.prototype.prev = function () {
if (this.sliding) return
return this.slide('prev')
}
Carousel.prototype.slide = function (type, next) {
var $active = this.$element.find('.item.active')
var $next = next || this.getItemForDirection(type, $active)
var isCycling = this.interval
var direction = type == 'next' ? 'left' : 'right'
var that = this
if ($next.hasClass('active')) return (this.sliding = false)
var relatedTarget = $next[0]
var slideEvent = $.Event('slide.bs.carousel', {
relatedTarget: relatedTarget,
direction: direction
})
this.$element.trigger(slideEvent)
if (slideEvent.isDefaultPrevented()) return
this.sliding = true
isCycling && this.pause()
if (this.$indicators.length) {
this.$indicators.find('.active').removeClass('active')
var $nextIndicator = $(this.$indicators.children()[this.getItemIndex($next)])
$nextIndicator && $nextIndicator.addClass('active')
}
var slidEvent = $.Event('slid.bs.carousel', { relatedTarget: relatedTarget, direction: direction }) // yes, "slid"
if ($.support.transition && this.$element.hasClass('slide')) {
$next.addClass(type)
$next[0].offsetWidth // force reflow
$active.addClass(direction)
$next.addClass(direction)
$active
.one('bsTransitionEnd', function () {
$next.removeClass([type, direction].join(' ')).addClass('active')
$active.removeClass(['active', direction].join(' '))
that.sliding = false
setTimeout(function () {
that.$element.trigger(slidEvent)
}, 0)
})
.emulateTransitionEnd(Carousel.TRANSITION_DURATION)
} else {
$active.removeClass('active')
$next.addClass('active')
this.sliding = false
this.$element.trigger(slidEvent)
}
isCycling && this.cycle()
return this
}
// CAROUSEL PLUGIN DEFINITION
// ==========================
function Plugin(option) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.carousel')
var options = $.extend({}, Carousel.DEFAULTS, $this.data(), typeof option == 'object' && option)
var action = typeof option == 'string' ? option : options.slide
if (!data) $this.data('bs.carousel', (data = new Carousel(this, options)))
if (typeof option == 'number') data.to(option)
else if (action) data[action]()
else if (options.interval) data.pause().cycle()
})
}
var old = $.fn.carousel
$.fn.carousel = Plugin
$.fn.carousel.Constructor = Carousel
// CAROUSEL NO CONFLICT
// ====================
$.fn.carousel.noConflict = function () {
$.fn.carousel = old
return this
}
// CAROUSEL DATA-API
// =================
var clickHandler = function (e) {
var href
var $this = $(this)
var $target = $($this.attr('data-target') || (href = $this.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '')) // strip for ie7
if (!$target.hasClass('carousel')) return
var options = $.extend({}, $target.data(), $this.data())
var slideIndex = $this.attr('data-slide-to')
if (slideIndex) options.interval = false
Plugin.call($target, options)
if (slideIndex) {
$target.data('bs.carousel').to(slideIndex)
}
e.preventDefault()
}
$(document)
.on('click.bs.carousel.data-api', '[data-slide]', clickHandler)
.on('click.bs.carousel.data-api', '[data-slide-to]', clickHandler)
$(window).on('load', function () {
$('[data-ride="carousel"]').each(function () {
var $carousel = $(this)
Plugin.call($carousel, $carousel.data())
})
})
}(jQuery);
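// USAGE SKETCH: CAROUSEL
// ======================
// A minimal sketch of the Carousel plugin above, assuming jQuery and this file
// are loaded; '#my-carousel' is a placeholder for a .carousel element:
//
//   $('#my-carousel').carousel({ interval: 3000, pause: 'hover', wrap: true })
//   $('#my-carousel').carousel('next')   // or 'prev', 'pause', 'cycle'
//   $('#my-carousel').carousel(2)        // slide to index 2 (zero-based)
//
//   // slide.bs.carousel fires before each transition (cancellable),
//   // slid.bs.carousel after it completes
//   $('#my-carousel').on('slid.bs.carousel', function (e) {
//     console.log(e.direction, e.relatedTarget)
//   })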
/* ========================================================================
* Bootstrap: collapse.js v3.3.7
* http://getbootstrap.com/javascript/#collapse
* ========================================================================
* Copyright 2011-2016 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
/* jshint latedef: false */
+function ($) {
'use strict';
// COLLAPSE PUBLIC CLASS DEFINITION
// ================================
var Collapse = function (element, options) {
this.$element = $(element)
this.options = $.extend({}, Collapse.DEFAULTS, options)
this.$trigger = $('[data-toggle="collapse"][href="#' + element.id + '"],' +
'[data-toggle="collapse"][data-target="#' + element.id + '"]')
this.transitioning = null
if (this.options.parent) {
this.$parent = this.getParent()
} else {
this.addAriaAndCollapsedClass(this.$element, this.$trigger)
}
if (this.options.toggle) this.toggle()
}
Collapse.VERSION = '3.3.7'
Collapse.TRANSITION_DURATION = 350
Collapse.DEFAULTS = {
toggle: true
}
Collapse.prototype.dimension = function () {
var hasWidth = this.$element.hasClass('width')
return hasWidth ? 'width' : 'height'
}
Collapse.prototype.show = function () {
if (this.transitioning || this.$element.hasClass('in')) return
var activesData
var actives = this.$parent && this.$parent.children('.panel').children('.in, .collapsing')
if (actives && actives.length) {
activesData = actives.data('bs.collapse')
if (activesData && activesData.transitioning) return
}
var startEvent = $.Event('show.bs.collapse')
this.$element.trigger(startEvent)
if (startEvent.isDefaultPrevented()) return
if (actives && actives.length) {
Plugin.call(actives, 'hide')
activesData || actives.data('bs.collapse', null)
}
var dimension = this.dimension()
this.$element
.removeClass('collapse')
.addClass('collapsing')[dimension](0)
.attr('aria-expanded', true)
this.$trigger
.removeClass('collapsed')
.attr('aria-expanded', true)
this.transitioning = 1
var complete = function () {
this.$element
.removeClass('collapsing')
.addClass('collapse in')[dimension]('')
this.transitioning = 0
this.$element
.trigger('shown.bs.collapse')
}
if (!$.support.transition) return complete.call(this)
var scrollSize = $.camelCase(['scroll', dimension].join('-'))
this.$element
.one('bsTransitionEnd', $.proxy(complete, this))
.emulateTransitionEnd(Collapse.TRANSITION_DURATION)[dimension](this.$element[0][scrollSize])
}
Collapse.prototype.hide = function () {
if (this.transitioning || !this.$element.hasClass('in')) return
var startEvent = $.Event('hide.bs.collapse')
this.$element.trigger(startEvent)
if (startEvent.isDefaultPrevented()) return
var dimension = this.dimension()
    this.$element[dimension](this.$element[dimension]())[0].offsetHeight // force reflow
this.$element
.addClass('collapsing')
.removeClass('collapse in')
.attr('aria-expanded', false)
this.$trigger
.addClass('collapsed')
.attr('aria-expanded', false)
this.transitioning = 1
var complete = function () {
this.transitioning = 0
this.$element
.removeClass('collapsing')
.addClass('collapse')
.trigger('hidden.bs.collapse')
}
if (!$.support.transition) return complete.call(this)
this.$element
[dimension](0)
.one('bsTransitionEnd', $.proxy(complete, this))
.emulateTransitionEnd(Collapse.TRANSITION_DURATION)
}
Collapse.prototype.toggle = function () {
this[this.$element.hasClass('in') ? 'hide' : 'show']()
}
Collapse.prototype.getParent = function () {
return $(this.options.parent)
.find('[data-toggle="collapse"][data-parent="' + this.options.parent + '"]')
.each($.proxy(function (i, element) {
var $element = $(element)
this.addAriaAndCollapsedClass(getTargetFromTrigger($element), $element)
}, this))
.end()
}
Collapse.prototype.addAriaAndCollapsedClass = function ($element, $trigger) {
var isOpen = $element.hasClass('in')
$element.attr('aria-expanded', isOpen)
$trigger
.toggleClass('collapsed', !isOpen)
.attr('aria-expanded', isOpen)
}
function getTargetFromTrigger($trigger) {
var href
var target = $trigger.attr('data-target')
|| (href = $trigger.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '') // strip for ie7
return $(target)
}
// COLLAPSE PLUGIN DEFINITION
// ==========================
function Plugin(option) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.collapse')
var options = $.extend({}, Collapse.DEFAULTS, $this.data(), typeof option == 'object' && option)
if (!data && options.toggle && /show|hide/.test(option)) options.toggle = false
if (!data) $this.data('bs.collapse', (data = new Collapse(this, options)))
if (typeof option == 'string') data[option]()
})
}
var old = $.fn.collapse
$.fn.collapse = Plugin
$.fn.collapse.Constructor = Collapse
// COLLAPSE NO CONFLICT
// ====================
$.fn.collapse.noConflict = function () {
$.fn.collapse = old
return this
}
// COLLAPSE DATA-API
// =================
$(document).on('click.bs.collapse.data-api', '[data-toggle="collapse"]', function (e) {
var $this = $(this)
if (!$this.attr('data-target')) e.preventDefault()
var $target = getTargetFromTrigger($this)
var data = $target.data('bs.collapse')
var option = data ? 'toggle' : $this.data()
Plugin.call($target, option)
})
}(jQuery);
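// USAGE SKETCH: COLLAPSE
// ======================
// A minimal sketch of the Collapse plugin above, assuming jQuery and this file
// are loaded; '#demo' and '#accordion' are placeholder ids:
//
//   $('#demo').collapse()                                        // initialize and toggle (DEFAULTS.toggle is true)
//   $('#demo').collapse('show')                                  // or 'hide' / 'toggle'
//   $('#demo').collapse({ parent: '#accordion', toggle: false }) // accordion-style grouping
//
//   // show/hide.bs.collapse are cancellable; shown/hidden.bs.collapse fire
//   // once the transition has finished
//   $('#demo').on('shown.bs.collapse', function () { console.log('expanded') })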
/* ========================================================================
* Bootstrap: dropdown.js v3.3.7
* http://getbootstrap.com/javascript/#dropdowns
* ========================================================================
* Copyright 2011-2016 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// DROPDOWN CLASS DEFINITION
// =========================
var backdrop = '.dropdown-backdrop'
var toggle = '[data-toggle="dropdown"]'
var Dropdown = function (element) {
$(element).on('click.bs.dropdown', this.toggle)
}
Dropdown.VERSION = '3.3.7'
function getParent($this) {
var selector = $this.attr('data-target')
if (!selector) {
selector = $this.attr('href')
selector = selector && /#[A-Za-z]/.test(selector) && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7
}
var $parent = selector && $(selector)
return $parent && $parent.length ? $parent : $this.parent()
}
function clearMenus(e) {
if (e && e.which === 3) return
$(backdrop).remove()
$(toggle).each(function () {
var $this = $(this)
var $parent = getParent($this)
var relatedTarget = { relatedTarget: this }
if (!$parent.hasClass('open')) return
if (e && e.type == 'click' && /input|textarea/i.test(e.target.tagName) && $.contains($parent[0], e.target)) return
$parent.trigger(e = $.Event('hide.bs.dropdown', relatedTarget))
if (e.isDefaultPrevented()) return
$this.attr('aria-expanded', 'false')
$parent.removeClass('open').trigger($.Event('hidden.bs.dropdown', relatedTarget))
})
}
Dropdown.prototype.toggle = function (e) {
var $this = $(this)
if ($this.is('.disabled, :disabled')) return
var $parent = getParent($this)
var isActive = $parent.hasClass('open')
clearMenus()
if (!isActive) {
if ('ontouchstart' in document.documentElement && !$parent.closest('.navbar-nav').length) {
// if mobile we use a backdrop because click events don't delegate
$(document.createElement('div'))
.addClass('dropdown-backdrop')
.insertAfter($(this))
.on('click', clearMenus)
}
var relatedTarget = { relatedTarget: this }
$parent.trigger(e = $.Event('show.bs.dropdown', relatedTarget))
if (e.isDefaultPrevented()) return
$this
.trigger('focus')
.attr('aria-expanded', 'true')
$parent
.toggleClass('open')
.trigger($.Event('shown.bs.dropdown', relatedTarget))
}
return false
}
Dropdown.prototype.keydown = function (e) {
if (!/(38|40|27|32)/.test(e.which) || /input|textarea/i.test(e.target.tagName)) return
var $this = $(this)
e.preventDefault()
e.stopPropagation()
if ($this.is('.disabled, :disabled')) return
var $parent = getParent($this)
var isActive = $parent.hasClass('open')
if (!isActive && e.which != 27 || isActive && e.which == 27) {
if (e.which == 27) $parent.find(toggle).trigger('focus')
return $this.trigger('click')
}
var desc = ' li:not(.disabled):visible a'
var $items = $parent.find('.dropdown-menu' + desc)
if (!$items.length) return
var index = $items.index(e.target)
if (e.which == 38 && index > 0) index-- // up
if (e.which == 40 && index < $items.length - 1) index++ // down
if (!~index) index = 0
$items.eq(index).trigger('focus')
}
// DROPDOWN PLUGIN DEFINITION
// ==========================
function Plugin(option) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.dropdown')
if (!data) $this.data('bs.dropdown', (data = new Dropdown(this)))
if (typeof option == 'string') data[option].call($this)
})
}
var old = $.fn.dropdown
$.fn.dropdown = Plugin
$.fn.dropdown.Constructor = Dropdown
// DROPDOWN NO CONFLICT
// ====================
$.fn.dropdown.noConflict = function () {
$.fn.dropdown = old
return this
}
// APPLY TO STANDARD DROPDOWN ELEMENTS
// ===================================
$(document)
.on('click.bs.dropdown.data-api', clearMenus)
.on('click.bs.dropdown.data-api', '.dropdown form', function (e) { e.stopPropagation() })
.on('click.bs.dropdown.data-api', toggle, Dropdown.prototype.toggle)
.on('keydown.bs.dropdown.data-api', toggle, Dropdown.prototype.keydown)
.on('keydown.bs.dropdown.data-api', '.dropdown-menu', Dropdown.prototype.keydown)
}(jQuery);
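// USAGE SKETCH: DROPDOWN
// ======================
// The dropdown is normally driven by the data API registered above (a toggle
// with data-toggle="dropdown" inside its .dropdown parent). A minimal
// programmatic sketch, assuming jQuery and this file are loaded;
// '.my-dropdown' and '.my-dropdown-toggle' are placeholder selectors:
//
//   $('.my-dropdown-toggle').dropdown('toggle')   // open or close the menu
//
//   // show/shown/hide/hidden.bs.dropdown fire on the parent element, with
//   // e.relatedTarget pointing at the toggle
//   $('.my-dropdown').on('shown.bs.dropdown', function (e) {
//     console.log(e.relatedTarget)
//   })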
/* ========================================================================
* Bootstrap: modal.js v3.3.7
* http://getbootstrap.com/javascript/#modals
* ========================================================================
* Copyright 2011-2016 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// MODAL CLASS DEFINITION
// ======================
var Modal = function (element, options) {
this.options = options
this.$body = $(document.body)
this.$element = $(element)
this.$dialog = this.$element.find('.modal-dialog')
this.$backdrop = null
this.isShown = null
this.originalBodyPad = null
this.scrollbarWidth = 0
this.ignoreBackdropClick = false
if (this.options.remote) {
this.$element
.find('.modal-content')
.load(this.options.remote, $.proxy(function () {
this.$element.trigger('loaded.bs.modal')
}, this))
}
}
Modal.VERSION = '3.3.7'
Modal.TRANSITION_DURATION = 300
Modal.BACKDROP_TRANSITION_DURATION = 150
Modal.DEFAULTS = {
backdrop: true,
keyboard: true,
show: true
}
Modal.prototype.toggle = function (_relatedTarget) {
return this.isShown ? this.hide() : this.show(_relatedTarget)
}
Modal.prototype.show = function (_relatedTarget) {
var that = this
var e = $.Event('show.bs.modal', { relatedTarget: _relatedTarget })
this.$element.trigger(e)
if (this.isShown || e.isDefaultPrevented()) return
this.isShown = true
this.checkScrollbar()
this.setScrollbar()
this.$body.addClass('modal-open')
this.escape()
this.resize()
this.$element.on('click.dismiss.bs.modal', '[data-dismiss="modal"]', $.proxy(this.hide, this))
this.$dialog.on('mousedown.dismiss.bs.modal', function () {
that.$element.one('mouseup.dismiss.bs.modal', function (e) {
if ($(e.target).is(that.$element)) that.ignoreBackdropClick = true
})
})
this.backdrop(function () {
var transition = $.support.transition && that.$element.hasClass('fade')
if (!that.$element.parent().length) {
        that.$element.appendTo(that.$body) // don't move the modal's DOM position
}
that.$element
.show()
.scrollTop(0)
that.adjustDialog()
if (transition) {
that.$element[0].offsetWidth // force reflow
}
that.$element.addClass('in')
that.enforceFocus()
var e = $.Event('shown.bs.modal', { relatedTarget: _relatedTarget })
transition ?
that.$dialog // wait for modal to slide in
.one('bsTransitionEnd', function () {
that.$element.trigger('focus').trigger(e)
})
.emulateTransitionEnd(Modal.TRANSITION_DURATION) :
that.$element.trigger('focus').trigger(e)
})
}
Modal.prototype.hide = function (e) {
if (e) e.preventDefault()
e = $.Event('hide.bs.modal')
this.$element.trigger(e)
if (!this.isShown || e.isDefaultPrevented()) return
this.isShown = false
this.escape()
this.resize()
$(document).off('focusin.bs.modal')
this.$element
.removeClass('in')
.off('click.dismiss.bs.modal')
.off('mouseup.dismiss.bs.modal')
this.$dialog.off('mousedown.dismiss.bs.modal')
$.support.transition && this.$element.hasClass('fade') ?
this.$element
.one('bsTransitionEnd', $.proxy(this.hideModal, this))
.emulateTransitionEnd(Modal.TRANSITION_DURATION) :
this.hideModal()
}
Modal.prototype.enforceFocus = function () {
$(document)
.off('focusin.bs.modal') // guard against infinite focus loop
.on('focusin.bs.modal', $.proxy(function (e) {
if (document !== e.target &&
this.$element[0] !== e.target &&
!this.$element.has(e.target).length) {
this.$element.trigger('focus')
}
}, this))
}
Modal.prototype.escape = function () {
if (this.isShown && this.options.keyboard) {
this.$element.on('keydown.dismiss.bs.modal', $.proxy(function (e) {
e.which == 27 && this.hide()
}, this))
} else if (!this.isShown) {
this.$element.off('keydown.dismiss.bs.modal')
}
}
Modal.prototype.resize = function () {
if (this.isShown) {
$(window).on('resize.bs.modal', $.proxy(this.handleUpdate, this))
} else {
$(window).off('resize.bs.modal')
}
}
Modal.prototype.hideModal = function () {
var that = this
this.$element.hide()
this.backdrop(function () {
that.$body.removeClass('modal-open')
that.resetAdjustments()
that.resetScrollbar()
that.$element.trigger('hidden.bs.modal')
})
}
Modal.prototype.removeBackdrop = function () {
this.$backdrop && this.$backdrop.remove()
this.$backdrop = null
}
Modal.prototype.backdrop = function (callback) {
var that = this
var animate = this.$element.hasClass('fade') ? 'fade' : ''
if (this.isShown && this.options.backdrop) {
var doAnimate = $.support.transition && animate
this.$backdrop = $(document.createElement('div'))
.addClass('modal-backdrop ' + animate)
.appendTo(this.$body)
this.$element.on('click.dismiss.bs.modal', $.proxy(function (e) {
if (this.ignoreBackdropClick) {
this.ignoreBackdropClick = false
return
}
if (e.target !== e.currentTarget) return
this.options.backdrop == 'static'
? this.$element[0].focus()
: this.hide()
}, this))
if (doAnimate) this.$backdrop[0].offsetWidth // force reflow
this.$backdrop.addClass('in')
if (!callback) return
doAnimate ?
this.$backdrop
.one('bsTransitionEnd', callback)
.emulateTransitionEnd(Modal.BACKDROP_TRANSITION_DURATION) :
callback()
} else if (!this.isShown && this.$backdrop) {
this.$backdrop.removeClass('in')
var callbackRemove = function () {
that.removeBackdrop()
callback && callback()
}
$.support.transition && this.$element.hasClass('fade') ?
this.$backdrop
.one('bsTransitionEnd', callbackRemove)
.emulateTransitionEnd(Modal.BACKDROP_TRANSITION_DURATION) :
callbackRemove()
} else if (callback) {
callback()
}
}
  // the following methods are used to handle overflowing modals
Modal.prototype.handleUpdate = function () {
this.adjustDialog()
}
Modal.prototype.adjustDialog = function () {
var modalIsOverflowing = this.$element[0].scrollHeight > document.documentElement.clientHeight
this.$element.css({
paddingLeft: !this.bodyIsOverflowing && modalIsOverflowing ? this.scrollbarWidth : '',
paddingRight: this.bodyIsOverflowing && !modalIsOverflowing ? this.scrollbarWidth : ''
})
}
Modal.prototype.resetAdjustments = function () {
this.$element.css({
paddingLeft: '',
paddingRight: ''
})
}
Modal.prototype.checkScrollbar = function () {
var fullWindowWidth = window.innerWidth
if (!fullWindowWidth) { // workaround for missing window.innerWidth in IE8
var documentElementRect = document.documentElement.getBoundingClientRect()
fullWindowWidth = documentElementRect.right - Math.abs(documentElementRect.left)
}
this.bodyIsOverflowing = document.body.clientWidth < fullWindowWidth
this.scrollbarWidth = this.measureScrollbar()
}
Modal.prototype.setScrollbar = function () {
var bodyPad = parseInt((this.$body.css('padding-right') || 0), 10)
this.originalBodyPad = document.body.style.paddingRight || ''
if (this.bodyIsOverflowing) this.$body.css('padding-right', bodyPad + this.scrollbarWidth)
}
Modal.prototype.resetScrollbar = function () {
this.$body.css('padding-right', this.originalBodyPad)
}
Modal.prototype.measureScrollbar = function () { // thx walsh
var scrollDiv = document.createElement('div')
scrollDiv.className = 'modal-scrollbar-measure'
this.$body.append(scrollDiv)
var scrollbarWidth = scrollDiv.offsetWidth - scrollDiv.clientWidth
this.$body[0].removeChild(scrollDiv)
return scrollbarWidth
}
// MODAL PLUGIN DEFINITION
// =======================
function Plugin(option, _relatedTarget) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.modal')
var options = $.extend({}, Modal.DEFAULTS, $this.data(), typeof option == 'object' && option)
if (!data) $this.data('bs.modal', (data = new Modal(this, options)))
if (typeof option == 'string') data[option](_relatedTarget)
else if (options.show) data.show(_relatedTarget)
})
}
var old = $.fn.modal
$.fn.modal = Plugin
$.fn.modal.Constructor = Modal
// MODAL NO CONFLICT
// =================
$.fn.modal.noConflict = function () {
$.fn.modal = old
return this
}
// MODAL DATA-API
// ==============
$(document).on('click.bs.modal.data-api', '[data-toggle="modal"]', function (e) {
var $this = $(this)
var href = $this.attr('href')
var $target = $($this.attr('data-target') || (href && href.replace(/.*(?=#[^\s]+$)/, ''))) // strip for ie7
var option = $target.data('bs.modal') ? 'toggle' : $.extend({ remote: !/#/.test(href) && href }, $target.data(), $this.data())
if ($this.is('a')) e.preventDefault()
$target.one('show.bs.modal', function (showEvent) {
if (showEvent.isDefaultPrevented()) return // only register focus restorer if modal will actually get shown
$target.one('hidden.bs.modal', function () {
$this.is(':visible') && $this.trigger('focus')
})
})
Plugin.call($target, option, this)
})
}(jQuery);
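/* Usage sketch for the modal plugin above. The "#demo-modal" selector and its
 * markup are assumptions made for illustration; the method names, options and
 * event names are the ones defined by the plugin itself.
 *
 *   // show a modal programmatically, with the Esc key disabled
 *   $('#demo-modal').modal({ keyboard: false })
 *
 *   // move focus once the opening transition has finished
 *   $('#demo-modal').on('shown.bs.modal', function () {
 *     $(this).find('[autofocus]').trigger('focus')
 *   })
 *
 *   // hide it again later
 *   $('#demo-modal').modal('hide')
 */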
/* ========================================================================
* Bootstrap: tooltip.js v3.3.7
* http://getbootstrap.com/javascript/#tooltip
* Inspired by the original jQuery.tipsy by Jason Frame
* ========================================================================
* Copyright 2011-2016 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// TOOLTIP PUBLIC CLASS DEFINITION
// ===============================
var Tooltip = function (element, options) {
this.type = null
this.options = null
this.enabled = null
this.timeout = null
this.hoverState = null
this.$element = null
this.inState = null
this.init('tooltip', element, options)
}
Tooltip.VERSION = '3.3.7'
Tooltip.TRANSITION_DURATION = 150
Tooltip.DEFAULTS = {
animation: true,
placement: 'top',
selector: false,
template: '<div class="tooltip" role="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>',
trigger: 'hover focus',
title: '',
delay: 0,
html: false,
container: false,
viewport: {
selector: 'body',
padding: 0
}
}
Tooltip.prototype.init = function (type, element, options) {
this.enabled = true
this.type = type
this.$element = $(element)
this.options = this.getOptions(options)
this.$viewport = this.options.viewport && $($.isFunction(this.options.viewport) ? this.options.viewport.call(this, this.$element) : (this.options.viewport.selector || this.options.viewport))
this.inState = { click: false, hover: false, focus: false }
if (this.$element[0] instanceof document.constructor && !this.options.selector) {
throw new Error('`selector` option must be specified when initializing ' + this.type + ' on the window.document object!')
}
var triggers = this.options.trigger.split(' ')
for (var i = triggers.length; i--;) {
var trigger = triggers[i]
if (trigger == 'click') {
this.$element.on('click.' + this.type, this.options.selector, $.proxy(this.toggle, this))
} else if (trigger != 'manual') {
var eventIn = trigger == 'hover' ? 'mouseenter' : 'focusin'
var eventOut = trigger == 'hover' ? 'mouseleave' : 'focusout'
this.$element.on(eventIn + '.' + this.type, this.options.selector, $.proxy(this.enter, this))
this.$element.on(eventOut + '.' + this.type, this.options.selector, $.proxy(this.leave, this))
}
}
this.options.selector ?
(this._options = $.extend({}, this.options, { trigger: 'manual', selector: '' })) :
this.fixTitle()
}
Tooltip.prototype.getDefaults = function () {
return Tooltip.DEFAULTS
}
Tooltip.prototype.getOptions = function (options) {
options = $.extend({}, this.getDefaults(), this.$element.data(), options)
if (options.delay && typeof options.delay == 'number') {
options.delay = {
show: options.delay,
hide: options.delay
}
}
return options
}
Tooltip.prototype.getDelegateOptions = function () {
var options = {}
var defaults = this.getDefaults()
this._options && $.each(this._options, function (key, value) {
if (defaults[key] != value) options[key] = value
})
return options
}
Tooltip.prototype.enter = function (obj) {
var self = obj instanceof this.constructor ?
obj : $(obj.currentTarget).data('bs.' + this.type)
if (!self) {
self = new this.constructor(obj.currentTarget, this.getDelegateOptions())
$(obj.currentTarget).data('bs.' + this.type, self)
}
if (obj instanceof $.Event) {
self.inState[obj.type == 'focusin' ? 'focus' : 'hover'] = true
}
if (self.tip().hasClass('in') || self.hoverState == 'in') {
self.hoverState = 'in'
return
}
clearTimeout(self.timeout)
self.hoverState = 'in'
if (!self.options.delay || !self.options.delay.show) return self.show()
self.timeout = setTimeout(function () {
if (self.hoverState == 'in') self.show()
}, self.options.delay.show)
}
Tooltip.prototype.isInStateTrue = function () {
for (var key in this.inState) {
if (this.inState[key]) return true
}
return false
}
Tooltip.prototype.leave = function (obj) {
var self = obj instanceof this.constructor ?
obj : $(obj.currentTarget).data('bs.' + this.type)
if (!self) {
self = new this.constructor(obj.currentTarget, this.getDelegateOptions())
$(obj.currentTarget).data('bs.' + this.type, self)
}
if (obj instanceof $.Event) {
self.inState[obj.type == 'focusout' ? 'focus' : 'hover'] = false
}
if (self.isInStateTrue()) return
clearTimeout(self.timeout)
self.hoverState = 'out'
if (!self.options.delay || !self.options.delay.hide) return self.hide()
self.timeout = setTimeout(function () {
if (self.hoverState == 'out') self.hide()
}, self.options.delay.hide)
}
Tooltip.prototype.show = function () {
var e = $.Event('show.bs.' + this.type)
if (this.hasContent() && this.enabled) {
this.$element.trigger(e)
var inDom = $.contains(this.$element[0].ownerDocument.documentElement, this.$element[0])
if (e.isDefaultPrevented() || !inDom) return
var that = this
var $tip = this.tip()
var tipId = this.getUID(this.type)
this.setContent()
$tip.attr('id', tipId)
this.$element.attr('aria-describedby', tipId)
if (this.options.animation) $tip.addClass('fade')
var placement = typeof this.options.placement == 'function' ?
this.options.placement.call(this, $tip[0], this.$element[0]) :
this.options.placement
var autoToken = /\s?auto?\s?/i
var autoPlace = autoToken.test(placement)
if (autoPlace) placement = placement.replace(autoToken, '') || 'top'
$tip
.detach()
.css({ top: 0, left: 0, display: 'block' })
.addClass(placement)
.data('bs.' + this.type, this)
this.options.container ? $tip.appendTo(this.options.container) : $tip.insertAfter(this.$element)
this.$element.trigger('inserted.bs.' + this.type)
var pos = this.getPosition()
var actualWidth = $tip[0].offsetWidth
var actualHeight = $tip[0].offsetHeight
if (autoPlace) {
var orgPlacement = placement
var viewportDim = this.getPosition(this.$viewport)
placement = placement == 'bottom' && pos.bottom + actualHeight > viewportDim.bottom ? 'top' :
placement == 'top' && pos.top - actualHeight < viewportDim.top ? 'bottom' :
placement == 'right' && pos.right + actualWidth > viewportDim.width ? 'left' :
placement == 'left' && pos.left - actualWidth < viewportDim.left ? 'right' :
placement
$tip
.removeClass(orgPlacement)
.addClass(placement)
}
var calculatedOffset = this.getCalculatedOffset(placement, pos, actualWidth, actualHeight)
this.applyPlacement(calculatedOffset, placement)
var complete = function () {
var prevHoverState = that.hoverState
that.$element.trigger('shown.bs.' + that.type)
that.hoverState = null
if (prevHoverState == 'out') that.leave(that)
}
$.support.transition && this.$tip.hasClass('fade') ?
$tip
.one('bsTransitionEnd', complete)
.emulateTransitionEnd(Tooltip.TRANSITION_DURATION) :
complete()
}
}
Tooltip.prototype.applyPlacement = function (offset, placement) {
var $tip = this.tip()
var width = $tip[0].offsetWidth
var height = $tip[0].offsetHeight
// manually read margins because getBoundingClientRect includes difference
var marginTop = parseInt($tip.css('margin-top'), 10)
var marginLeft = parseInt($tip.css('margin-left'), 10)
// we must check for NaN for ie 8/9
if (isNaN(marginTop)) marginTop = 0
if (isNaN(marginLeft)) marginLeft = 0
offset.top += marginTop
offset.left += marginLeft
// $.fn.offset doesn't round pixel values
// so we use setOffset directly with our own function B-0
$.offset.setOffset($tip[0], $.extend({
using: function (props) {
$tip.css({
top: Math.round(props.top),
left: Math.round(props.left)
})
}
}, offset), 0)
$tip.addClass('in')
// check to see if placing tip in new offset caused the tip to resize itself
var actualWidth = $tip[0].offsetWidth
var actualHeight = $tip[0].offsetHeight
if (placement == 'top' && actualHeight != height) {
offset.top = offset.top + height - actualHeight
}
var delta = this.getViewportAdjustedDelta(placement, offset, actualWidth, actualHeight)
if (delta.left) offset.left += delta.left
else offset.top += delta.top
var isVertical = /top|bottom/.test(placement)
var arrowDelta = isVertical ? delta.left * 2 - width + actualWidth : delta.top * 2 - height + actualHeight
var arrowOffsetPosition = isVertical ? 'offsetWidth' : 'offsetHeight'
$tip.offset(offset)
this.replaceArrow(arrowDelta, $tip[0][arrowOffsetPosition], isVertical)
}
Tooltip.prototype.replaceArrow = function (delta, dimension, isVertical) {
this.arrow()
.css(isVertical ? 'left' : 'top', 50 * (1 - delta / dimension) + '%')
.css(isVertical ? 'top' : 'left', '')
}
Tooltip.prototype.setContent = function () {
var $tip = this.tip()
var title = this.getTitle()
$tip.find('.tooltip-inner')[this.options.html ? 'html' : 'text'](title)
$tip.removeClass('fade in top bottom left right')
}
Tooltip.prototype.hide = function (callback) {
var that = this
var $tip = $(this.$tip)
var e = $.Event('hide.bs.' + this.type)
function complete() {
if (that.hoverState != 'in') $tip.detach()
if (that.$element) { // TODO: Check whether guarding this code with this `if` is really necessary.
that.$element
.removeAttr('aria-describedby')
.trigger('hidden.bs.' + that.type)
}
callback && callback()
}
this.$element.trigger(e)
if (e.isDefaultPrevented()) return
$tip.removeClass('in')
$.support.transition && $tip.hasClass('fade') ?
$tip
.one('bsTransitionEnd', complete)
.emulateTransitionEnd(Tooltip.TRANSITION_DURATION) :
complete()
this.hoverState = null
return this
}
Tooltip.prototype.fixTitle = function () {
var $e = this.$element
if ($e.attr('title') || typeof $e.attr('data-original-title') != 'string') {
$e.attr('data-original-title', $e.attr('title') || '').attr('title', '')
}
}
Tooltip.prototype.hasContent = function () {
return this.getTitle()
}
Tooltip.prototype.getPosition = function ($element) {
$element = $element || this.$element
var el = $element[0]
var isBody = el.tagName == 'BODY'
var elRect = el.getBoundingClientRect()
if (elRect.width == null) {
// width and height are missing in IE8, so compute them manually; see https://github.com/twbs/bootstrap/issues/14093
elRect = $.extend({}, elRect, { width: elRect.right - elRect.left, height: elRect.bottom - elRect.top })
}
var isSvg = window.SVGElement && el instanceof window.SVGElement
// Avoid using $.offset() on SVGs since it gives incorrect results in jQuery 3.
// See https://github.com/twbs/bootstrap/issues/20280
var elOffset = isBody ? { top: 0, left: 0 } : (isSvg ? null : $element.offset())
var scroll = { scroll: isBody ? document.documentElement.scrollTop || document.body.scrollTop : $element.scrollTop() }
var outerDims = isBody ? { width: $(window).width(), height: $(window).height() } : null
return $.extend({}, elRect, scroll, outerDims, elOffset)
}
Tooltip.prototype.getCalculatedOffset = function (placement, pos, actualWidth, actualHeight) {
return placement == 'bottom' ? { top: pos.top + pos.height, left: pos.left + pos.width / 2 - actualWidth / 2 } :
placement == 'top' ? { top: pos.top - actualHeight, left: pos.left + pos.width / 2 - actualWidth / 2 } :
placement == 'left' ? { top: pos.top + pos.height / 2 - actualHeight / 2, left: pos.left - actualWidth } :
/* placement == 'right' */ { top: pos.top + pos.height / 2 - actualHeight / 2, left: pos.left + pos.width }
}
Tooltip.prototype.getViewportAdjustedDelta = function (placement, pos, actualWidth, actualHeight) {
var delta = { top: 0, left: 0 }
if (!this.$viewport) return delta
var viewportPadding = this.options.viewport && this.options.viewport.padding || 0
var viewportDimensions = this.getPosition(this.$viewport)
if (/right|left/.test(placement)) {
var topEdgeOffset = pos.top - viewportPadding - viewportDimensions.scroll
var bottomEdgeOffset = pos.top + viewportPadding - viewportDimensions.scroll + actualHeight
if (topEdgeOffset < viewportDimensions.top) { // top overflow
delta.top = viewportDimensions.top - topEdgeOffset
} else if (bottomEdgeOffset > viewportDimensions.top + viewportDimensions.height) { // bottom overflow
delta.top = viewportDimensions.top + viewportDimensions.height - bottomEdgeOffset
}
} else {
var leftEdgeOffset = pos.left - viewportPadding
var rightEdgeOffset = pos.left + viewportPadding + actualWidth
if (leftEdgeOffset < viewportDimensions.left) { // left overflow
delta.left = viewportDimensions.left - leftEdgeOffset
} else if (rightEdgeOffset > viewportDimensions.right) { // right overflow
delta.left = viewportDimensions.left + viewportDimensions.width - rightEdgeOffset
}
}
return delta
}
Tooltip.prototype.getTitle = function () {
var title
var $e = this.$element
var o = this.options
title = $e.attr('data-original-title')
|| (typeof o.title == 'function' ? o.title.call($e[0]) : o.title)
return title
}
Tooltip.prototype.getUID = function (prefix) {
do prefix += ~~(Math.random() * 1000000)
while (document.getElementById(prefix))
return prefix
}
Tooltip.prototype.tip = function () {
if (!this.$tip) {
this.$tip = $(this.options.template)
if (this.$tip.length != 1) {
throw new Error(this.type + ' `template` option must consist of exactly 1 top-level element!')
}
}
return this.$tip
}
Tooltip.prototype.arrow = function () {
return (this.$arrow = this.$arrow || this.tip().find('.tooltip-arrow'))
}
Tooltip.prototype.enable = function () {
this.enabled = true
}
Tooltip.prototype.disable = function () {
this.enabled = false
}
Tooltip.prototype.toggleEnabled = function () {
this.enabled = !this.enabled
}
Tooltip.prototype.toggle = function (e) {
var self = this
if (e) {
self = $(e.currentTarget).data('bs.' + this.type)
if (!self) {
self = new this.constructor(e.currentTarget, this.getDelegateOptions())
$(e.currentTarget).data('bs.' + this.type, self)
}
}
if (e) {
self.inState.click = !self.inState.click
if (self.isInStateTrue()) self.enter(self)
else self.leave(self)
} else {
self.tip().hasClass('in') ? self.leave(self) : self.enter(self)
}
}
Tooltip.prototype.destroy = function () {
var that = this
clearTimeout(this.timeout)
this.hide(function () {
that.$element.off('.' + that.type).removeData('bs.' + that.type)
if (that.$tip) {
that.$tip.detach()
}
that.$tip = null
that.$arrow = null
that.$viewport = null
that.$element = null
})
}
// TOOLTIP PLUGIN DEFINITION
// =========================
function Plugin(option) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.tooltip')
var options = typeof option == 'object' && option
if (!data && /destroy|hide/.test(option)) return
if (!data) $this.data('bs.tooltip', (data = new Tooltip(this, options)))
if (typeof option == 'string') data[option]()
})
}
var old = $.fn.tooltip
$.fn.tooltip = Plugin
$.fn.tooltip.Constructor = Tooltip
// TOOLTIP NO CONFLICT
// ===================
$.fn.tooltip.noConflict = function () {
$.fn.tooltip = old
return this
}
}(jQuery);
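/* Usage sketch for the tooltip plugin above. Tooltips are opt-in, so they must
 * be initialized explicitly; the selectors below are illustrative assumptions.
 *
 *   // initialize every element that declares a tooltip, with a short delay
 *   $('[data-toggle="tooltip"]').tooltip({
 *     container: 'body',
 *     delay: { show: 300, hide: 100 }
 *   })
 *
 *   // show or tear one down programmatically
 *   $('#help-icon').tooltip('show')
 *   $('#help-icon').tooltip('destroy')
 */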
/* ========================================================================
* Bootstrap: popover.js v3.3.7
* http://getbootstrap.com/javascript/#popovers
* ========================================================================
* Copyright 2011-2016 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// POPOVER PUBLIC CLASS DEFINITION
// ===============================
var Popover = function (element, options) {
this.init('popover', element, options)
}
if (!$.fn.tooltip) throw new Error('Popover requires tooltip.js')
Popover.VERSION = '3.3.7'
Popover.DEFAULTS = $.extend({}, $.fn.tooltip.Constructor.DEFAULTS, {
placement: 'right',
trigger: 'click',
content: '',
template: '<div class="popover" role="tooltip"><div class="arrow"></div><h3 class="popover-title"></h3><div class="popover-content"></div></div>'
})
// NOTE: POPOVER EXTENDS tooltip.js
// ================================
Popover.prototype = $.extend({}, $.fn.tooltip.Constructor.prototype)
Popover.prototype.constructor = Popover
Popover.prototype.getDefaults = function () {
return Popover.DEFAULTS
}
Popover.prototype.setContent = function () {
var $tip = this.tip()
var title = this.getTitle()
var content = this.getContent()
$tip.find('.popover-title')[this.options.html ? 'html' : 'text'](title)
$tip.find('.popover-content').children().detach().end()[ // we use append for html objects to maintain js events
this.options.html ? (typeof content == 'string' ? 'html' : 'append') : 'text'
](content)
$tip.removeClass('fade top bottom left right in')
    // IE8 doesn't support hiding via the `:empty` pseudo selector, so we have
    // to do it manually by checking the contents.
if (!$tip.find('.popover-title').html()) $tip.find('.popover-title').hide()
}
Popover.prototype.hasContent = function () {
return this.getTitle() || this.getContent()
}
Popover.prototype.getContent = function () {
var $e = this.$element
var o = this.options
return $e.attr('data-content')
|| (typeof o.content == 'function' ?
o.content.call($e[0]) :
o.content)
}
Popover.prototype.arrow = function () {
return (this.$arrow = this.$arrow || this.tip().find('.arrow'))
}
// POPOVER PLUGIN DEFINITION
// =========================
function Plugin(option) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.popover')
var options = typeof option == 'object' && option
if (!data && /destroy|hide/.test(option)) return
if (!data) $this.data('bs.popover', (data = new Popover(this, options)))
if (typeof option == 'string') data[option]()
})
}
var old = $.fn.popover
$.fn.popover = Plugin
$.fn.popover.Constructor = Popover
// POPOVER NO CONFLICT
// ===================
$.fn.popover.noConflict = function () {
$.fn.popover = old
return this
}
}(jQuery);
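/* Usage sketch for the popover plugin above (it extends tooltip.js, so
 * tooltip.js must be loaded first, as the guard above enforces). The selector
 * and the data-summary attribute are illustrative assumptions.
 *
 *   $('[data-toggle="popover"]').popover({
 *     placement: 'bottom',
 *     trigger: 'focus',
 *     content: function () {            // content may be a string or a function
 *       return $(this).attr('data-summary') || 'No details available'
 *     }
 *   })
 */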
/* ========================================================================
* Bootstrap: scrollspy.js v3.3.7
* http://getbootstrap.com/javascript/#scrollspy
* ========================================================================
* Copyright 2011-2016 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// SCROLLSPY CLASS DEFINITION
// ==========================
function ScrollSpy(element, options) {
this.$body = $(document.body)
this.$scrollElement = $(element).is(document.body) ? $(window) : $(element)
this.options = $.extend({}, ScrollSpy.DEFAULTS, options)
this.selector = (this.options.target || '') + ' .nav li > a'
this.offsets = []
this.targets = []
this.activeTarget = null
this.scrollHeight = 0
this.$scrollElement.on('scroll.bs.scrollspy', $.proxy(this.process, this))
this.refresh()
this.process()
}
ScrollSpy.VERSION = '3.3.7'
ScrollSpy.DEFAULTS = {
offset: 10
}
ScrollSpy.prototype.getScrollHeight = function () {
return this.$scrollElement[0].scrollHeight || Math.max(this.$body[0].scrollHeight, document.documentElement.scrollHeight)
}
ScrollSpy.prototype.refresh = function () {
var that = this
var offsetMethod = 'offset'
var offsetBase = 0
this.offsets = []
this.targets = []
this.scrollHeight = this.getScrollHeight()
if (!$.isWindow(this.$scrollElement[0])) {
offsetMethod = 'position'
offsetBase = this.$scrollElement.scrollTop()
}
this.$body
.find(this.selector)
.map(function () {
var $el = $(this)
var href = $el.data('target') || $el.attr('href')
var $href = /^#./.test(href) && $(href)
return ($href
&& $href.length
&& $href.is(':visible')
&& [[$href[offsetMethod]().top + offsetBase, href]]) || null
})
.sort(function (a, b) { return a[0] - b[0] })
.each(function () {
that.offsets.push(this[0])
that.targets.push(this[1])
})
}
ScrollSpy.prototype.process = function () {
var scrollTop = this.$scrollElement.scrollTop() + this.options.offset
var scrollHeight = this.getScrollHeight()
var maxScroll = this.options.offset + scrollHeight - this.$scrollElement.height()
var offsets = this.offsets
var targets = this.targets
var activeTarget = this.activeTarget
var i
if (this.scrollHeight != scrollHeight) {
this.refresh()
}
if (scrollTop >= maxScroll) {
return activeTarget != (i = targets[targets.length - 1]) && this.activate(i)
}
if (activeTarget && scrollTop < offsets[0]) {
this.activeTarget = null
return this.clear()
}
for (i = offsets.length; i--;) {
activeTarget != targets[i]
&& scrollTop >= offsets[i]
&& (offsets[i + 1] === undefined || scrollTop < offsets[i + 1])
&& this.activate(targets[i])
}
}
ScrollSpy.prototype.activate = function (target) {
this.activeTarget = target
this.clear()
var selector = this.selector +
'[data-target="' + target + '"],' +
this.selector + '[href="' + target + '"]'
var active = $(selector)
.parents('li')
.addClass('active')
if (active.parent('.dropdown-menu').length) {
active = active
.closest('li.dropdown')
.addClass('active')
}
active.trigger('activate.bs.scrollspy')
}
ScrollSpy.prototype.clear = function () {
$(this.selector)
.parentsUntil(this.options.target, '.active')
.removeClass('active')
}
// SCROLLSPY PLUGIN DEFINITION
// ===========================
function Plugin(option) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.scrollspy')
var options = typeof option == 'object' && option
if (!data) $this.data('bs.scrollspy', (data = new ScrollSpy(this, options)))
if (typeof option == 'string') data[option]()
})
}
var old = $.fn.scrollspy
$.fn.scrollspy = Plugin
$.fn.scrollspy.Constructor = ScrollSpy
// SCROLLSPY NO CONFLICT
// =====================
$.fn.scrollspy.noConflict = function () {
$.fn.scrollspy = old
return this
}
// SCROLLSPY DATA-API
// ==================
$(window).on('load.bs.scrollspy.data-api', function () {
$('[data-spy="scroll"]').each(function () {
var $spy = $(this)
Plugin.call($spy, $spy.data())
})
})
}(jQuery);
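/* Usage sketch for scrollspy. "#site-nav" is an assumed id for a nav element
 * that contains ".nav li > a" links pointing at section ids on the page.
 *
 *   $('body').scrollspy({ target: '#site-nav', offset: 80 })
 *
 *   // after sections are added or removed, recompute the cached offsets
 *   $('body').scrollspy('refresh')
 *
 *   // the activate event bubbles up from the newly active nav item
 *   $('#site-nav').on('activate.bs.scrollspy', function () {
 *     // react to the highlighted item changing here
 *   })
 */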
/* ========================================================================
* Bootstrap: tab.js v3.3.7
* http://getbootstrap.com/javascript/#tabs
* ========================================================================
* Copyright 2011-2016 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// TAB CLASS DEFINITION
// ====================
var Tab = function (element) {
// jscs:disable requireDollarBeforejQueryAssignment
this.element = $(element)
// jscs:enable requireDollarBeforejQueryAssignment
}
Tab.VERSION = '3.3.7'
Tab.TRANSITION_DURATION = 150
Tab.prototype.show = function () {
var $this = this.element
var $ul = $this.closest('ul:not(.dropdown-menu)')
var selector = $this.data('target')
if (!selector) {
selector = $this.attr('href')
selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7
}
if ($this.parent('li').hasClass('active')) return
var $previous = $ul.find('.active:last a')
var hideEvent = $.Event('hide.bs.tab', {
relatedTarget: $this[0]
})
var showEvent = $.Event('show.bs.tab', {
relatedTarget: $previous[0]
})
$previous.trigger(hideEvent)
$this.trigger(showEvent)
if (showEvent.isDefaultPrevented() || hideEvent.isDefaultPrevented()) return
var $target = $(selector)
this.activate($this.closest('li'), $ul)
this.activate($target, $target.parent(), function () {
$previous.trigger({
type: 'hidden.bs.tab',
relatedTarget: $this[0]
})
$this.trigger({
type: 'shown.bs.tab',
relatedTarget: $previous[0]
})
})
}
Tab.prototype.activate = function (element, container, callback) {
var $active = container.find('> .active')
var transition = callback
&& $.support.transition
&& ($active.length && $active.hasClass('fade') || !!container.find('> .fade').length)
function next() {
$active
.removeClass('active')
.find('> .dropdown-menu > .active')
.removeClass('active')
.end()
.find('[data-toggle="tab"]')
.attr('aria-expanded', false)
element
.addClass('active')
.find('[data-toggle="tab"]')
.attr('aria-expanded', true)
if (transition) {
element[0].offsetWidth // reflow for transition
element.addClass('in')
} else {
element.removeClass('fade')
}
if (element.parent('.dropdown-menu').length) {
element
.closest('li.dropdown')
.addClass('active')
.end()
.find('[data-toggle="tab"]')
.attr('aria-expanded', true)
}
callback && callback()
}
$active.length && transition ?
$active
.one('bsTransitionEnd', next)
.emulateTransitionEnd(Tab.TRANSITION_DURATION) :
next()
$active.removeClass('in')
}
// TAB PLUGIN DEFINITION
// =====================
function Plugin(option) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.tab')
if (!data) $this.data('bs.tab', (data = new Tab(this)))
if (typeof option == 'string') data[option]()
})
}
var old = $.fn.tab
$.fn.tab = Plugin
$.fn.tab.Constructor = Tab
// TAB NO CONFLICT
// ===============
$.fn.tab.noConflict = function () {
$.fn.tab = old
return this
}
// TAB DATA-API
// ============
var clickHandler = function (e) {
e.preventDefault()
Plugin.call($(this), 'show')
}
$(document)
.on('click.bs.tab.data-api', '[data-toggle="tab"]', clickHandler)
.on('click.bs.tab.data-api', '[data-toggle="pill"]', clickHandler)
}(jQuery);
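/* Usage sketch for the tab plugin. "#doc-tabs" is assumed to be a standard
 * .nav-tabs list whose links point at .tab-pane ids.
 *
 *   // activate a particular tab by the pane it points at
 *   $('#doc-tabs a[href="#settings"]').tab('show')
 *
 *   // react once a tab has finished being shown
 *   $('#doc-tabs a[data-toggle="tab"]').on('shown.bs.tab', function (e) {
 *     // e.target is the newly active tab link, e.relatedTarget the previous one
 *   })
 */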
/* ========================================================================
* Bootstrap: affix.js v3.3.7
* http://getbootstrap.com/javascript/#affix
* ========================================================================
* Copyright 2011-2016 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// AFFIX CLASS DEFINITION
// ======================
var Affix = function (element, options) {
this.options = $.extend({}, Affix.DEFAULTS, options)
this.$target = $(this.options.target)
.on('scroll.bs.affix.data-api', $.proxy(this.checkPosition, this))
.on('click.bs.affix.data-api', $.proxy(this.checkPositionWithEventLoop, this))
this.$element = $(element)
this.affixed = null
this.unpin = null
this.pinnedOffset = null
this.checkPosition()
}
Affix.VERSION = '3.3.7'
Affix.RESET = 'affix affix-top affix-bottom'
Affix.DEFAULTS = {
offset: 0,
target: window
}
Affix.prototype.getState = function (scrollHeight, height, offsetTop, offsetBottom) {
var scrollTop = this.$target.scrollTop()
var position = this.$element.offset()
var targetHeight = this.$target.height()
if (offsetTop != null && this.affixed == 'top') return scrollTop < offsetTop ? 'top' : false
if (this.affixed == 'bottom') {
if (offsetTop != null) return (scrollTop + this.unpin <= position.top) ? false : 'bottom'
return (scrollTop + targetHeight <= scrollHeight - offsetBottom) ? false : 'bottom'
}
var initializing = this.affixed == null
var colliderTop = initializing ? scrollTop : position.top
var colliderHeight = initializing ? targetHeight : height
if (offsetTop != null && scrollTop <= offsetTop) return 'top'
if (offsetBottom != null && (colliderTop + colliderHeight >= scrollHeight - offsetBottom)) return 'bottom'
return false
}
Affix.prototype.getPinnedOffset = function () {
if (this.pinnedOffset) return this.pinnedOffset
this.$element.removeClass(Affix.RESET).addClass('affix')
var scrollTop = this.$target.scrollTop()
var position = this.$element.offset()
return (this.pinnedOffset = position.top - scrollTop)
}
Affix.prototype.checkPositionWithEventLoop = function () {
setTimeout($.proxy(this.checkPosition, this), 1)
}
Affix.prototype.checkPosition = function () {
if (!this.$element.is(':visible')) return
var height = this.$element.height()
var offset = this.options.offset
var offsetTop = offset.top
var offsetBottom = offset.bottom
var scrollHeight = Math.max($(document).height(), $(document.body).height())
if (typeof offset != 'object') offsetBottom = offsetTop = offset
if (typeof offsetTop == 'function') offsetTop = offset.top(this.$element)
if (typeof offsetBottom == 'function') offsetBottom = offset.bottom(this.$element)
var affix = this.getState(scrollHeight, height, offsetTop, offsetBottom)
if (this.affixed != affix) {
if (this.unpin != null) this.$element.css('top', '')
var affixType = 'affix' + (affix ? '-' + affix : '')
var e = $.Event(affixType + '.bs.affix')
this.$element.trigger(e)
if (e.isDefaultPrevented()) return
this.affixed = affix
this.unpin = affix == 'bottom' ? this.getPinnedOffset() : null
this.$element
.removeClass(Affix.RESET)
.addClass(affixType)
.trigger(affixType.replace('affix', 'affixed') + '.bs.affix')
}
if (affix == 'bottom') {
this.$element.offset({
top: scrollHeight - height - offsetBottom
})
}
}
// AFFIX PLUGIN DEFINITION
// =======================
function Plugin(option) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.affix')
var options = typeof option == 'object' && option
if (!data) $this.data('bs.affix', (data = new Affix(this, options)))
if (typeof option == 'string') data[option]()
})
}
var old = $.fn.affix
$.fn.affix = Plugin
$.fn.affix.Constructor = Affix
// AFFIX NO CONFLICT
// =================
$.fn.affix.noConflict = function () {
$.fn.affix = old
return this
}
// AFFIX DATA-API
// ==============
$(window).on('load', function () {
$('[data-spy="affix"]').each(function () {
var $spy = $(this)
var data = $spy.data()
data.offset = data.offset || {}
if (data.offsetBottom != null) data.offset.bottom = data.offsetBottom
if (data.offsetTop != null) data.offset.top = data.offsetTop
Plugin.call($spy, data)
})
})
}(jQuery);
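/* Usage sketch for affix. "#sidebar" and the numbers are assumptions; note
 * that checkPosition() above accepts functions for the top/bottom offsets.
 *
 *   $('#sidebar').affix({
 *     offset: {
 *       top: 120,
 *       bottom: function () {
 *         return $('.footer').outerHeight(true)   // re-evaluated on every check
 *       }
 *     }
 *   })
 *
 *   $('#sidebar').on('affixed.bs.affix', function () {
 *     // the element has just received the plain .affix (fixed) class
 *   })
 */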
!function(t){if("object"==typeof exports)module.exports=t();else if("function"==typeof define&&define.amd)define(t);else{var r;"undefined"!=typeof window?r=window:"undefined"!=typeof global?r=global:"undefined"!=typeof self&&(r=self),r.GeoPattern=t()}}(function(){return function t(r,s,e){function i(n,a){if(!s[n]){if(!r[n]){var h="function"==typeof require&&require;if(!a&&h)return h(n,!0);if(o)return o(n,!0);throw new Error("Cannot find module '"+n+"'")}var l=s[n]={exports:{}};r[n][0].call(l.exports,function(t){var s=r[n][1][t];return i(s?s:t)},l,l.exports,t,r,s,e)}return s[n].exports}for(var o="function"==typeof require&&require,n=0;n<e.length;n++)i(e[n]);return i}({1:[function(t,r){!function(s){"use strict";function e(t){return function(r,s){return"object"==typeof r&&(s=r,r=null),(null===r||void 0===r)&&(r=(new Date).toString()),s||(s={}),t.call(this,r,s)}}var i=t("./lib/pattern"),o=r.exports={generate:e(function(t,r){return new i(t,r)})};s&&(s.fn.geopattern=e(function(t,r){return this.each(function(){var e=s(this).attr("data-title-sha");e&&(r=s.extend({hash:e},r));var i=o.generate(t,r);s(this).css("background-image",i.toDataUrl())})}))}("undefined"!=typeof jQuery?jQuery:null)},{"./lib/pattern":3}],2:[function(t,r){"use strict";function s(t){var r=/^#?([a-f\d])([a-f\d])([a-f\d])$/i;t=t.replace(r,function(t,r,s,e){return r+r+s+s+e+e});var s=/^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(t);return s?{r:parseInt(s[1],16),g:parseInt(s[2],16),b:parseInt(s[3],16)}:null}function e(t){return"#"+["r","g","b"].map(function(r){return("0"+t[r].toString(16)).slice(-2)}).join("")}function i(t){var r=t.r,s=t.g,e=t.b;r/=255,s/=255,e/=255;var i,o,n=Math.max(r,s,e),a=Math.min(r,s,e),h=(n+a)/2;if(n===a)i=o=0;else{var l=n-a;switch(o=h>.5?l/(2-n-a):l/(n+a),n){case r:i=(s-e)/l+(e>s?6:0);break;case s:i=(e-r)/l+2;break;case e:i=(r-s)/l+4}i/=6}return{h:i,s:o,l:h}}function o(t){function r(t,r,s){return 0>s&&(s+=1),s>1&&(s-=1),1/6>s?t+6*(r-t)*s:.5>s?r:2/3>s?t+(r-t)*(2/3-s)*6:t}var s,e,i,o=t.h,n=t.s,a=t.l;if(0===n)s=e=i=a;else{var h=.5>a?a*(1+n):a+n-a*n,l=2*a-h;s=r(l,h,o+1/3),e=r(l,h,o),i=r(l,h,o-1/3)}return{r:Math.round(255*s),g:Math.round(255*e),b:Math.round(255*i)}}r.exports={hex2rgb:s,rgb2hex:e,rgb2hsl:i,hsl2rgb:o,rgb2rgbString:function(t){return"rgb("+[t.r,t.g,t.b].join(",")+")"}}},{}],3:[function(t,r){(function(s){"use strict";function e(t,r,s){return parseInt(t.substr(r,s||1),16)}function i(t,r,s,e,i){var o=parseFloat(t),n=s-r,a=i-e;return(o-r)*a/n+e}function o(t){return t%2===0?C:j}function n(t){return i(t,0,15,M,W)}function a(t){var r=t,s=r/2,e=Math.sin(60*Math.PI/180)*r;return[0,e,s,0,s+r,0,2*r,e,s+r,2*e,s,2*e,0,e].join(",")}function h(t,r){var s=.66*r;return[[0,0,t/2,r-s,t/2,r,0,s,0,0],[t/2,r-s,t,0,t,s,t/2,r,t/2,r-s]].map(function(t){return t.join(",")})}function l(t){return[[t,0,t,3*t],[0,t,3*t,t]]}function c(t){var r=t,s=.33*r;return[s,0,r-s,0,r,s,r,r-s,r-s,r,s,r,0,r-s,0,s,s,0].join(",")}function f(t,r){var s=t/2;return[s,0,t,r,0,r,s,0].join(",")}function u(t,r){return[t/2,0,t,r/2,t/2,r,0,r/2].join(",")}function p(t){return[0,0,t,t,0,t,0,0].join(",")}function g(t,r,s,e,i){var a=p(e),h=n(i[0]),l=o(i[0]),c={stroke:S,"stroke-opacity":A,"fill-opacity":h,fill:l};t.polyline(a,c).transform({translate:[r+e,s],scale:[-1,1]}),t.polyline(a,c).transform({translate:[r+e,s+2*e],scale:[1,-1]}),h=n(i[1]),l=o(i[1]),c={stroke:S,"stroke-opacity":A,"fill-opacity":h,fill:l},t.polyline(a,c).transform({translate:[r+e,s+2*e],scale:[-1,-1]}),t.polyline(a,c).transform({translate:[r+e,s],scale:[1,1]})}function v(t,r,s,e,i){var 
a=n(i),h=o(i),l=p(e),c={stroke:S,"stroke-opacity":A,"fill-opacity":a,fill:h};t.polyline(l,c).transform({translate:[r,s+e],scale:[1,-1]}),t.polyline(l,c).transform({translate:[r+2*e,s+e],scale:[-1,-1]}),t.polyline(l,c).transform({translate:[r,s+e],scale:[1,1]}),t.polyline(l,c).transform({translate:[r+2*e,s+e],scale:[-1,1]})}function y(t,r){var s=t/2;return[0,0,r,s,0,t,0,0].join(",")}var d=t("extend"),b=t("./color"),m=t("./sha1"),k=t("./svg"),x={baseColor:"#933c3c"},w=["octogons","overlappingCircles","plusSigns","xes","sineWaves","hexagons","overlappingRings","plaid","triangles","squares","concentricCircles","diamonds","tessellation","nestedSquares","mosaicSquares","chevrons"],j="#222",C="#ddd",S="#000",A=.02,M=.02,W=.15,H=r.exports=function(t,r){return this.opts=d({},x,r),this.hash=r.hash||m(t),this.svg=new k,this.generateBackground(),this.generatePattern(),this};H.prototype.toSvg=function(){return this.svg.toString()},H.prototype.toString=function(){return this.toSvg()},H.prototype.toBase64=function(){var t,r=this.toSvg();return t="undefined"!=typeof window&&"function"==typeof window.btoa?window.btoa(r):new s(r).toString("base64")},H.prototype.toDataUri=function(){return"data:image/svg+xml;base64,"+this.toBase64()},H.prototype.toDataUrl=function(){return'url("'+this.toDataUri()+'")'},H.prototype.generateBackground=function(){var t,r,s,o;this.opts.color?s=b.hex2rgb(this.opts.color):(r=i(e(this.hash,14,3),0,4095,0,359),o=e(this.hash,17),t=b.rgb2hsl(b.hex2rgb(this.opts.baseColor)),t.h=(360*t.h-r+360)%360/360,t.s=o%2===0?Math.min(1,(100*t.s+o)/100):Math.max(0,(100*t.s-o)/100),s=b.hsl2rgb(t)),this.color=b.rgb2hex(s),this.svg.rect(0,0,"100%","100%",{fill:b.rgb2rgbString(s)})},H.prototype.generatePattern=function(){var t=this.opts.generator;if(t){if(w.indexOf(t)<0)throw new Error("The generator "+t+" does not exist.")}else t=w[e(this.hash,20)];return this["geo"+t.slice(0,1).toUpperCase()+t.slice(1)]()},H.prototype.geoHexagons=function(){var t,r,s,h,l,c,f,u,p=e(this.hash,0),g=i(p,0,15,8,60),v=g*Math.sqrt(3),y=2*g,d=a(g);for(this.svg.setWidth(3*y+3*g),this.svg.setHeight(6*v),s=0,u=0;6>u;u++)for(f=0;6>f;f++)c=e(this.hash,s),t=f%2===0?u*v:u*v+v/2,h=n(c),r=o(c),l={fill:r,"fill-opacity":h,stroke:S,"stroke-opacity":A},this.svg.polyline(d,l).transform({translate:[f*g*1.5-y/2,t-v/2]}),0===f&&this.svg.polyline(d,l).transform({translate:[6*g*1.5-y/2,t-v/2]}),0===u&&(t=f%2===0?6*v:6*v+v/2,this.svg.polyline(d,l).transform({translate:[f*g*1.5-y/2,t-v/2]})),0===f&&0===u&&this.svg.polyline(d,l).transform({translate:[6*g*1.5-y/2,5*v+v/2]}),s++},H.prototype.geoSineWaves=function(){var t,r,s,a,h,l,c,f=Math.floor(i(e(this.hash,0),0,15,100,400)),u=Math.floor(i(e(this.hash,1),0,15,30,100)),p=Math.floor(i(e(this.hash,2),0,15,3,30));for(this.svg.setWidth(f),this.svg.setHeight(36*p),r=0;36>r;r++)l=e(this.hash,r),s=n(l),t=o(l),c=f/4*.7,h={fill:"none",stroke:t,opacity:s,"stroke-width":""+p+"px"},a="M0 "+u+" C "+c+" 0, "+(f/2-c)+" 0, "+f/2+" "+u+" S "+(f-c)+" "+2*u+", "+f+" "+u+" S "+(1.5*f-c)+" 0, "+1.5*f+", "+u,this.svg.path(a,h).transform({translate:[-f/4,p*r-1.5*u]}),this.svg.path(a,h).transform({translate:[-f/4,p*r-1.5*u+36*p]})},H.prototype.geoChevrons=function(){var 
t,r,s,a,l,c,f,u=i(e(this.hash,0),0,15,30,80),p=i(e(this.hash,0),0,15,30,80),g=h(u,p);for(this.svg.setWidth(6*u),this.svg.setHeight(6*p*.66),r=0,f=0;6>f;f++)for(c=0;6>c;c++)l=e(this.hash,r),s=n(l),t=o(l),a={stroke:S,"stroke-opacity":A,fill:t,"fill-opacity":s,"stroke-width":1},this.svg.group(a).transform({translate:[c*u,f*p*.66-p/2]}).polyline(g).end(),0===f&&this.svg.group(a).transform({translate:[c*u,6*p*.66-p/2]}).polyline(g).end(),r+=1},H.prototype.geoPlusSigns=function(){var t,r,s,a,h,c,f,u,p=i(e(this.hash,0),0,15,10,25),g=3*p,v=l(p);for(this.svg.setWidth(12*p),this.svg.setHeight(12*p),s=0,u=0;6>u;u++)for(f=0;6>f;f++)c=e(this.hash,s),a=n(c),r=o(c),t=u%2===0?0:1,h={fill:r,stroke:S,"stroke-opacity":A,"fill-opacity":a},this.svg.group(h).transform({translate:[f*g-f*p+t*p-p,u*g-u*p-g/2]}).rect(v).end(),0===f&&this.svg.group(h).transform({translate:[4*g-f*p+t*p-p,u*g-u*p-g/2]}).rect(v).end(),0===u&&this.svg.group(h).transform({translate:[f*g-f*p+t*p-p,4*g-u*p-g/2]}).rect(v).end(),0===f&&0===u&&this.svg.group(h).transform({translate:[4*g-f*p+t*p-p,4*g-u*p-g/2]}).rect(v).end(),s++},H.prototype.geoXes=function(){var t,r,s,a,h,c,f,u,p=i(e(this.hash,0),0,15,10,25),g=l(p),v=3*p*.943;for(this.svg.setWidth(3*v),this.svg.setHeight(3*v),s=0,u=0;6>u;u++)for(f=0;6>f;f++)c=e(this.hash,s),a=n(c),t=f%2===0?u*v-.5*v:u*v-.5*v+v/4,r=o(c),h={fill:r,opacity:a},this.svg.group(h).transform({translate:[f*v/2-v/2,t-u*v/2],rotate:[45,v/2,v/2]}).rect(g).end(),0===f&&this.svg.group(h).transform({translate:[6*v/2-v/2,t-u*v/2],rotate:[45,v/2,v/2]}).rect(g).end(),0===u&&(t=f%2===0?6*v-v/2:6*v-v/2+v/4,this.svg.group(h).transform({translate:[f*v/2-v/2,t-6*v/2],rotate:[45,v/2,v/2]}).rect(g).end()),5===u&&this.svg.group(h).transform({translate:[f*v/2-v/2,t-11*v/2],rotate:[45,v/2,v/2]}).rect(g).end(),0===f&&0===u&&this.svg.group(h).transform({translate:[6*v/2-v/2,t-6*v/2],rotate:[45,v/2,v/2]}).rect(g).end(),s++},H.prototype.geoOverlappingCircles=function(){var t,r,s,a,h,l,c,f=e(this.hash,0),u=i(f,0,15,25,200),p=u/2;for(this.svg.setWidth(6*p),this.svg.setHeight(6*p),r=0,c=0;6>c;c++)for(l=0;6>l;l++)h=e(this.hash,r),s=n(h),t=o(h),a={fill:t,opacity:s},this.svg.circle(l*p,c*p,p,a),0===l&&this.svg.circle(6*p,c*p,p,a),0===c&&this.svg.circle(l*p,6*p,p,a),0===l&&0===c&&this.svg.circle(6*p,6*p,p,a),r++},H.prototype.geoOctogons=function(){var t,r,s,a,h,l,f=i(e(this.hash,0),0,15,10,60),u=c(f);for(this.svg.setWidth(6*f),this.svg.setHeight(6*f),r=0,l=0;6>l;l++)for(h=0;6>h;h++)a=e(this.hash,r),s=n(a),t=o(a),this.svg.polyline(u,{fill:t,"fill-opacity":s,stroke:S,"stroke-opacity":A}).transform({translate:[h*f,l*f]}),r+=1},H.prototype.geoSquares=function(){var t,r,s,a,h,l,c=i(e(this.hash,0),0,15,10,60);for(this.svg.setWidth(6*c),this.svg.setHeight(6*c),r=0,l=0;6>l;l++)for(h=0;6>h;h++)a=e(this.hash,r),s=n(a),t=o(a),this.svg.rect(h*c,l*c,c,c,{fill:t,"fill-opacity":s,stroke:S,"stroke-opacity":A}),r+=1},H.prototype.geoConcentricCircles=function(){var t,r,s,a,h,l,c=e(this.hash,0),f=i(c,0,15,10,60),u=f/5;for(this.svg.setWidth(6*(f+u)),this.svg.setHeight(6*(f+u)),r=0,l=0;6>l;l++)for(h=0;6>h;h++)a=e(this.hash,r),s=n(a),t=o(a),this.svg.circle(h*f+h*u+(f+u)/2,l*f+l*u+(f+u)/2,f/2,{fill:"none",stroke:t,opacity:s,"stroke-width":u+"px"}),a=e(this.hash,39-r),s=n(a),t=o(a),this.svg.circle(h*f+h*u+(f+u)/2,l*f+l*u+(f+u)/2,f/4,{fill:t,"fill-opacity":s}),r+=1},H.prototype.geoOverlappingRings=function(){var 
t,r,s,a,h,l,c,f=e(this.hash,0),u=i(f,0,15,10,60),p=u/4;for(this.svg.setWidth(6*u),this.svg.setHeight(6*u),r=0,c=0;6>c;c++)for(l=0;6>l;l++)h=e(this.hash,r),s=n(h),t=o(h),a={fill:"none",stroke:t,opacity:s,"stroke-width":p+"px"},this.svg.circle(l*u,c*u,u-p/2,a),0===l&&this.svg.circle(6*u,c*u,u-p/2,a),0===c&&this.svg.circle(l*u,6*u,u-p/2,a),0===l&&0===c&&this.svg.circle(6*u,6*u,u-p/2,a),r+=1},H.prototype.geoTriangles=function(){var t,r,s,a,h,l,c,u,p=e(this.hash,0),g=i(p,0,15,15,80),v=g/2*Math.sqrt(3),y=f(g,v);for(this.svg.setWidth(3*g),this.svg.setHeight(6*v),r=0,u=0;6>u;u++)for(c=0;6>c;c++)l=e(this.hash,r),s=n(l),t=o(l),h={fill:t,"fill-opacity":s,stroke:S,"stroke-opacity":A},a=u%2===0?c%2===0?180:0:c%2!==0?180:0,this.svg.polyline(y,h).transform({translate:[c*g*.5-g/2,v*u],rotate:[a,g/2,v/2]}),0===c&&this.svg.polyline(y,h).transform({translate:[6*g*.5-g/2,v*u],rotate:[a,g/2,v/2]}),r+=1},H.prototype.geoDiamonds=function(){var t,r,s,a,h,l,c,f,p=i(e(this.hash,0),0,15,10,50),g=i(e(this.hash,1),0,15,10,50),v=u(p,g);for(this.svg.setWidth(6*p),this.svg.setHeight(3*g),s=0,f=0;6>f;f++)for(c=0;6>c;c++)l=e(this.hash,s),a=n(l),r=o(l),h={fill:r,"fill-opacity":a,stroke:S,"stroke-opacity":A},t=f%2===0?0:p/2,this.svg.polyline(v,h).transform({translate:[c*p-p/2+t,g/2*f-g/2]}),0===c&&this.svg.polyline(v,h).transform({translate:[6*p-p/2+t,g/2*f-g/2]}),0===f&&this.svg.polyline(v,h).transform({translate:[c*p-p/2+t,g/2*6-g/2]}),0===c&&0===f&&this.svg.polyline(v,h).transform({translate:[6*p-p/2+t,g/2*6-g/2]}),s+=1},H.prototype.geoNestedSquares=function(){var t,r,s,a,h,l,c,f=i(e(this.hash,0),0,15,4,12),u=7*f;for(this.svg.setWidth(6*(u+f)+6*f),this.svg.setHeight(6*(u+f)+6*f),r=0,c=0;6>c;c++)for(l=0;6>l;l++)h=e(this.hash,r),s=n(h),t=o(h),a={fill:"none",stroke:t,opacity:s,"stroke-width":f+"px"},this.svg.rect(l*u+l*f*2+f/2,c*u+c*f*2+f/2,u,u,a),h=e(this.hash,39-r),s=n(h),t=o(h),a={fill:"none",stroke:t,opacity:s,"stroke-width":f+"px"},this.svg.rect(l*u+l*f*2+f/2+2*f,c*u+c*f*2+f/2+2*f,3*f,3*f,a),r+=1},H.prototype.geoMosaicSquares=function(){var t,r,s,o=i(e(this.hash,0),0,15,15,50);for(this.svg.setWidth(8*o),this.svg.setHeight(8*o),t=0,s=0;4>s;s++)for(r=0;4>r;r++)r%2===0?s%2===0?v(this.svg,r*o*2,s*o*2,o,e(this.hash,t)):g(this.svg,r*o*2,s*o*2,o,[e(this.hash,t),e(this.hash,t+1)]):s%2===0?g(this.svg,r*o*2,s*o*2,o,[e(this.hash,t),e(this.hash,t+1)]):v(this.svg,r*o*2,s*o*2,o,e(this.hash,t)),t+=1},H.prototype.geoPlaid=function(){var t,r,s,i,a,h,l,c=0,f=0;for(r=0;36>r;)i=e(this.hash,r),c+=i+5,l=e(this.hash,r+1),s=n(l),t=o(l),a=l+5,this.svg.rect(0,c,"100%",a,{opacity:s,fill:t}),c+=a,r+=2;for(r=0;36>r;)i=e(this.hash,r),f+=i+5,l=e(this.hash,r+1),s=n(l),t=o(l),h=l+5,this.svg.rect(f,0,h,"100%",{opacity:s,fill:t}),f+=h,r+=2;this.svg.setWidth(f),this.svg.setHeight(c)},H.prototype.geoTessellation=function(){var t,r,s,a,h,l=i(e(this.hash,0),0,15,5,40),c=l*Math.sqrt(3),f=2*l,u=l/2*Math.sqrt(3),p=y(l,u),g=3*l+2*u,v=2*c+2*l;for(this.svg.setWidth(g),this.svg.setHeight(v),r=0;20>r;r++)switch(h=e(this.hash,r),s=n(h),t=o(h),a={stroke:S,"stroke-opacity":A,fill:t,"fill-opacity":s,"stroke-width":1},r){case 0:this.svg.rect(-l/2,-l/2,l,l,a),this.svg.rect(g-l/2,-l/2,l,l,a),this.svg.rect(-l/2,v-l/2,l,l,a),this.svg.rect(g-l/2,v-l/2,l,l,a);break;case 1:this.svg.rect(f/2+u,c/2,l,l,a);break;case 2:this.svg.rect(-l/2,v/2-l/2,l,l,a),this.svg.rect(g-l/2,v/2-l/2,l,l,a);break;case 3:this.svg.rect(f/2+u,1.5*c+l,l,l,a);break;case 4:this.svg.polyline(p,a).transform({translate:[l/2,-l/2],rotate:[0,l/2,u/2]}),this.svg.polyline(p,a).transform({translate:[l/2,v- 
-l/2],rotate:[0,l/2,u/2],scale:[1,-1]});break;case 5:this.svg.polyline(p,a).transform({translate:[g-l/2,-l/2],rotate:[0,l/2,u/2],scale:[-1,1]}),this.svg.polyline(p,a).transform({translate:[g-l/2,v+l/2],rotate:[0,l/2,u/2],scale:[-1,-1]});break;case 6:this.svg.polyline(p,a).transform({translate:[g/2+l/2,c/2]});break;case 7:this.svg.polyline(p,a).transform({translate:[g-g/2-l/2,c/2],scale:[-1,1]});break;case 8:this.svg.polyline(p,a).transform({translate:[g/2+l/2,v-c/2],scale:[1,-1]});break;case 9:this.svg.polyline(p,a).transform({translate:[g-g/2-l/2,v-c/2],scale:[-1,-1]});break;case 10:this.svg.polyline(p,a).transform({translate:[l/2,v/2-l/2]});break;case 11:this.svg.polyline(p,a).transform({translate:[g-l/2,v/2-l/2],scale:[-1,1]});break;case 12:this.svg.rect(0,0,l,l,a).transform({translate:[l/2,l/2],rotate:[-30,0,0]});break;case 13:this.svg.rect(0,0,l,l,a).transform({scale:[-1,1],translate:[-g+l/2,l/2],rotate:[-30,0,0]});break;case 14:this.svg.rect(0,0,l,l,a).transform({translate:[l/2,v/2-l/2-l],rotate:[30,0,l]});break;case 15:this.svg.rect(0,0,l,l,a).transform({scale:[-1,1],translate:[-g+l/2,v/2-l/2-l],rotate:[30,0,l]});break;case 16:this.svg.rect(0,0,l,l,a).transform({scale:[1,-1],translate:[l/2,-v+v/2-l/2-l],rotate:[30,0,l]});break;case 17:this.svg.rect(0,0,l,l,a).transform({scale:[-1,-1],translate:[-g+l/2,-v+v/2-l/2-l],rotate:[30,0,l]});break;case 18:this.svg.rect(0,0,l,l,a).transform({scale:[1,-1],translate:[l/2,-v+l/2],rotate:[-30,0,0]});break;case 19:this.svg.rect(0,0,l,l,a).transform({scale:[-1,-1],translate:[-g+l/2,-v+l/2],rotate:[-30,0,0]})}}}).call(this,t("buffer").Buffer)},{"./color":2,"./sha1":4,"./svg":5,buffer:7,extend:8}],4:[function(t,r){"use strict";function s(){function t(){for(var t=16;80>t;t++){var r=f[t-3]^f[t-8]^f[t-14]^f[t-16];f[t]=r<<1|r>>>31}var s,e,i=n,o=a,p=h,g=l,v=c;for(t=0;80>t;t++){20>t?(s=g^o&(p^g),e=1518500249):40>t?(s=o^p^g,e=1859775393):60>t?(s=o&p|g&(o|p),e=2400959708):(s=o^p^g,e=3395469782);var y=(i<<5|i>>>27)+s+v+e+(0|f[t]);v=g,g=p,p=o<<30|o>>>2,o=i,i=y}for(n=n+i|0,a=a+o|0,h=h+p|0,l=l+g|0,c=c+v|0,u=0,t=0;16>t;t++)f[t]=0}function r(r){f[u]|=(255&r)<<p,p?p-=8:(u++,p=24),16===u&&t()}function s(t){var s=t.length;g+=8*s;for(var e=0;s>e;e++)r(t.charCodeAt(e))}function e(t){if("string"==typeof t)return s(t);var e=t.length;g+=8*e;for(var i=0;e>i;i++)r(t[i])}function i(t){for(var r="",s=28;s>=0;s-=4)r+=(t>>s&15).toString(16);return r}function o(){r(128),(u>14||14===u&&24>p)&&t(),u=14,p=24,r(0),r(0),r(g>0xffffffffff?g/1099511627776:0),r(g>4294967295?g/4294967296:0);for(var s=24;s>=0;s-=8)r(g>>s);return i(n)+i(a)+i(h)+i(l)+i(c)}var n=1732584193,a=4023233417,h=2562383102,l=271733878,c=3285377520,f=new Uint32Array(80),u=0,p=24,g=0;return{update:e,digest:o}}r.exports=function(t){if(void 0===t)return s();var r=s();return r.update(t),r.digest()}},{}],5:[function(t,r){"use strict";function s(){return this.width=100,this.height=100,this.svg=i("svg"),this.context=[],this.setAttributes(this.svg,{xmlns:"http://www.w3.org/2000/svg",width:this.width,height:this.height}),this}var e=t("extend"),i=t("./xml");r.exports=s,s.prototype.currentContext=function(){return this.context[this.context.length-1]||this.svg},s.prototype.end=function(){return this.context.pop(),this},s.prototype.currentNode=function(){var t=this.currentContext();return t.lastChild||t},s.prototype.transform=function(t){return this.currentNode().setAttribute("transform",Object.keys(t).map(function(r){return r+"("+t[r].join(",")+")"}).join(" ")),this},
s.prototype.setAttributes=function(t,r){Object.keys(r).forEach(function(s){t.setAttribute(s,r[s])})},s.prototype.setWidth=function(t){this.svg.setAttribute("width",Math.floor(t))},s.prototype.setHeight=function(t){this.svg.setAttribute("height",Math.floor(t))},s.prototype.toString=function(){return this.svg.toString()},s.prototype.rect=function(t,r,s,o,n){var a=this;if(Array.isArray(t))return t.forEach(function(t){a.rect.apply(a,t.concat(n))}),this;var h=i("rect");return this.currentContext().appendChild(h),this.setAttributes(h,e({x:t,y:r,width:s,height:o},n)),this},s.prototype.circle=function(t,r,s,o){var n=i("circle");return this.currentContext().appendChild(n),this.setAttributes(n,e({cx:t,cy:r,r:s},o)),this},s.prototype.path=function(t,r){var s=i("path");return this.currentContext().appendChild(s),this.setAttributes(s,e({d:t},r)),this},s.prototype.polyline=function(t,r){var s=this;if(Array.isArray(t))return t.forEach(function(t){s.polyline(t,r)}),this;var o=i("polyline");return this.currentContext().appendChild(o),this.setAttributes(o,e({points:t},r)),this},s.prototype.group=function(t){var r=i("g");return this.currentContext().appendChild(r),this.context.push(r),this.setAttributes(r,e({},t)),this}},{"./xml":6,extend:8}],6:[function(t,r){"use strict";var s=r.exports=function(t){return this instanceof s?(this.tagName=t,this.attributes=Object.create(null),this.children=[],this.lastChild=null,this):new s(t)};s.prototype.appendChild=function(t){return this.children.push(t),this.lastChild=t,this},s.prototype.setAttribute=function(t,r){return this.attributes[t]=r,this},s.prototype.toString=function(){var t=this;return["<",t.tagName,Object.keys(t.attributes).map(function(r){return[" ",r,'="',t.attributes[r],'"'].join("")}).join(""),">",t.children.map(function(t){return t.toString()}).join(""),"</",t.tagName,">"].join("")}},{}],7:[function(){},{}],8:[function(t,r){function s(t){if(!t||"[object Object]"!==i.call(t)||t.nodeType||t.setInterval)return!1;var r=e.call(t,"constructor"),s=e.call(t.constructor.prototype,"isPrototypeOf");if(t.constructor&&!r&&!s)return!1;var o;for(o in t);return void 0===o||e.call(t,o)}var e=Object.prototype.hasOwnProperty,i=Object.prototype.toString;r.exports=function o(){var t,r,e,i,n,a,h=arguments[0]||{},l=1,c=arguments.length,f=!1;for("boolean"==typeof h&&(f=h,h=arguments[1]||{},l=2),"object"!=typeof h&&"function"!=typeof h&&(h={});c>l;l++)if(null!=(t=arguments[l]))for(r in t)e=h[r],i=t[r],h!==i&&(f&&i&&(s(i)||(n=Array.isArray(i)))?(n?(n=!1,a=e&&Array.isArray(e)?e:[]):a=e&&s(e)?e:{},h[r]=o(f,a,i)):void 0!==i&&(h[r]=i));return h}},{}]},{},[1])(1)});
(function() {
// Baseline setup
// --------------
// Establish the root object, `window` in the browser, or `exports` on the server.
var root = this;
// Save the previous value of the `_` variable.
var previousUnderscore = root._;
// Save bytes in the minified (but not gzipped) version:
var ArrayProto = Array.prototype, ObjProto = Object.prototype, FuncProto = Function.prototype;
// Create quick reference variables for speed access to core prototypes.
var
push = ArrayProto.push,
slice = ArrayProto.slice,
toString = ObjProto.toString,
hasOwnProperty = ObjProto.hasOwnProperty;
// All **ECMAScript 5** native function implementations that we hope to use
// are declared here.
var
nativeIsArray = Array.isArray,
nativeKeys = Object.keys,
nativeBind = FuncProto.bind,
nativeCreate = Object.create;
// Naked function reference for surrogate-prototype-swapping.
var Ctor = function(){};
// Create a safe reference to the Underscore object for use below.
var _ = function(obj) {
if (obj instanceof _) return obj;
if (!(this instanceof _)) return new _(obj);
this._wrapped = obj;
};
// Export the Underscore object for **Node.js**, with
// backwards-compatibility for the old `require()` API. If we're in
// the browser, add `_` as a global object.
if (typeof exports !== 'undefined') {
if (typeof module !== 'undefined' && module.exports) {
exports = module.exports = _;
}
exports._ = _;
} else {
root._ = _;
}
// Current version.
_.VERSION = '1.8.3';
// Internal function that returns an efficient (for current engines) version
// of the passed-in callback, to be repeatedly applied in other Underscore
// functions.
var optimizeCb = function(func, context, argCount) {
if (context === void 0) return func;
switch (argCount == null ? 3 : argCount) {
case 1: return function(value) {
return func.call(context, value);
};
case 2: return function(value, other) {
return func.call(context, value, other);
};
case 3: return function(value, index, collection) {
return func.call(context, value, index, collection);
};
case 4: return function(accumulator, value, index, collection) {
return func.call(context, accumulator, value, index, collection);
};
}
return function() {
return func.apply(context, arguments);
};
};
// A mostly-internal function to generate callbacks that can be applied
// to each element in a collection, returning the desired result — either
// identity, an arbitrary callback, a property matcher, or a property accessor.
var cb = function(value, context, argCount) {
if (value == null) return _.identity;
if (_.isFunction(value)) return optimizeCb(value, context, argCount);
if (_.isObject(value)) return _.matcher(value);
return _.property(value);
};
_.iteratee = function(value, context) {
return cb(value, context, Infinity);
};
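  // Sketch of how `cb` / `_.iteratee` dispatches (the example values are made
  // up; the results are described in the trailing comments):
  //
  //   _.iteratee(null)                 // -> _.identity
  //   _.iteratee(function (x) {})      // -> that function, optionally bound to `context`
  //   _.iteratee({ active: true })     // -> a matcher that tests whether obj matches {active: true}
  //   _.iteratee('name')               // -> a property accessor reading obj['name']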
// An internal function for creating assigner functions.
var createAssigner = function(keysFunc, undefinedOnly) {
return function(obj) {
var length = arguments.length;
if (length < 2 || obj == null) return obj;
for (var index = 1; index < length; index++) {
var source = arguments[index],
keys = keysFunc(source),
l = keys.length;
for (var i = 0; i < l; i++) {
var key = keys[i];
if (!undefinedOnly || obj[key] === void 0) obj[key] = source[key];
}
}
return obj;
};
};
// An internal function for creating a new object that inherits from another.
var baseCreate = function(prototype) {
if (!_.isObject(prototype)) return {};
if (nativeCreate) return nativeCreate(prototype);
Ctor.prototype = prototype;
var result = new Ctor;
Ctor.prototype = null;
return result;
};
var property = function(key) {
return function(obj) {
return obj == null ? void 0 : obj[key];
};
};
// Helper for collection methods to determine whether a collection
// should be iterated as an array or as an object
// Related: http://people.mozilla.org/~jorendorff/es6-draft.html#sec-tolength
// Avoids a very nasty iOS 8 JIT bug on ARM-64. #2094
var MAX_ARRAY_INDEX = Math.pow(2, 53) - 1;
var getLength = property('length');
var isArrayLike = function(collection) {
var length = getLength(collection);
return typeof length == 'number' && length >= 0 && length <= MAX_ARRAY_INDEX;
};
// Collection Functions
// --------------------
// The cornerstone, an `each` implementation, aka `forEach`.
// Handles raw objects in addition to array-likes. Treats all
// sparse array-likes as if they were dense.
_.each = _.forEach = function(obj, iteratee, context) {
iteratee = optimizeCb(iteratee, context);
var i, length;
if (isArrayLike(obj)) {
for (i = 0, length = obj.length; i < length; i++) {
iteratee(obj[i], i, obj);
}
} else {
var keys = _.keys(obj);
for (i = 0, length = keys.length; i < length; i++) {
iteratee(obj[keys[i]], keys[i], obj);
}
}
return obj;
};
// Return the results of applying the iteratee to each element.
_.map = _.collect = function(obj, iteratee, context) {
iteratee = cb(iteratee, context);
var keys = !isArrayLike(obj) && _.keys(obj),
length = (keys || obj).length,
results = Array(length);
for (var index = 0; index < length; index++) {
var currentKey = keys ? keys[index] : index;
results[index] = iteratee(obj[currentKey], currentKey, obj);
}
return results;
};
// Create a reducing function iterating left or right.
function createReduce(dir) {
// Optimized iterator function as using arguments.length
    // in the main function will deoptimize it, see #1991.
function iterator(obj, iteratee, memo, keys, index, length) {
for (; index >= 0 && index < length; index += dir) {
var currentKey = keys ? keys[index] : index;
memo = iteratee(memo, obj[currentKey], currentKey, obj);
}
return memo;
}
return function(obj, iteratee, memo, context) {
iteratee = optimizeCb(iteratee, context, 4);
var keys = !isArrayLike(obj) && _.keys(obj),
length = (keys || obj).length,
index = dir > 0 ? 0 : length - 1;
// Determine the initial value if none is provided.
if (arguments.length < 3) {
memo = obj[keys ? keys[index] : index];
index += dir;
}
return iterator(obj, iteratee, memo, keys, index, length);
};
}
// **Reduce** builds up a single result from a list of values, aka `inject`,
// or `foldl`.
_.reduce = _.foldl = _.inject = createReduce(1);
// The right-associative version of reduce, also known as `foldr`.
_.reduceRight = _.foldr = createReduce(-1);
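  // Usage sketch for the reducers above (illustrative values only):
  //
  //   _.reduce([1, 2, 3], function(memo, num) { return memo + num; }, 0);
  //   // => 6
  //   _.reduceRight([[0, 1], [2, 3]], function(a, b) { return a.concat(b); });
  //   // => [2, 3, 0, 1]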
// Return the first value which passes a truth test. Aliased as `detect`.
_.find = _.detect = function(obj, predicate, context) {
var key;
if (isArrayLike(obj)) {
key = _.findIndex(obj, predicate, context);
} else {
key = _.findKey(obj, predicate, context);
}
if (key !== void 0 && key !== -1) return obj[key];
};
// Return all the elements that pass a truth test.
// Aliased as `select`.
_.filter = _.select = function(obj, predicate, context) {
var results = [];
predicate = cb(predicate, context);
_.each(obj, function(value, index, list) {
if (predicate(value, index, list)) results.push(value);
});
return results;
};
// Return all the elements for which a truth test fails.
_.reject = function(obj, predicate, context) {
return _.filter(obj, _.negate(cb(predicate)), context);
};
// Determine whether all of the elements match a truth test.
// Aliased as `all`.
_.every = _.all = function(obj, predicate, context) {
predicate = cb(predicate, context);
var keys = !isArrayLike(obj) && _.keys(obj),
length = (keys || obj).length;
for (var index = 0; index < length; index++) {
var currentKey = keys ? keys[index] : index;
if (!predicate(obj[currentKey], currentKey, obj)) return false;
}
return true;
};
// Determine if at least one element in the object matches a truth test.
// Aliased as `any`.
_.some = _.any = function(obj, predicate, context) {
predicate = cb(predicate, context);
var keys = !isArrayLike(obj) && _.keys(obj),
length = (keys || obj).length;
for (var index = 0; index < length; index++) {
var currentKey = keys ? keys[index] : index;
if (predicate(obj[currentKey], currentKey, obj)) return true;
}
return false;
};
// Determine if the array or object contains a given item (using `===`).
// Aliased as `includes` and `include`.
_.contains = _.includes = _.include = function(obj, item, fromIndex, guard) {
if (!isArrayLike(obj)) obj = _.values(obj);
if (typeof fromIndex != 'number' || guard) fromIndex = 0;
return _.indexOf(obj, item, fromIndex) >= 0;
};
// Invoke a method (with arguments) on every item in a collection.
_.invoke = function(obj, method) {
var args = slice.call(arguments, 2);
var isFunc = _.isFunction(method);
return _.map(obj, function(value) {
var func = isFunc ? method : value[method];
return func == null ? func : func.apply(value, args);
});
};
// Convenience version of a common use case of `map`: fetching a property.
_.pluck = function(obj, key) {
return _.map(obj, _.property(key));
};
// Convenience version of a common use case of `filter`: selecting only objects
// containing specific `key:value` pairs.
_.where = function(obj, attrs) {
return _.filter(obj, _.matcher(attrs));
};
// Convenience version of a common use case of `find`: getting the first object
// containing specific `key:value` pairs.
_.findWhere = function(obj, attrs) {
return _.find(obj, _.matcher(attrs));
};
// Return the maximum element (or element-based computation).
_.max = function(obj, iteratee, context) {
var result = -Infinity, lastComputed = -Infinity,
value, computed;
if (iteratee == null && obj != null) {
obj = isArrayLike(obj) ? obj : _.values(obj);
for (var i = 0, length = obj.length; i < length; i++) {
value = obj[i];
if (value > result) {
result = value;
}
}
} else {
iteratee = cb(iteratee, context);
_.each(obj, function(value, index, list) {
computed = iteratee(value, index, list);
if (computed > lastComputed || computed === -Infinity && result === -Infinity) {
result = value;
lastComputed = computed;
}
});
}
return result;
};
// Return the minimum element (or element-based computation).
_.min = function(obj, iteratee, context) {
var result = Infinity, lastComputed = Infinity,
value, computed;
if (iteratee == null && obj != null) {
obj = isArrayLike(obj) ? obj : _.values(obj);
for (var i = 0, length = obj.length; i < length; i++) {
value = obj[i];
if (value < result) {
result = value;
}
}
} else {
iteratee = cb(iteratee, context);
_.each(obj, function(value, index, list) {
computed = iteratee(value, index, list);
if (computed < lastComputed || computed === Infinity && result === Infinity) {
result = value;
lastComputed = computed;
}
});
}
return result;
};
// Shuffle a collection, using the modern version of the
// [Fisher-Yates shuffle](http://en.wikipedia.org/wiki/Fisher–Yates_shuffle).
_.shuffle = function(obj) {
var set = isArrayLike(obj) ? obj : _.values(obj);
var length = set.length;
var shuffled = Array(length);
for (var index = 0, rand; index < length; index++) {
rand = _.random(0, index);
if (rand !== index) shuffled[index] = shuffled[rand];
shuffled[rand] = set[index];
}
return shuffled;
};
// Sample **n** random values from a collection.
// If **n** is not specified, returns a single random element.
// The internal `guard` argument allows it to work with `map`.
_.sample = function(obj, n, guard) {
if (n == null || guard) {
if (!isArrayLike(obj)) obj = _.values(obj);
return obj[_.random(obj.length - 1)];
}
return _.shuffle(obj).slice(0, Math.max(0, n));
};
// Sort the object's values by a criterion produced by an iteratee.
_.sortBy = function(obj, iteratee, context) {
iteratee = cb(iteratee, context);
return _.pluck(_.map(obj, function(value, index, list) {
return {
value: value,
index: index,
criteria: iteratee(value, index, list)
};
}).sort(function(left, right) {
var a = left.criteria;
var b = right.criteria;
if (a !== b) {
if (a > b || a === void 0) return 1;
if (a < b || b === void 0) return -1;
}
return left.index - right.index;
}), 'value');
};
// An internal function used for aggregate "group by" operations.
var group = function(behavior) {
return function(obj, iteratee, context) {
var result = {};
iteratee = cb(iteratee, context);
_.each(obj, function(value, index) {
var key = iteratee(value, index, obj);
behavior(result, value, key);
});
return result;
};
};
// Groups the object's values by a criterion. Pass either a string attribute
// to group by, or a function that returns the criterion.
_.groupBy = group(function(result, value, key) {
if (_.has(result, key)) result[key].push(value); else result[key] = [value];
});
// Indexes the object's values by a criterion, similar to `groupBy`, but for
// when you know that your index values will be unique.
_.indexBy = group(function(result, value, key) {
result[key] = value;
});
// Counts instances of an object that group by a certain criterion. Pass
// either a string attribute to count by, or a function that returns the
// criterion.
_.countBy = group(function(result, value, key) {
if (_.has(result, key)) result[key]++; else result[key] = 1;
});
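  // Usage sketch for the "group by" family above (illustrative values only):
  //
  //   _.groupBy([1.3, 2.1, 2.4], Math.floor);
  //   // => {1: [1.3], 2: [2.1, 2.4]}
  //   _.countBy([1, 2, 3, 4, 5], function(num) {
  //     return num % 2 === 0 ? 'even' : 'odd';
  //   });
  //   // => {odd: 3, even: 2}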
// Safely create a real, live array from anything iterable.
_.toArray = function(obj) {
if (!obj) return [];
if (_.isArray(obj)) return slice.call(obj);
if (isArrayLike(obj)) return _.map(obj, _.identity);
return _.values(obj);
};
// Return the number of elements in an object.
_.size = function(obj) {
if (obj == null) return 0;
return isArrayLike(obj) ? obj.length : _.keys(obj).length;
};
// Split a collection into two arrays: one whose elements all satisfy the given
// predicate, and one whose elements all do not satisfy the predicate.
_.partition = function(obj, predicate, context) {
predicate = cb(predicate, context);
var pass = [], fail = [];
_.each(obj, function(value, key, obj) {
(predicate(value, key, obj) ? pass : fail).push(value);
});
return [pass, fail];
};
// Array Functions
// ---------------
// Get the first element of an array. Passing **n** will return the first N
// values in the array. Aliased as `head` and `take`. The **guard** check
// allows it to work with `_.map`.
_.first = _.head = _.take = function(array, n, guard) {
if (array == null) return void 0;
if (n == null || guard) return array[0];
return _.initial(array, array.length - n);
};
// Returns everything but the last entry of the array. Especially useful on
// the arguments object. Passing **n** will return all the values in
// the array, excluding the last N.
_.initial = function(array, n, guard) {
return slice.call(array, 0, Math.max(0, array.length - (n == null || guard ? 1 : n)));
};
// Get the last element of an array. Passing **n** will return the last N
// values in the array.
_.last = function(array, n, guard) {
if (array == null) return void 0;
if (n == null || guard) return array[array.length - 1];
return _.rest(array, Math.max(0, array.length - n));
};
// Returns everything but the first entry of the array. Aliased as `tail` and `drop`.
  // Especially useful on the arguments object. Passing an **n** will return
  // the values of the array from index **n** onward.
_.rest = _.tail = _.drop = function(array, n, guard) {
return slice.call(array, n == null || guard ? 1 : n);
};
// Trim out all falsy values from an array.
_.compact = function(array) {
return _.filter(array, _.identity);
};
// Internal implementation of a recursive `flatten` function.
var flatten = function(input, shallow, strict, startIndex) {
var output = [], idx = 0;
for (var i = startIndex || 0, length = getLength(input); i < length; i++) {
var value = input[i];
if (isArrayLike(value) && (_.isArray(value) || _.isArguments(value))) {
//flatten current level of array or arguments object
if (!shallow) value = flatten(value, shallow, strict);
var j = 0, len = value.length;
output.length += len;
while (j < len) {
output[idx++] = value[j++];
}
} else if (!strict) {
output[idx++] = value;
}
}
return output;
};
// Flatten out an array, either recursively (by default), or just one level.
_.flatten = function(array, shallow) {
return flatten(array, shallow, false);
};
// Return a version of the array that does not contain the specified value(s).
_.without = function(array) {
return _.difference(array, slice.call(arguments, 1));
};
// Produce a duplicate-free version of the array. If the array has already
// been sorted, you have the option of using a faster algorithm.
// Aliased as `unique`.
_.uniq = _.unique = function(array, isSorted, iteratee, context) {
if (!_.isBoolean(isSorted)) {
context = iteratee;
iteratee = isSorted;
isSorted = false;
}
if (iteratee != null) iteratee = cb(iteratee, context);
var result = [];
var seen = [];
for (var i = 0, length = getLength(array); i < length; i++) {
var value = array[i],
computed = iteratee ? iteratee(value, i, array) : value;
if (isSorted) {
if (!i || seen !== computed) result.push(value);
seen = computed;
} else if (iteratee) {
if (!_.contains(seen, computed)) {
seen.push(computed);
result.push(value);
}
} else if (!_.contains(result, value)) {
result.push(value);
}
}
return result;
};
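  // Usage sketch (illustrative values only):
  //
  //   _.uniq([1, 2, 1, 4, 1, 3]);
  //   // => [1, 2, 4, 3]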
// Produce an array that contains the union: each distinct element from all of
// the passed-in arrays.
_.union = function() {
return _.uniq(flatten(arguments, true, true));
};
// Produce an array that contains every item shared between all the
// passed-in arrays.
_.intersection = function(array) {
var result = [];
var argsLength = arguments.length;
for (var i = 0, length = getLength(array); i < length; i++) {
var item = array[i];
if (_.contains(result, item)) continue;
for (var j = 1; j < argsLength; j++) {
if (!_.contains(arguments[j], item)) break;
}
if (j === argsLength) result.push(item);
}
return result;
};
// Take the difference between one array and a number of other arrays.
// Only the elements present in just the first array will remain.
_.difference = function(array) {
var rest = flatten(arguments, true, true, 1);
return _.filter(array, function(value){
return !_.contains(rest, value);
});
};
// Zip together multiple lists into a single array -- elements that share
// an index go together.
_.zip = function() {
return _.unzip(arguments);
};
// Complement of _.zip. Unzip accepts an array of arrays and groups
// each array's elements on shared indices
_.unzip = function(array) {
var length = array && _.max(array, getLength).length || 0;
var result = Array(length);
for (var index = 0; index < length; index++) {
result[index] = _.pluck(array, index);
}
return result;
};
// Converts lists into objects. Pass either a single array of `[key, value]`
// pairs, or two parallel arrays of the same length -- one of keys, and one of
// the corresponding values.
_.object = function(list, values) {
var result = {};
for (var i = 0, length = getLength(list); i < length; i++) {
if (values) {
result[list[i]] = values[i];
} else {
result[list[i][0]] = list[i][1];
}
}
return result;
};
// Generator function to create the findIndex and findLastIndex functions
function createPredicateIndexFinder(dir) {
return function(array, predicate, context) {
predicate = cb(predicate, context);
var length = getLength(array);
var index = dir > 0 ? 0 : length - 1;
for (; index >= 0 && index < length; index += dir) {
if (predicate(array[index], index, array)) return index;
}
return -1;
};
}
// Returns the first index on an array-like that passes a predicate test
_.findIndex = createPredicateIndexFinder(1);
_.findLastIndex = createPredicateIndexFinder(-1);
// Use a comparator function to figure out the smallest index at which
// an object should be inserted so as to maintain order. Uses binary search.
_.sortedIndex = function(array, obj, iteratee, context) {
iteratee = cb(iteratee, context, 1);
var value = iteratee(obj);
var low = 0, high = getLength(array);
while (low < high) {
var mid = Math.floor((low + high) / 2);
if (iteratee(array[mid]) < value) low = mid + 1; else high = mid;
}
return low;
};
// Generator function to create the indexOf and lastIndexOf functions
function createIndexFinder(dir, predicateFind, sortedIndex) {
return function(array, item, idx) {
var i = 0, length = getLength(array);
if (typeof idx == 'number') {
if (dir > 0) {
i = idx >= 0 ? idx : Math.max(idx + length, i);
} else {
length = idx >= 0 ? Math.min(idx + 1, length) : idx + length + 1;
}
} else if (sortedIndex && idx && length) {
idx = sortedIndex(array, item);
return array[idx] === item ? idx : -1;
}
if (item !== item) {
idx = predicateFind(slice.call(array, i, length), _.isNaN);
return idx >= 0 ? idx + i : -1;
}
for (idx = dir > 0 ? i : length - 1; idx >= 0 && idx < length; idx += dir) {
if (array[idx] === item) return idx;
}
return -1;
};
}
// Return the position of the first occurrence of an item in an array,
// or -1 if the item is not included in the array.
// If the array is large and already in sort order, pass `true`
// for **isSorted** to use binary search.
_.indexOf = createIndexFinder(1, _.findIndex, _.sortedIndex);
_.lastIndexOf = createIndexFinder(-1, _.findLastIndex);
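  // Usage sketch: passing `true` as the third argument switches to binary
  // search on an already-sorted array (illustrative values only):
  //
  //   _.indexOf([1, 2, 3], 2);               // => 1
  //   _.indexOf([10, 20, 30, 40], 30, true); // => 2 (binary search)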
// Generate an integer Array containing an arithmetic progression. A port of
// the native Python `range()` function. See
// [the Python documentation](http://docs.python.org/library/functions.html#range).
_.range = function(start, stop, step) {
if (stop == null) {
stop = start || 0;
start = 0;
}
step = step || 1;
var length = Math.max(Math.ceil((stop - start) / step), 0);
var range = Array(length);
for (var idx = 0; idx < length; idx++, start += step) {
range[idx] = start;
}
return range;
};
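  // Usage sketch (illustrative values only):
  //
  //   _.range(5);        // => [0, 1, 2, 3, 4]
  //   _.range(0, 30, 5); // => [0, 5, 10, 15, 20, 25]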
// Function (ahem) Functions
// ------------------
// Determines whether to execute a function as a constructor
// or a normal function with the provided arguments
var executeBound = function(sourceFunc, boundFunc, context, callingContext, args) {
if (!(callingContext instanceof boundFunc)) return sourceFunc.apply(context, args);
var self = baseCreate(sourceFunc.prototype);
var result = sourceFunc.apply(self, args);
if (_.isObject(result)) return result;
return self;
};
// Create a function bound to a given object (assigning `this`, and arguments,
// optionally). Delegates to **ECMAScript 5**'s native `Function.bind` if
// available.
_.bind = function(func, context) {
if (nativeBind && func.bind === nativeBind) return nativeBind.apply(func, slice.call(arguments, 1));
if (!_.isFunction(func)) throw new TypeError('Bind must be called on a function');
var args = slice.call(arguments, 2);
var bound = function() {
return executeBound(func, bound, context, this, args.concat(slice.call(arguments)));
};
return bound;
};
// Partially apply a function by creating a version that has had some of its
// arguments pre-filled, without changing its dynamic `this` context. _ acts
// as a placeholder, allowing any combination of arguments to be pre-filled.
_.partial = function(func) {
var boundArgs = slice.call(arguments, 1);
var bound = function() {
var position = 0, length = boundArgs.length;
var args = Array(length);
for (var i = 0; i < length; i++) {
args[i] = boundArgs[i] === _ ? arguments[position++] : boundArgs[i];
}
while (position < arguments.length) args.push(arguments[position++]);
return executeBound(func, bound, this, this, args);
};
return bound;
};
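  // Usage sketch: `_` itself is the placeholder for arguments filled in later
  // (illustrative values only):
  //
  //   var subtract = function(a, b) { return b - a; };
  //   var subFrom20 = _.partial(subtract, _, 20);
  //   subFrom20(5); // => 15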
// Bind a number of an object's methods to that object. Remaining arguments
// are the method names to be bound. Useful for ensuring that all callbacks
// defined on an object belong to it.
_.bindAll = function(obj) {
var i, length = arguments.length, key;
if (length <= 1) throw new Error('bindAll must be passed function names');
for (i = 1; i < length; i++) {
key = arguments[i];
obj[key] = _.bind(obj[key], obj);
}
return obj;
};
// Memoize an expensive function by storing its results.
_.memoize = function(func, hasher) {
var memoize = function(key) {
var cache = memoize.cache;
var address = '' + (hasher ? hasher.apply(this, arguments) : key);
if (!_.has(cache, address)) cache[address] = func.apply(this, arguments);
return cache[address];
};
memoize.cache = {};
return memoize;
};
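  // Usage sketch (the recursive `fibonacci` is illustrative only):
  //
  //   var fibonacci = _.memoize(function(n) {
  //     return n < 2 ? n : fibonacci(n - 1) + fibonacci(n - 2);
  //   });
  //   fibonacci(10); // => 55, with intermediate results cached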
// Delays a function for the given number of milliseconds, and then calls
// it with the arguments supplied.
_.delay = function(func, wait) {
var args = slice.call(arguments, 2);
return setTimeout(function(){
return func.apply(null, args);
}, wait);
};
// Defers a function, scheduling it to run after the current call stack has
// cleared.
_.defer = _.partial(_.delay, _, 1);
// Returns a function, that, when invoked, will only be triggered at most once
// during a given window of time. Normally, the throttled function will run
// as much as it can, without ever going more than once per `wait` duration;
// but if you'd like to disable the execution on the leading edge, pass
// `{leading: false}`. To disable execution on the trailing edge, ditto.
_.throttle = function(func, wait, options) {
var context, args, result;
var timeout = null;
var previous = 0;
if (!options) options = {};
var later = function() {
previous = options.leading === false ? 0 : _.now();
timeout = null;
result = func.apply(context, args);
if (!timeout) context = args = null;
};
return function() {
var now = _.now();
if (!previous && options.leading === false) previous = now;
var remaining = wait - (now - previous);
context = this;
args = arguments;
if (remaining <= 0 || remaining > wait) {
if (timeout) {
clearTimeout(timeout);
timeout = null;
}
previous = now;
result = func.apply(context, args);
if (!timeout) context = args = null;
} else if (!timeout && options.trailing !== false) {
timeout = setTimeout(later, remaining);
}
return result;
};
};
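  // Usage sketch, assuming a hypothetical `updatePosition` scroll handler:
  //
  //   var throttled = _.throttle(updatePosition, 100);
  //   window.addEventListener('scroll', throttled);
  //   // Pass {leading: false} or {trailing: false} to skip either edge:
  //   var throttledNoTrail = _.throttle(updatePosition, 100, {trailing: false});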
// Returns a function, that, as long as it continues to be invoked, will not
// be triggered. The function will be called after it stops being called for
// N milliseconds. If `immediate` is passed, trigger the function on the
// leading edge, instead of the trailing.
_.debounce = function(func, wait, immediate) {
var timeout, args, context, timestamp, result;
var later = function() {
var last = _.now() - timestamp;
if (last < wait && last >= 0) {
timeout = setTimeout(later, wait - last);
} else {
timeout = null;
if (!immediate) {
result = func.apply(context, args);
if (!timeout) context = args = null;
}
}
};
return function() {
context = this;
args = arguments;
timestamp = _.now();
var callNow = immediate && !timeout;
if (!timeout) timeout = setTimeout(later, wait);
if (callNow) {
result = func.apply(context, args);
context = args = null;
}
return result;
};
};
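  // Usage sketch, assuming hypothetical `calculateLayout` and `submitForm`
  // handlers:
  //
  //   var lazyLayout = _.debounce(calculateLayout, 300);
  //   window.addEventListener('resize', lazyLayout);
  //   // With `immediate` set, the first call in a burst fires right away:
  //   var submitOnce = _.debounce(submitForm, 1000, true);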
// Returns the first function passed as an argument to the second,
// allowing you to adjust arguments, run code before and after, and
// conditionally execute the original function.
_.wrap = function(func, wrapper) {
return _.partial(wrapper, func);
};
// Returns a negated version of the passed-in predicate.
_.negate = function(predicate) {
return function() {
return !predicate.apply(this, arguments);
};
};
// Returns a function that is the composition of a list of functions, each
// consuming the return value of the function that follows.
_.compose = function() {
var args = arguments;
var start = args.length - 1;
return function() {
var i = start;
var result = args[start].apply(this, arguments);
while (i--) result = args[i].call(this, result);
return result;
};
};
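  // Usage sketch (illustrative functions only):
  //
  //   var greet = function(name) { return 'hi: ' + name; };
  //   var exclaim = function(statement) { return statement.toUpperCase() + '!'; };
  //   var welcome = _.compose(greet, exclaim);
  //   welcome('moe'); // => 'hi: MOE!'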
// Returns a function that will only be executed on and after the Nth call.
_.after = function(times, func) {
return function() {
if (--times < 1) {
return func.apply(this, arguments);
}
};
};
// Returns a function that will only be executed up to (but not including) the Nth call.
_.before = function(times, func) {
var memo;
return function() {
if (--times > 0) {
memo = func.apply(this, arguments);
}
if (times <= 1) func = null;
return memo;
};
};
// Returns a function that will be executed at most one time, no matter how
// often you call it. Useful for lazy initialization.
_.once = _.partial(_.before, 2);
// Object Functions
// ----------------
// Keys in IE < 9 that won't be iterated by `for key in ...` and thus missed.
var hasEnumBug = !{toString: null}.propertyIsEnumerable('toString');
var nonEnumerableProps = ['valueOf', 'isPrototypeOf', 'toString',
'propertyIsEnumerable', 'hasOwnProperty', 'toLocaleString'];
function collectNonEnumProps(obj, keys) {
var nonEnumIdx = nonEnumerableProps.length;
var constructor = obj.constructor;
var proto = (_.isFunction(constructor) && constructor.prototype) || ObjProto;
// Constructor is a special case.
var prop = 'constructor';
if (_.has(obj, prop) && !_.contains(keys, prop)) keys.push(prop);
while (nonEnumIdx--) {
prop = nonEnumerableProps[nonEnumIdx];
if (prop in obj && obj[prop] !== proto[prop] && !_.contains(keys, prop)) {
keys.push(prop);
}
}
}
// Retrieve the names of an object's own properties.
// Delegates to **ECMAScript 5**'s native `Object.keys`
_.keys = function(obj) {
if (!_.isObject(obj)) return [];
if (nativeKeys) return nativeKeys(obj);
var keys = [];
for (var key in obj) if (_.has(obj, key)) keys.push(key);
// Ahem, IE < 9.
if (hasEnumBug) collectNonEnumProps(obj, keys);
return keys;
};
// Retrieve all the property names of an object.
_.allKeys = function(obj) {
if (!_.isObject(obj)) return [];
var keys = [];
for (var key in obj) keys.push(key);
// Ahem, IE < 9.
if (hasEnumBug) collectNonEnumProps(obj, keys);
return keys;
};
// Retrieve the values of an object's properties.
_.values = function(obj) {
var keys = _.keys(obj);
var length = keys.length;
var values = Array(length);
for (var i = 0; i < length; i++) {
values[i] = obj[keys[i]];
}
return values;
};
// Returns the results of applying the iteratee to each element of the object
// In contrast to _.map it returns an object
_.mapObject = function(obj, iteratee, context) {
iteratee = cb(iteratee, context);
var keys = _.keys(obj),
length = keys.length,
results = {},
currentKey;
for (var index = 0; index < length; index++) {
currentKey = keys[index];
results[currentKey] = iteratee(obj[currentKey], currentKey, obj);
}
return results;
};
// Convert an object into a list of `[key, value]` pairs.
_.pairs = function(obj) {
var keys = _.keys(obj);
var length = keys.length;
var pairs = Array(length);
for (var i = 0; i < length; i++) {
pairs[i] = [keys[i], obj[keys[i]]];
}
return pairs;
};
// Invert the keys and values of an object. The values must be serializable.
_.invert = function(obj) {
var result = {};
var keys = _.keys(obj);
for (var i = 0, length = keys.length; i < length; i++) {
result[obj[keys[i]]] = keys[i];
}
return result;
};
// Return a sorted list of the function names available on the object.
// Aliased as `methods`
_.functions = _.methods = function(obj) {
var names = [];
for (var key in obj) {
if (_.isFunction(obj[key])) names.push(key);
}
return names.sort();
};
// Extend a given object with all the properties in passed-in object(s).
_.extend = createAssigner(_.allKeys);
// Assigns a given object with all the own properties in the passed-in object(s)
// (https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object/assign)
_.extendOwn = _.assign = createAssigner(_.keys);
// Returns the first key on an object that passes a predicate test
_.findKey = function(obj, predicate, context) {
predicate = cb(predicate, context);
var keys = _.keys(obj), key;
for (var i = 0, length = keys.length; i < length; i++) {
key = keys[i];
if (predicate(obj[key], key, obj)) return key;
}
};
// Return a copy of the object only containing the whitelisted properties.
_.pick = function(object, oiteratee, context) {
var result = {}, obj = object, iteratee, keys;
if (obj == null) return result;
if (_.isFunction(oiteratee)) {
keys = _.allKeys(obj);
iteratee = optimizeCb(oiteratee, context);
} else {
keys = flatten(arguments, false, false, 1);
iteratee = function(value, key, obj) { return key in obj; };
obj = Object(obj);
}
for (var i = 0, length = keys.length; i < length; i++) {
var key = keys[i];
var value = obj[key];
if (iteratee(value, key, obj)) result[key] = value;
}
return result;
};
// Return a copy of the object without the blacklisted properties.
_.omit = function(obj, iteratee, context) {
if (_.isFunction(iteratee)) {
iteratee = _.negate(iteratee);
} else {
var keys = _.map(flatten(arguments, false, false, 1), String);
iteratee = function(value, key) {
return !_.contains(keys, key);
};
}
return _.pick(obj, iteratee, context);
};
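  // Usage sketch (illustrative values only):
  //
  //   _.pick({name: 'moe', age: 50, userid: 'moe1'}, 'name', 'age');
  //   // => {name: 'moe', age: 50}
  //   _.omit({name: 'moe', age: 50, userid: 'moe1'}, 'userid');
  //   // => {name: 'moe', age: 50}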
// Fill in a given object with default properties.
_.defaults = createAssigner(_.allKeys, true);
// Creates an object that inherits from the given prototype object.
// If additional properties are provided then they will be added to the
// created object.
_.create = function(prototype, props) {
var result = baseCreate(prototype);
if (props) _.extendOwn(result, props);
return result;
};
// Create a (shallow-cloned) duplicate of an object.
_.clone = function(obj) {
if (!_.isObject(obj)) return obj;
return _.isArray(obj) ? obj.slice() : _.extend({}, obj);
};
// Invokes interceptor with the obj, and then returns obj.
// The primary purpose of this method is to "tap into" a method chain, in
// order to perform operations on intermediate results within the chain.
_.tap = function(obj, interceptor) {
interceptor(obj);
return obj;
};
// Returns whether an object has a given set of `key:value` pairs.
_.isMatch = function(object, attrs) {
var keys = _.keys(attrs), length = keys.length;
if (object == null) return !length;
var obj = Object(object);
for (var i = 0; i < length; i++) {
var key = keys[i];
if (attrs[key] !== obj[key] || !(key in obj)) return false;
}
return true;
};
// Internal recursive comparison function for `isEqual`.
var eq = function(a, b, aStack, bStack) {
// Identical objects are equal. `0 === -0`, but they aren't identical.
// See the [Harmony `egal` proposal](http://wiki.ecmascript.org/doku.php?id=harmony:egal).
if (a === b) return a !== 0 || 1 / a === 1 / b;
// A strict comparison is necessary because `null == undefined`.
if (a == null || b == null) return a === b;
// Unwrap any wrapped objects.
if (a instanceof _) a = a._wrapped;
if (b instanceof _) b = b._wrapped;
// Compare `[[Class]]` names.
var className = toString.call(a);
if (className !== toString.call(b)) return false;
switch (className) {
// Strings, numbers, regular expressions, dates, and booleans are compared by value.
case '[object RegExp]':
// RegExps are coerced to strings for comparison (Note: '' + /a/i === '/a/i')
case '[object String]':
// Primitives and their corresponding object wrappers are equivalent; thus, `"5"` is
// equivalent to `new String("5")`.
return '' + a === '' + b;
case '[object Number]':
// `NaN`s are equivalent, but non-reflexive.
// Object(NaN) is equivalent to NaN
if (+a !== +a) return +b !== +b;
// An `egal` comparison is performed for other numeric values.
return +a === 0 ? 1 / +a === 1 / b : +a === +b;
case '[object Date]':
case '[object Boolean]':
// Coerce dates and booleans to numeric primitive values. Dates are compared by their
// millisecond representations. Note that invalid dates with millisecond representations
// of `NaN` are not equivalent.
return +a === +b;
}
var areArrays = className === '[object Array]';
if (!areArrays) {
if (typeof a != 'object' || typeof b != 'object') return false;
// Objects with different constructors are not equivalent, but `Object`s or `Array`s
// from different frames are.
var aCtor = a.constructor, bCtor = b.constructor;
if (aCtor !== bCtor && !(_.isFunction(aCtor) && aCtor instanceof aCtor &&
_.isFunction(bCtor) && bCtor instanceof bCtor)
&& ('constructor' in a && 'constructor' in b)) {
return false;
}
}
// Assume equality for cyclic structures. The algorithm for detecting cyclic
// structures is adapted from ES 5.1 section 15.12.3, abstract operation `JO`.
// Initializing stack of traversed objects.
// It's done here since we only need them for objects and arrays comparison.
aStack = aStack || [];
bStack = bStack || [];
var length = aStack.length;
while (length--) {
// Linear search. Performance is inversely proportional to the number of
// unique nested structures.
if (aStack[length] === a) return bStack[length] === b;
}
// Add the first object to the stack of traversed objects.
aStack.push(a);
bStack.push(b);
// Recursively compare objects and arrays.
if (areArrays) {
// Compare array lengths to determine if a deep comparison is necessary.
length = a.length;
if (length !== b.length) return false;
// Deep compare the contents, ignoring non-numeric properties.
while (length--) {
if (!eq(a[length], b[length], aStack, bStack)) return false;
}
} else {
// Deep compare objects.
var keys = _.keys(a), key;
length = keys.length;
// Ensure that both objects contain the same number of properties before comparing deep equality.
if (_.keys(b).length !== length) return false;
while (length--) {
// Deep compare each member
key = keys[length];
if (!(_.has(b, key) && eq(a[key], b[key], aStack, bStack))) return false;
}
}
// Remove the first object from the stack of traversed objects.
aStack.pop();
bStack.pop();
return true;
};
// Perform a deep comparison to check if two objects are equal.
_.isEqual = function(a, b) {
return eq(a, b);
};
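  // Usage sketch (illustrative values only):
  //
  //   var stooge = {name: 'moe', luckyNumbers: [13, 27, 34]};
  //   var copy   = {name: 'moe', luckyNumbers: [13, 27, 34]};
  //   stooge === copy;         // => false
  //   _.isEqual(stooge, copy); // => true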
// Is a given array, string, or object empty?
// An "empty" object has no enumerable own-properties.
_.isEmpty = function(obj) {
if (obj == null) return true;
if (isArrayLike(obj) && (_.isArray(obj) || _.isString(obj) || _.isArguments(obj))) return obj.length === 0;
return _.keys(obj).length === 0;
};
// Is a given value a DOM element?
_.isElement = function(obj) {
return !!(obj && obj.nodeType === 1);
};
// Is a given value an array?
// Delegates to ECMA5's native Array.isArray
_.isArray = nativeIsArray || function(obj) {
return toString.call(obj) === '[object Array]';
};
// Is a given variable an object?
_.isObject = function(obj) {
var type = typeof obj;
return type === 'function' || type === 'object' && !!obj;
};
// Add some isType methods: isArguments, isFunction, isString, isNumber, isDate, isRegExp, isError.
_.each(['Arguments', 'Function', 'String', 'Number', 'Date', 'RegExp', 'Error'], function(name) {
_['is' + name] = function(obj) {
return toString.call(obj) === '[object ' + name + ']';
};
});
// Define a fallback version of the method in browsers (ahem, IE < 9), where
// there isn't any inspectable "Arguments" type.
if (!_.isArguments(arguments)) {
_.isArguments = function(obj) {
return _.has(obj, 'callee');
};
}
// Optimize `isFunction` if appropriate. Work around some typeof bugs in old v8,
// IE 11 (#1621), and in Safari 8 (#1929).
if (typeof /./ != 'function' && typeof Int8Array != 'object') {
_.isFunction = function(obj) {
return typeof obj == 'function' || false;
};
}
// Is a given object a finite number?
_.isFinite = function(obj) {
return isFinite(obj) && !isNaN(parseFloat(obj));
};
// Is the given value `NaN`? (NaN is the only number which does not equal itself).
_.isNaN = function(obj) {
return _.isNumber(obj) && obj !== +obj;
};
// Is a given value a boolean?
_.isBoolean = function(obj) {
return obj === true || obj === false || toString.call(obj) === '[object Boolean]';
};
// Is a given value equal to null?
_.isNull = function(obj) {
return obj === null;
};
// Is a given variable undefined?
_.isUndefined = function(obj) {
return obj === void 0;
};
// Shortcut function for checking if an object has a given property directly
// on itself (in other words, not on a prototype).
_.has = function(obj, key) {
return obj != null && hasOwnProperty.call(obj, key);
};
// Utility Functions
// -----------------
// Run Underscore.js in *noConflict* mode, returning the `_` variable to its
// previous owner. Returns a reference to the Underscore object.
_.noConflict = function() {
root._ = previousUnderscore;
return this;
};
// Keep the identity function around for default iteratees.
_.identity = function(value) {
return value;
};
// Predicate-generating functions. Often useful outside of Underscore.
_.constant = function(value) {
return function() {
return value;
};
};
_.noop = function(){};
_.property = property;
// Generates a function for a given object that returns a given property.
_.propertyOf = function(obj) {
return obj == null ? function(){} : function(key) {
return obj[key];
};
};
// Returns a predicate for checking whether an object has a given set of
// `key:value` pairs.
_.matcher = _.matches = function(attrs) {
attrs = _.extendOwn({}, attrs);
return function(obj) {
return _.isMatch(obj, attrs);
};
};
// Run a function **n** times.
_.times = function(n, iteratee, context) {
var accum = Array(Math.max(0, n));
iteratee = optimizeCb(iteratee, context, 1);
for (var i = 0; i < n; i++) accum[i] = iteratee(i);
return accum;
};
// Return a random integer between min and max (inclusive).
_.random = function(min, max) {
if (max == null) {
max = min;
min = 0;
}
return min + Math.floor(Math.random() * (max - min + 1));
};
// A (possibly faster) way to get the current timestamp as an integer.
_.now = Date.now || function() {
return new Date().getTime();
};
// List of HTML entities for escaping.
var escapeMap = {
    '&': '&',
    '<': '<',
    '>': '>',
    '"': '"',
    "'": ''',
    '`': '`'
};
var unescapeMap = _.invert(escapeMap);
// Functions for escaping and unescaping strings to/from HTML interpolation.
var createEscaper = function(map) {
var escaper = function(match) {
return map[match];
};
// Regexes for identifying a key that needs to be escaped
var source = '(?:' + _.keys(map).join('|') + ')';
var testRegexp = RegExp(source);
var replaceRegexp = RegExp(source, 'g');
return function(string) {
string = string == null ? '' : '' + string;
return testRegexp.test(string) ? string.replace(replaceRegexp, escaper) : string;
};
};
_.escape = createEscaper(escapeMap);
_.unescape = createEscaper(unescapeMap);
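  // Usage sketch (illustrative values only):
  //
  //   _.escape('Curly, Larry & Moe');     // => 'Curly, Larry & Moe'
  //   _.unescape('Curly, Larry & Moe'); // => 'Curly, Larry & Moe'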
// If the value of the named `property` is a function then invoke it with the
// `object` as context; otherwise, return it.
_.result = function(object, property, fallback) {
var value = object == null ? void 0 : object[property];
if (value === void 0) {
value = fallback;
}
return _.isFunction(value) ? value.call(object) : value;
};
// Generate a unique integer id (unique within the entire client session).
// Useful for temporary DOM ids.
var idCounter = 0;
_.uniqueId = function(prefix) {
var id = ++idCounter + '';
return prefix ? prefix + id : id;
};
// By default, Underscore uses ERB-style template delimiters, change the
// following template settings to use alternative delimiters.
_.templateSettings = {
evaluate : /<%([\s\S]+?)%>/g,
interpolate : /<%=([\s\S]+?)%>/g,
escape : /<%-([\s\S]+?)%>/g
};
// When customizing `templateSettings`, if you don't want to define an
// interpolation, evaluation or escaping regex, we need one that is
// guaranteed not to match.
var noMatch = /(.)^/;
// Certain characters need to be escaped so that they can be put into a
// string literal.
var escapes = {
"'": "'",
'\\': '\\',
'\r': 'r',
'\n': 'n',
'\u2028': 'u2028',
'\u2029': 'u2029'
};
var escaper = /\\|'|\r|\n|\u2028|\u2029/g;
var escapeChar = function(match) {
return '\\' + escapes[match];
};
// JavaScript micro-templating, similar to John Resig's implementation.
// Underscore templating handles arbitrary delimiters, preserves whitespace,
// and correctly escapes quotes within interpolated code.
// NB: `oldSettings` only exists for backwards compatibility.
_.template = function(text, settings, oldSettings) {
if (!settings && oldSettings) settings = oldSettings;
settings = _.defaults({}, settings, _.templateSettings);
// Combine delimiters into one regular expression via alternation.
var matcher = RegExp([
(settings.escape || noMatch).source,
(settings.interpolate || noMatch).source,
(settings.evaluate || noMatch).source
].join('|') + '|$', 'g');
// Compile the template source, escaping string literals appropriately.
var index = 0;
var source = "__p+='";
text.replace(matcher, function(match, escape, interpolate, evaluate, offset) {
source += text.slice(index, offset).replace(escaper, escapeChar);
index = offset + match.length;
if (escape) {
source += "'+\n((__t=(" + escape + "))==null?'':_.escape(__t))+\n'";
} else if (interpolate) {
source += "'+\n((__t=(" + interpolate + "))==null?'':__t)+\n'";
} else if (evaluate) {
source += "';\n" + evaluate + "\n__p+='";
}
      // Adobe VMs need the match returned to produce the correct offset.
return match;
});
source += "';\n";
// If a variable is not specified, place data values in local scope.
if (!settings.variable) source = 'with(obj||{}){\n' + source + '}\n';
source = "var __t,__p='',__j=Array.prototype.join," +
"print=function(){__p+=__j.call(arguments,'');};\n" +
source + 'return __p;\n';
try {
var render = new Function(settings.variable || 'obj', '_', source);
} catch (e) {
e.source = source;
throw e;
}
var template = function(data) {
return render.call(this, data, _);
};
// Provide the compiled source as a convenience for precompilation.
var argument = settings.variable || 'obj';
template.source = 'function(' + argument + '){\n' + source + '}';
return template;
};
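  // Usage sketch (illustrative template and data only):
  //
  //   var compiled = _.template('hello: <%= name %>');
  //   compiled({name: 'moe'}); // => 'hello: moe'
  //   // <%- %> escapes interpolated values:
  //   _.template('<b><%- value %></b>')({value: '<script>'});
  //   // => '<b><script></b>'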
// Add a "chain" function. Start chaining a wrapped Underscore object.
_.chain = function(obj) {
var instance = _(obj);
instance._chain = true;
return instance;
};
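  // Usage sketch (illustrative values only):
  //
  //   _.chain([1, 2, 3, 200])
  //     .filter(function(num) { return num % 2 === 0; })
  //     .map(function(num) { return num * num; })
  //     .value();
  //   // => [4, 40000]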
// OOP
// ---------------
// If Underscore is called as a function, it returns a wrapped object that
// can be used OO-style. This wrapper holds altered versions of all the
// underscore functions. Wrapped objects may be chained.
// Helper function to continue chaining intermediate results.
var result = function(instance, obj) {
return instance._chain ? _(obj).chain() : obj;
};
// Add your own custom functions to the Underscore object.
_.mixin = function(obj) {
_.each(_.functions(obj), function(name) {
var func = _[name] = obj[name];
_.prototype[name] = function() {
var args = [this._wrapped];
push.apply(args, arguments);
return result(this, func.apply(_, args));
};
});
};
// Add all of the Underscore functions to the wrapper object.
_.mixin(_);
// Add all mutator Array functions to the wrapper.
_.each(['pop', 'push', 'reverse', 'shift', 'sort', 'splice', 'unshift'], function(name) {
var method = ArrayProto[name];
_.prototype[name] = function() {
var obj = this._wrapped;
method.apply(obj, arguments);
if ((name === 'shift' || name === 'splice') && obj.length === 0) delete obj[0];
return result(this, obj);
};
});
// Add all accessor Array functions to the wrapper.
_.each(['concat', 'join', 'slice'], function(name) {
var method = ArrayProto[name];
_.prototype[name] = function() {
return result(this, method.apply(this._wrapped, arguments));
};
});
// Extracts the result from a wrapped and chained object.
_.prototype.value = function() {
return this._wrapped;
};
// Provide unwrapping proxy for some methods used in engine operations
// such as arithmetic and JSON stringification.
_.prototype.valueOf = _.prototype.toJSON = _.prototype.value;
_.prototype.toString = function() {
return '' + this._wrapped;
};
// AMD registration happens at the end for compatibility with AMD loaders
// that may not enforce next-turn semantics on modules. Even though general
// practice for AMD registration is to be anonymous, underscore registers
// as a named module because, like jQuery, it is a base library that is
// popular enough to be bundled in a third party lib, but not be part of
// an AMD load request. Those cases could generate an error when an
// anonymous define() is called outside of a loader request.
if (typeof define === 'function' && define.amd) {
define('underscore', [], function() {
return _;
});
}
}.call(this)); | yummy-sphinx-theme | /yummy_sphinx_theme-0.1.1.tar.gz/yummy_sphinx_theme-0.1.1/yummy_sphinx_theme/static/js/underscore.js | underscore.js |
(function(){function n(n){function t(t,r,e,u,i,o){for(;i>=0&&o>i;i+=n){var a=u?u[i]:i;e=r(e,t[a],a,t)}return e}return function(r,e,u,i){e=b(e,i,4);var o=!k(r)&&m.keys(r),a=(o||r).length,c=n>0?0:a-1;return arguments.length<3&&(u=r[o?o[c]:c],c+=n),t(r,e,u,o,c,a)}}function t(n){return function(t,r,e){r=x(r,e);for(var u=O(t),i=n>0?0:u-1;i>=0&&u>i;i+=n)if(r(t[i],i,t))return i;return-1}}function r(n,t,r){return function(e,u,i){var o=0,a=O(e);if("number"==typeof i)n>0?o=i>=0?i:Math.max(i+a,o):a=i>=0?Math.min(i+1,a):i+a+1;else if(r&&i&&a)return i=r(e,u),e[i]===u?i:-1;if(u!==u)return i=t(l.call(e,o,a),m.isNaN),i>=0?i+o:-1;for(i=n>0?o:a-1;i>=0&&a>i;i+=n)if(e[i]===u)return i;return-1}}function e(n,t){var r=I.length,e=n.constructor,u=m.isFunction(e)&&e.prototype||a,i="constructor";for(m.has(n,i)&&!m.contains(t,i)&&t.push(i);r--;)i=I[r],i in n&&n[i]!==u[i]&&!m.contains(t,i)&&t.push(i)}var u=this,i=u._,o=Array.prototype,a=Object.prototype,c=Function.prototype,f=o.push,l=o.slice,s=a.toString,p=a.hasOwnProperty,h=Array.isArray,v=Object.keys,g=c.bind,y=Object.create,d=function(){},m=function(n){return n instanceof m?n:this instanceof m?void(this._wrapped=n):new m(n)};"undefined"!=typeof exports?("undefined"!=typeof module&&module.exports&&(exports=module.exports=m),exports._=m):u._=m,m.VERSION="1.8.3";var b=function(n,t,r){if(t===void 0)return n;switch(null==r?3:r){case 1:return function(r){return n.call(t,r)};case 2:return function(r,e){return n.call(t,r,e)};case 3:return function(r,e,u){return n.call(t,r,e,u)};case 4:return function(r,e,u,i){return n.call(t,r,e,u,i)}}return function(){return n.apply(t,arguments)}},x=function(n,t,r){return null==n?m.identity:m.isFunction(n)?b(n,t,r):m.isObject(n)?m.matcher(n):m.property(n)};m.iteratee=function(n,t){return x(n,t,1/0)};var _=function(n,t){return function(r){var e=arguments.length;if(2>e||null==r)return r;for(var u=1;e>u;u++)for(var i=arguments[u],o=n(i),a=o.length,c=0;a>c;c++){var f=o[c];t&&r[f]!==void 0||(r[f]=i[f])}return r}},j=function(n){if(!m.isObject(n))return{};if(y)return y(n);d.prototype=n;var t=new d;return d.prototype=null,t},w=function(n){return function(t){return null==t?void 0:t[n]}},A=Math.pow(2,53)-1,O=w("length"),k=function(n){var t=O(n);return"number"==typeof t&&t>=0&&A>=t};m.each=m.forEach=function(n,t,r){t=b(t,r);var e,u;if(k(n))for(e=0,u=n.length;u>e;e++)t(n[e],e,n);else{var i=m.keys(n);for(e=0,u=i.length;u>e;e++)t(n[i[e]],i[e],n)}return n},m.map=m.collect=function(n,t,r){t=x(t,r);for(var e=!k(n)&&m.keys(n),u=(e||n).length,i=Array(u),o=0;u>o;o++){var a=e?e[o]:o;i[o]=t(n[a],a,n)}return i},m.reduce=m.foldl=m.inject=n(1),m.reduceRight=m.foldr=n(-1),m.find=m.detect=function(n,t,r){var e;return e=k(n)?m.findIndex(n,t,r):m.findKey(n,t,r),e!==void 0&&e!==-1?n[e]:void 0},m.filter=m.select=function(n,t,r){var e=[];return t=x(t,r),m.each(n,function(n,r,u){t(n,r,u)&&e.push(n)}),e},m.reject=function(n,t,r){return m.filter(n,m.negate(x(t)),r)},m.every=m.all=function(n,t,r){t=x(t,r);for(var e=!k(n)&&m.keys(n),u=(e||n).length,i=0;u>i;i++){var o=e?e[i]:i;if(!t(n[o],o,n))return!1}return!0},m.some=m.any=function(n,t,r){t=x(t,r);for(var e=!k(n)&&m.keys(n),u=(e||n).length,i=0;u>i;i++){var o=e?e[i]:i;if(t(n[o],o,n))return!0}return!1},m.contains=m.includes=m.include=function(n,t,r,e){return k(n)||(n=m.values(n)),("number"!=typeof r||e)&&(r=0),m.indexOf(n,t,r)>=0},m.invoke=function(n,t){var r=l.call(arguments,2),e=m.isFunction(t);return m.map(n,function(n){var u=e?t:n[t];return null==u?u:u.apply(n,r)})},m.pluck=function(n,t){return 
m.map(n,m.property(t))},m.where=function(n,t){return m.filter(n,m.matcher(t))},m.findWhere=function(n,t){return m.find(n,m.matcher(t))},m.max=function(n,t,r){var e,u,i=-1/0,o=-1/0;if(null==t&&null!=n){n=k(n)?n:m.values(n);for(var a=0,c=n.length;c>a;a++)e=n[a],e>i&&(i=e)}else t=x(t,r),m.each(n,function(n,r,e){u=t(n,r,e),(u>o||u===-1/0&&i===-1/0)&&(i=n,o=u)});return i},m.min=function(n,t,r){var e,u,i=1/0,o=1/0;if(null==t&&null!=n){n=k(n)?n:m.values(n);for(var a=0,c=n.length;c>a;a++)e=n[a],i>e&&(i=e)}else t=x(t,r),m.each(n,function(n,r,e){u=t(n,r,e),(o>u||1/0===u&&1/0===i)&&(i=n,o=u)});return i},m.shuffle=function(n){for(var t,r=k(n)?n:m.values(n),e=r.length,u=Array(e),i=0;e>i;i++)t=m.random(0,i),t!==i&&(u[i]=u[t]),u[t]=r[i];return u},m.sample=function(n,t,r){return null==t||r?(k(n)||(n=m.values(n)),n[m.random(n.length-1)]):m.shuffle(n).slice(0,Math.max(0,t))},m.sortBy=function(n,t,r){return t=x(t,r),m.pluck(m.map(n,function(n,r,e){return{value:n,index:r,criteria:t(n,r,e)}}).sort(function(n,t){var r=n.criteria,e=t.criteria;if(r!==e){if(r>e||r===void 0)return 1;if(e>r||e===void 0)return-1}return n.index-t.index}),"value")};var F=function(n){return function(t,r,e){var u={};return r=x(r,e),m.each(t,function(e,i){var o=r(e,i,t);n(u,e,o)}),u}};m.groupBy=F(function(n,t,r){m.has(n,r)?n[r].push(t):n[r]=[t]}),m.indexBy=F(function(n,t,r){n[r]=t}),m.countBy=F(function(n,t,r){m.has(n,r)?n[r]++:n[r]=1}),m.toArray=function(n){return n?m.isArray(n)?l.call(n):k(n)?m.map(n,m.identity):m.values(n):[]},m.size=function(n){return null==n?0:k(n)?n.length:m.keys(n).length},m.partition=function(n,t,r){t=x(t,r);var e=[],u=[];return m.each(n,function(n,r,i){(t(n,r,i)?e:u).push(n)}),[e,u]},m.first=m.head=m.take=function(n,t,r){return null==n?void 0:null==t||r?n[0]:m.initial(n,n.length-t)},m.initial=function(n,t,r){return l.call(n,0,Math.max(0,n.length-(null==t||r?1:t)))},m.last=function(n,t,r){return null==n?void 0:null==t||r?n[n.length-1]:m.rest(n,Math.max(0,n.length-t))},m.rest=m.tail=m.drop=function(n,t,r){return l.call(n,null==t||r?1:t)},m.compact=function(n){return m.filter(n,m.identity)};var S=function(n,t,r,e){for(var u=[],i=0,o=e||0,a=O(n);a>o;o++){var c=n[o];if(k(c)&&(m.isArray(c)||m.isArguments(c))){t||(c=S(c,t,r));var f=0,l=c.length;for(u.length+=l;l>f;)u[i++]=c[f++]}else r||(u[i++]=c)}return u};m.flatten=function(n,t){return S(n,t,!1)},m.without=function(n){return m.difference(n,l.call(arguments,1))},m.uniq=m.unique=function(n,t,r,e){m.isBoolean(t)||(e=r,r=t,t=!1),null!=r&&(r=x(r,e));for(var u=[],i=[],o=0,a=O(n);a>o;o++){var c=n[o],f=r?r(c,o,n):c;t?(o&&i===f||u.push(c),i=f):r?m.contains(i,f)||(i.push(f),u.push(c)):m.contains(u,c)||u.push(c)}return u},m.union=function(){return m.uniq(S(arguments,!0,!0))},m.intersection=function(n){for(var t=[],r=arguments.length,e=0,u=O(n);u>e;e++){var i=n[e];if(!m.contains(t,i)){for(var o=1;r>o&&m.contains(arguments[o],i);o++);o===r&&t.push(i)}}return t},m.difference=function(n){var t=S(arguments,!0,!0,1);return m.filter(n,function(n){return!m.contains(t,n)})},m.zip=function(){return m.unzip(arguments)},m.unzip=function(n){for(var t=n&&m.max(n,O).length||0,r=Array(t),e=0;t>e;e++)r[e]=m.pluck(n,e);return r},m.object=function(n,t){for(var r={},e=0,u=O(n);u>e;e++)t?r[n[e]]=t[e]:r[n[e][0]]=n[e][1];return r},m.findIndex=t(1),m.findLastIndex=t(-1),m.sortedIndex=function(n,t,r,e){r=x(r,e,1);for(var u=r(t),i=0,o=O(n);o>i;){var a=Math.floor((i+o)/2);r(n[a])<u?i=a+1:o=a}return 
i},m.indexOf=r(1,m.findIndex,m.sortedIndex),m.lastIndexOf=r(-1,m.findLastIndex),m.range=function(n,t,r){null==t&&(t=n||0,n=0),r=r||1;for(var e=Math.max(Math.ceil((t-n)/r),0),u=Array(e),i=0;e>i;i++,n+=r)u[i]=n;return u};var E=function(n,t,r,e,u){if(!(e instanceof t))return n.apply(r,u);var i=j(n.prototype),o=n.apply(i,u);return m.isObject(o)?o:i};m.bind=function(n,t){if(g&&n.bind===g)return g.apply(n,l.call(arguments,1));if(!m.isFunction(n))throw new TypeError("Bind must be called on a function");var r=l.call(arguments,2),e=function(){return E(n,e,t,this,r.concat(l.call(arguments)))};return e},m.partial=function(n){var t=l.call(arguments,1),r=function(){for(var e=0,u=t.length,i=Array(u),o=0;u>o;o++)i[o]=t[o]===m?arguments[e++]:t[o];for(;e<arguments.length;)i.push(arguments[e++]);return E(n,r,this,this,i)};return r},m.bindAll=function(n){var t,r,e=arguments.length;if(1>=e)throw new Error("bindAll must be passed function names");for(t=1;e>t;t++)r=arguments[t],n[r]=m.bind(n[r],n);return n},m.memoize=function(n,t){var r=function(e){var u=r.cache,i=""+(t?t.apply(this,arguments):e);return m.has(u,i)||(u[i]=n.apply(this,arguments)),u[i]};return r.cache={},r},m.delay=function(n,t){var r=l.call(arguments,2);return setTimeout(function(){return n.apply(null,r)},t)},m.defer=m.partial(m.delay,m,1),m.throttle=function(n,t,r){var e,u,i,o=null,a=0;r||(r={});var c=function(){a=r.leading===!1?0:m.now(),o=null,i=n.apply(e,u),o||(e=u=null)};return function(){var f=m.now();a||r.leading!==!1||(a=f);var l=t-(f-a);return e=this,u=arguments,0>=l||l>t?(o&&(clearTimeout(o),o=null),a=f,i=n.apply(e,u),o||(e=u=null)):o||r.trailing===!1||(o=setTimeout(c,l)),i}},m.debounce=function(n,t,r){var e,u,i,o,a,c=function(){var f=m.now()-o;t>f&&f>=0?e=setTimeout(c,t-f):(e=null,r||(a=n.apply(i,u),e||(i=u=null)))};return function(){i=this,u=arguments,o=m.now();var f=r&&!e;return e||(e=setTimeout(c,t)),f&&(a=n.apply(i,u),i=u=null),a}},m.wrap=function(n,t){return m.partial(t,n)},m.negate=function(n){return function(){return!n.apply(this,arguments)}},m.compose=function(){var n=arguments,t=n.length-1;return function(){for(var r=t,e=n[t].apply(this,arguments);r--;)e=n[r].call(this,e);return e}},m.after=function(n,t){return function(){return--n<1?t.apply(this,arguments):void 0}},m.before=function(n,t){var r;return function(){return--n>0&&(r=t.apply(this,arguments)),1>=n&&(t=null),r}},m.once=m.partial(m.before,2);var M=!{toString:null}.propertyIsEnumerable("toString"),I=["valueOf","isPrototypeOf","toString","propertyIsEnumerable","hasOwnProperty","toLocaleString"];m.keys=function(n){if(!m.isObject(n))return[];if(v)return v(n);var t=[];for(var r in n)m.has(n,r)&&t.push(r);return M&&e(n,t),t},m.allKeys=function(n){if(!m.isObject(n))return[];var t=[];for(var r in n)t.push(r);return M&&e(n,t),t},m.values=function(n){for(var t=m.keys(n),r=t.length,e=Array(r),u=0;r>u;u++)e[u]=n[t[u]];return e},m.mapObject=function(n,t,r){t=x(t,r);for(var e,u=m.keys(n),i=u.length,o={},a=0;i>a;a++)e=u[a],o[e]=t(n[e],e,n);return o},m.pairs=function(n){for(var t=m.keys(n),r=t.length,e=Array(r),u=0;r>u;u++)e[u]=[t[u],n[t[u]]];return e},m.invert=function(n){for(var t={},r=m.keys(n),e=0,u=r.length;u>e;e++)t[n[r[e]]]=r[e];return t},m.functions=m.methods=function(n){var t=[];for(var r in n)m.isFunction(n[r])&&t.push(r);return t.sort()},m.extend=_(m.allKeys),m.extendOwn=m.assign=_(m.keys),m.findKey=function(n,t,r){t=x(t,r);for(var e,u=m.keys(n),i=0,o=u.length;o>i;i++)if(e=u[i],t(n[e],e,n))return e},m.pick=function(n,t,r){var e,u,i={},o=n;if(null==o)return 
i;m.isFunction(t)?(u=m.allKeys(o),e=b(t,r)):(u=S(arguments,!1,!1,1),e=function(n,t,r){return t in r},o=Object(o));for(var a=0,c=u.length;c>a;a++){var f=u[a],l=o[f];e(l,f,o)&&(i[f]=l)}return i},m.omit=function(n,t,r){if(m.isFunction(t))t=m.negate(t);else{var e=m.map(S(arguments,!1,!1,1),String);t=function(n,t){return!m.contains(e,t)}}return m.pick(n,t,r)},m.defaults=_(m.allKeys,!0),m.create=function(n,t){var r=j(n);return t&&m.extendOwn(r,t),r},m.clone=function(n){return m.isObject(n)?m.isArray(n)?n.slice():m.extend({},n):n},m.tap=function(n,t){return t(n),n},m.isMatch=function(n,t){var r=m.keys(t),e=r.length;if(null==n)return!e;for(var u=Object(n),i=0;e>i;i++){var o=r[i];if(t[o]!==u[o]||!(o in u))return!1}return!0};var N=function(n,t,r,e){if(n===t)return 0!==n||1/n===1/t;if(null==n||null==t)return n===t;n instanceof m&&(n=n._wrapped),t instanceof m&&(t=t._wrapped);var u=s.call(n);if(u!==s.call(t))return!1;switch(u){case"[object RegExp]":case"[object String]":return""+n==""+t;case"[object Number]":return+n!==+n?+t!==+t:0===+n?1/+n===1/t:+n===+t;case"[object Date]":case"[object Boolean]":return+n===+t}var i="[object Array]"===u;if(!i){if("object"!=typeof n||"object"!=typeof t)return!1;var o=n.constructor,a=t.constructor;if(o!==a&&!(m.isFunction(o)&&o instanceof o&&m.isFunction(a)&&a instanceof a)&&"constructor"in n&&"constructor"in t)return!1}r=r||[],e=e||[];for(var c=r.length;c--;)if(r[c]===n)return e[c]===t;if(r.push(n),e.push(t),i){if(c=n.length,c!==t.length)return!1;for(;c--;)if(!N(n[c],t[c],r,e))return!1}else{var f,l=m.keys(n);if(c=l.length,m.keys(t).length!==c)return!1;for(;c--;)if(f=l[c],!m.has(t,f)||!N(n[f],t[f],r,e))return!1}return r.pop(),e.pop(),!0};m.isEqual=function(n,t){return N(n,t)},m.isEmpty=function(n){return null==n?!0:k(n)&&(m.isArray(n)||m.isString(n)||m.isArguments(n))?0===n.length:0===m.keys(n).length},m.isElement=function(n){return!(!n||1!==n.nodeType)},m.isArray=h||function(n){return"[object Array]"===s.call(n)},m.isObject=function(n){var t=typeof n;return"function"===t||"object"===t&&!!n},m.each(["Arguments","Function","String","Number","Date","RegExp","Error"],function(n){m["is"+n]=function(t){return s.call(t)==="[object "+n+"]"}}),m.isArguments(arguments)||(m.isArguments=function(n){return m.has(n,"callee")}),"function"!=typeof/./&&"object"!=typeof Int8Array&&(m.isFunction=function(n){return"function"==typeof n||!1}),m.isFinite=function(n){return isFinite(n)&&!isNaN(parseFloat(n))},m.isNaN=function(n){return m.isNumber(n)&&n!==+n},m.isBoolean=function(n){return n===!0||n===!1||"[object Boolean]"===s.call(n)},m.isNull=function(n){return null===n},m.isUndefined=function(n){return n===void 0},m.has=function(n,t){return null!=n&&p.call(n,t)},m.noConflict=function(){return u._=i,this},m.identity=function(n){return n},m.constant=function(n){return function(){return n}},m.noop=function(){},m.property=w,m.propertyOf=function(n){return null==n?function(){}:function(t){return n[t]}},m.matcher=m.matches=function(n){return n=m.extendOwn({},n),function(t){return m.isMatch(t,n)}},m.times=function(n,t,r){var e=Array(Math.max(0,n));t=b(t,r,1);for(var u=0;n>u;u++)e[u]=t(u);return e},m.random=function(n,t){return null==t&&(t=n,n=0),n+Math.floor(Math.random()*(t-n+1))},m.now=Date.now||function(){return(new Date).getTime()};var B={"&":"&","<":"<",">":">",'"':""","'":"'","`":"`"},T=m.invert(B),R=function(n){var t=function(t){return n[t]},r="(?:"+m.keys(n).join("|")+")",e=RegExp(r),u=RegExp(r,"g");return function(n){return 
n=null==n?"":""+n,e.test(n)?n.replace(u,t):n}};m.escape=R(B),m.unescape=R(T),m.result=function(n,t,r){var e=null==n?void 0:n[t];return e===void 0&&(e=r),m.isFunction(e)?e.call(n):e};var q=0;m.uniqueId=function(n){var t=++q+"";return n?n+t:t},m.templateSettings={evaluate:/<%([\s\S]+?)%>/g,interpolate:/<%=([\s\S]+?)%>/g,escape:/<%-([\s\S]+?)%>/g};var K=/(.)^/,z={"'":"'","\\":"\\","\r":"r","\n":"n","\u2028":"u2028","\u2029":"u2029"},D=/\\|'|\r|\n|\u2028|\u2029/g,L=function(n){return"\\"+z[n]};m.template=function(n,t,r){!t&&r&&(t=r),t=m.defaults({},t,m.templateSettings);var e=RegExp([(t.escape||K).source,(t.interpolate||K).source,(t.evaluate||K).source].join("|")+"|$","g"),u=0,i="__p+='";n.replace(e,function(t,r,e,o,a){return i+=n.slice(u,a).replace(D,L),u=a+t.length,r?i+="'+\n((__t=("+r+"))==null?'':_.escape(__t))+\n'":e?i+="'+\n((__t=("+e+"))==null?'':__t)+\n'":o&&(i+="';\n"+o+"\n__p+='"),t}),i+="';\n",t.variable||(i="with(obj||{}){\n"+i+"}\n"),i="var __t,__p='',__j=Array.prototype.join,"+"print=function(){__p+=__j.call(arguments,'');};\n"+i+"return __p;\n";try{var o=new Function(t.variable||"obj","_",i)}catch(a){throw a.source=i,a}var c=function(n){return o.call(this,n,m)},f=t.variable||"obj";return c.source="function("+f+"){\n"+i+"}",c},m.chain=function(n){var t=m(n);return t._chain=!0,t};var P=function(n,t){return n._chain?m(t).chain():t};m.mixin=function(n){m.each(m.functions(n),function(t){var r=m[t]=n[t];m.prototype[t]=function(){var n=[this._wrapped];return f.apply(n,arguments),P(this,r.apply(m,n))}})},m.mixin(m),m.each(["pop","push","reverse","shift","sort","splice","unshift"],function(n){var t=o[n];m.prototype[n]=function(){var r=this._wrapped;return t.apply(r,arguments),"shift"!==n&&"splice"!==n||0!==r.length||delete r[0],P(this,r)}}),m.each(["concat","join","slice"],function(n){var t=o[n];m.prototype[n]=function(){return P(this,t.apply(this._wrapped,arguments))}}),m.prototype.value=function(){return this._wrapped},m.prototype.valueOf=m.prototype.toJSON=m.prototype.value,m.prototype.toString=function(){return""+this._wrapped},"function"==typeof define&&define.amd&&define("underscore",[],function(){return m})}).call(this);
//# sourceMappingURL=underscore-min.map | yummy-sphinx-theme | /yummy_sphinx_theme-0.1.1.tar.gz/yummy_sphinx_theme-0.1.1/yummy_sphinx_theme/static/js/underscore-min.js | underscore-min.js |
# [Yummy](https://github.com/yummyml/yummy) - delicious MLOps tools.
The Yummy project contains several tools and components which can help build and speed up the MLOps process.
Most of the code is written in Rust 🦀, but the components expose an easy-to-use Python API.
| Package | Description | Documentation | Youtube
| -------------- | ------------------------------------------------------------------------------------- | ----------------------------------- | --------------
| yummy[feast] | Yummy [feast](https://docs.feast.dev/) feature store extensions | [📄README](yummy) | 🎞[YT](https://www.youtube.com/watch?v=YinQxF4Gx54)
| yummy[features] | Yummy features server compatible with [Feast](https://docs.feast.dev/) feature store | [📄README](yummy-rs/yummy-features-py) | 🎞[YT](https://www.youtube.com/watch?v=lXCJLc3hWgY)
| yummy[mlflow] | Yummy [mlflow](https://www.mlflow.org/) models server | [📄README](yummy-rs/yummy-mlflow-py) | 🎞[YT](https://www.youtube.com/watch?v=rjNZ1RwBlCA)
| yummy[delta] | Yummy [delta lake](https://delta.io/) api | [📄README](yummy-rs/yummy-delta-py) | 🎞Soon
| yummy | /yummy-0.0.9.tar.gz/yummy-0.0.9/README.md | README.md |
import re
import string
import sys
from itertools import chain, product
# flake8: noqa
__version__ = '0.1.2'
__all__ = ['braceexpand', 'alphabet', 'UnbalancedBracesError']
class UnbalancedBracesError(ValueError): pass
PY3 = sys.version_info[0] >= 3
if PY3:
xrange = range
alphabet = string.ascii_uppercase + string.ascii_lowercase
int_range_re = re.compile(r'^(\d+)\.\.(\d+)(?:\.\.-?(\d+))?$')
char_range_re = re.compile(r'^([A-Za-z])\.\.([A-Za-z])(?:\.\.-?(\d+))?$')
def braceexpand(pattern, escape=True):
"""braceexpand(pattern) -> iterator over generated strings
Returns an iterator over the strings resulting from brace expansion
of pattern. This function implements Brace Expansion as described in
bash(1), with the following limitations:
* A pattern containing unbalanced braces will raise an
UnbalancedBracesError exception. In bash, unbalanced braces will either
be partly expanded or ignored.
* A mixed-case character range like '{Z..a}' or '{a..Z}' will not
include the characters '[]^_`' between 'Z' and 'a'.
When escape is True (the default), characters in pattern can be
prefixed with a backslash to cause them not to be interpreted as
special characters for brace expansion (such as '{', '}', ',').
    To pass through a literal backslash, double it ('\\\\').
When escape is False, backslashes in pattern have no special
meaning and will be preserved in the output.
Examples:
>>> from braceexpand import braceexpand
# Integer range
>>> list(braceexpand('item{1..3}'))
['item1', 'item2', 'item3']
# Character range
>>> list(braceexpand('{a..c}'))
['a', 'b', 'c']
# Sequence
>>> list(braceexpand('index.html{,.backup}'))
['index.html', 'index.html.backup']
# Nested patterns
>>> list(braceexpand('python{2.{5..7},3.{2,3}}'))
['python2.5', 'python2.6', 'python2.7', 'python3.2', 'python3.3']
# Prefixing an integer with zero causes all numbers to be padded to
# the same width.
>>> list(braceexpand('{07..10}'))
['07', '08', '09', '10']
# An optional increment can be specified for ranges.
>>> list(braceexpand('{a..g..2}'))
['a', 'c', 'e', 'g']
# Ranges can go in both directions.
>>> list(braceexpand('{4..1}'))
['4', '3', '2', '1']
# Unbalanced braces raise an exception.
>>> list(braceexpand('{1{2,3}'))
Traceback (most recent call last):
...
UnbalancedBracesError: Unbalanced braces: '{1{2,3}'
# By default, the backslash is the escape character.
>>> list(braceexpand(r'{1\{2,3}'))
['1{2', '3']
# Setting 'escape' to False disables backslash escaping.
>>> list(braceexpand(r'\{1,2}', escape=False))
['\\\\1', '\\\\2']
"""
return (_flatten(t, escape) for t in parse_pattern(pattern, escape))
def parse_pattern(pattern, escape):
# pattern -> product(*parts)
start = 0
pos = 0
bracketdepth = 0
items = []
#print 'pattern:', pattern
while pos < len(pattern):
if escape and pattern[pos] == '\\':
pos += 2
continue
elif pattern[pos] == '{':
if bracketdepth == 0 and pos > start:
#print 'literal:', pattern[start:pos]
items.append([pattern[start:pos]])
start = pos
bracketdepth += 1
elif pattern[pos] == '}':
bracketdepth -= 1
if bracketdepth == 0:
#print 'expression:', pattern[start+1:pos]
items.append(parse_expression(pattern[start+1:pos], escape))
start = pos + 1 # skip the closing brace
pos += 1
if bracketdepth != 0: # unbalanced braces
raise UnbalancedBracesError("Unbalanced braces: '%s'" % pattern)
if start < pos:
#print 'literal:', pattern[start:]
items.append([pattern[start:]])
return product(*items)
def parse_expression(expr, escape):
int_range_match = int_range_re.match(expr)
if int_range_match:
return make_int_range(*int_range_match.groups())
char_range_match = char_range_re.match(expr)
if char_range_match:
return make_char_range(*char_range_match.groups())
return parse_sequence(expr, escape)
def parse_sequence(seq, escape):
# sequence -> chain(*sequence_items)
start = 0
pos = 0
bracketdepth = 0
items = []
#print 'sequence:', seq
while pos < len(seq):
if escape and seq[pos] == '\\':
pos += 2
continue
elif seq[pos] == '{':
bracketdepth += 1
elif seq[pos] == '}':
bracketdepth -= 1
elif seq[pos] == ',' and bracketdepth == 0:
items.append(parse_pattern(seq[start:pos], escape))
start = pos + 1 # skip the comma
pos += 1
if bracketdepth != 0 or not items: # unbalanced braces or not a sequence
return iter(['{' + seq + '}'])
# part after the last comma (may be the empty string)
items.append(parse_pattern(seq[start:], escape))
return chain(*items)
def make_int_range(start, end, step=None):
if any([s[0] == '0' for s in (start, end) if s != '0']):
padding = max(len(start), len(end))
else:
padding = 0
step = int(step) if step else 1
start = int(start)
end = int(end)
r = xrange(start, end+1, step) if start < end else \
xrange(start, end-1, -step)
return (str(i).rjust(padding, '0') for i in r)
def make_char_range(start, end, step=None):
step = int(step) if step else 1
start = alphabet.index(start)
end = alphabet.index(end)
return alphabet[start:end+1:step] if start < end else \
alphabet[start:end-1:-step]
escape_re = re.compile(r'\\(.)')
def _flatten(t, escape):
l = []
for item in t:
if isinstance(item, tuple): l.extend(_flatten(item, escape))
else: l.append(item)
s = ''.join(l)
# Strip escape characters from generated strings after expansion.
return escape_re.sub(r'\1', s) if escape else s
if __name__ == '__main__':
import doctest
doctest.testmod(optionflags=doctest.IGNORE_EXCEPTION_DETAIL) | yumrepos | /yumrepos-0.9.10.tar.gz/yumrepos-0.9.10/braceexpand.py | braceexpand.py |
yumwat change log
=================
1.0
---
- Install script now adds the plugin path to ``/etc/yum.conf``, meaning
that it is complete.
0.4.3
-----
Released 13-09-27.
- Patch prevents the same package from being detailed twice.
0.4.2
-----
Released 13-09-25. First PyPI release.
- added uninstall script
0.4.1
-----
Reached 13-09-25.
- PEP 8
- Ran pythong on the project, despite the commit log reading that I ran
yumwat on itself (sigh)
0.4.0
-----
Reached on 13-09-25.
- Enable and disable yumwat and its timid mode via a single command
0.3.0
-----
Reached 13-09-21.
- Timid mode and ``--wat`` option
0.2.0
-----
Reached 13-09-21.
- ``--nowat`` option
0.1.0
-----
Reached 13-09-20.
- Initial functionality reached: yumwat prints a list with package names
and their descriptions
Pre-history
-----------
Yumwat was conceived and started a good while before 0.1.0 was considered
to have been reached.
| yumwat | /yumwat-1.0.1.tar.gz/yumwat-1.0.1/CHANGES.rst | CHANGES.rst |
yumwat
======
Wild card! Oddshocks writes a yum plugin!
Wat
---
When I use yum to update my system or install new packages, I sometimes spot a
package I am unfamiliar with, such as the ``mesa-dri-filesystem`` package I saw
the other day. In some graphical extensions of yum, a brief description of the
package is listed under each package name. I figured I'd check out some docs on
writing yum plugins, to see if I could display a description under each package
name in the CLI updater. Then the Internet was like, "Yo, each yum plugin is a
single file, written in the Python programming language", and after I cleaned
up my brain which had EXPLODED out of my HEAD I made this repo and now we are
on a science adventure.
License
-------
GNU GPL v3+.
Installing
----------
Installation is easy! From the project directory (where this file is),
do the following.
Shell Script
************
I have written a bit of a shell script that performs the manual installation
steps (described below) for you. It is ``install.sh``, and will require root
privs, like
so: ``sudo ./install.sh``.
Manual Installation
*******************
1. Install the plugin file: ``sudo cp yumwat.py /usr/lib/yum-plugins/``
2. Install the plugin's configuration file: ``sudo cp yumwat.conf
/etc/yum/pluginconf.d/``
3. Add the line ``pluginpath=/usr/lib/yum-plugins`` to the file
``/etc/yum.conf``.
Timid mode
----------
The ``yumwat.conf`` file contains the line ``timid=0``. If timid is
set to 0, timid mode is off, and yumwat will print output unless
the ``--nowat`` option is used with yum. If timid is set to 1,
timid mode is on, and yumwat will not print output unless the
``--wat`` option is used with yum.
See below for how to easily enable or disable timid mode.
Enabling and Disabling
----------------------
**Enable yumwat** -- ``sudo /usr/lib/yum-plugins/yumwat.py enable``
**Disable yumwat** -- ``sudo /usr/lib/yum-plugins/yumwat.py disable``
**Enable timid mode** -- ``sudo /usr/lib/yum-plugins/yumwat.py timid``
**Disable timid mode** -- ``sudo /usr/lib/yum-plugins/yumwat.py assertive``
Uninstalling
------------
Easier than installation! We just need to make sure you remove the
generated bytecode file, too. You can do this by running the uninstall
script packaged with this plugin with root privs::
sudo ./uninstall.sh
Otherwise, follow these steps:
1. Remove the configuration file: ``sudo rm /etc/yum/pluginconf.d/yumwat.conf``
2. Remove the plugin file and its generated bytecode: ``sudo rm
/usr/lib/yum-plugins/yumwat.{py,pyc}``
| yumwat | /yumwat-1.0.1.tar.gz/yumwat-1.0.1/README.rst | README.rst |
import os
import shutil
import sys
import time
import fnmatch
import tempfile
import tarfile
import optparse
from distutils import log
try:
from site import USER_SITE
except ImportError:
USER_SITE = None
try:
import subprocess
def _python_cmd(*args):
args = (sys.executable,) + args
return subprocess.call(args) == 0
except ImportError:
# will be used for python 2.3
def _python_cmd(*args):
args = (sys.executable,) + args
# quoting arguments if windows
if sys.platform == 'win32':
def quote(arg):
if ' ' in arg:
return '"%s"' % arg
return arg
args = [quote(arg) for arg in args]
return os.spawnl(os.P_WAIT, sys.executable, *args) == 0
DEFAULT_VERSION = "0.6.35"
DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/"
SETUPTOOLS_FAKED_VERSION = "0.6c11"
SETUPTOOLS_PKG_INFO = """\
Metadata-Version: 1.0
Name: setuptools
Version: %s
Summary: xxxx
Home-page: xxx
Author: xxx
Author-email: xxx
License: xxx
Description: xxx
""" % SETUPTOOLS_FAKED_VERSION
def _install(tarball, install_args=()):
# extracting the tarball
tmpdir = tempfile.mkdtemp()
log.warn('Extracting in %s', tmpdir)
old_wd = os.getcwd()
try:
os.chdir(tmpdir)
tar = tarfile.open(tarball)
_extractall(tar)
tar.close()
# going in the directory
subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
os.chdir(subdir)
log.warn('Now working in %s', subdir)
# installing
log.warn('Installing Distribute')
if not _python_cmd('setup.py', 'install', *install_args):
log.warn('Something went wrong during the installation.')
log.warn('See the error message above.')
# exitcode will be 2
return 2
finally:
os.chdir(old_wd)
shutil.rmtree(tmpdir)
def _build_egg(egg, tarball, to_dir):
# extracting the tarball
tmpdir = tempfile.mkdtemp()
log.warn('Extracting in %s', tmpdir)
old_wd = os.getcwd()
try:
os.chdir(tmpdir)
tar = tarfile.open(tarball)
_extractall(tar)
tar.close()
# going in the directory
subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
os.chdir(subdir)
log.warn('Now working in %s', subdir)
# building an egg
log.warn('Building a Distribute egg in %s', to_dir)
_python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
finally:
os.chdir(old_wd)
shutil.rmtree(tmpdir)
# returning the result
log.warn(egg)
if not os.path.exists(egg):
raise IOError('Could not build the egg.')
def _do_download(version, download_base, to_dir, download_delay):
egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg'
% (version, sys.version_info[0], sys.version_info[1]))
if not os.path.exists(egg):
tarball = download_setuptools(version, download_base,
to_dir, download_delay)
_build_egg(egg, tarball, to_dir)
sys.path.insert(0, egg)
import setuptools
setuptools.bootstrap_install_from = egg
def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
to_dir=os.curdir, download_delay=15, no_fake=True):
# making sure we use the absolute path
to_dir = os.path.abspath(to_dir)
was_imported = 'pkg_resources' in sys.modules or \
'setuptools' in sys.modules
try:
try:
import pkg_resources
if not hasattr(pkg_resources, '_distribute'):
if not no_fake:
_fake_setuptools()
raise ImportError
except ImportError:
return _do_download(version, download_base, to_dir, download_delay)
try:
pkg_resources.require("distribute>=" + version)
return
except pkg_resources.VersionConflict:
e = sys.exc_info()[1]
if was_imported:
sys.stderr.write(
"The required version of distribute (>=%s) is not available,\n"
"and can't be installed while this script is running. Please\n"
"install a more recent version first, using\n"
"'easy_install -U distribute'."
"\n\n(Currently using %r)\n" % (version, e.args[0]))
sys.exit(2)
else:
del pkg_resources, sys.modules['pkg_resources'] # reload ok
return _do_download(version, download_base, to_dir,
download_delay)
except pkg_resources.DistributionNotFound:
return _do_download(version, download_base, to_dir,
download_delay)
finally:
if not no_fake:
_create_fake_setuptools_pkg_info(to_dir)
def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
to_dir=os.curdir, delay=15):
"""Download distribute from a specified location and return its filename
`version` should be a valid distribute version number that is available
as an egg for download under the `download_base` URL (which should end
with a '/'). `to_dir` is the directory where the egg will be downloaded.
`delay` is the number of seconds to pause before an actual download
attempt.
"""
# making sure we use the absolute path
to_dir = os.path.abspath(to_dir)
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
tgz_name = "distribute-%s.tar.gz" % version
url = download_base + tgz_name
saveto = os.path.join(to_dir, tgz_name)
src = dst = None
if not os.path.exists(saveto): # Avoid repeated downloads
try:
log.warn("Downloading %s", url)
src = urlopen(url)
# Read/write all in one block, so we don't create a corrupt file
# if the download is interrupted.
data = src.read()
dst = open(saveto, "wb")
dst.write(data)
finally:
if src:
src.close()
if dst:
dst.close()
return os.path.realpath(saveto)
def _no_sandbox(function):
def __no_sandbox(*args, **kw):
try:
from setuptools.sandbox import DirectorySandbox
if not hasattr(DirectorySandbox, '_old'):
def violation(*args):
pass
DirectorySandbox._old = DirectorySandbox._violation
DirectorySandbox._violation = violation
patched = True
else:
patched = False
except ImportError:
patched = False
try:
return function(*args, **kw)
finally:
if patched:
DirectorySandbox._violation = DirectorySandbox._old
del DirectorySandbox._old
return __no_sandbox
def _patch_file(path, content):
"""Will backup the file then patch it"""
f = open(path)
existing_content = f.read()
f.close()
if existing_content == content:
# already patched
log.warn('Already patched.')
return False
log.warn('Patching...')
_rename_path(path)
f = open(path, 'w')
try:
f.write(content)
finally:
f.close()
return True
_patch_file = _no_sandbox(_patch_file)
def _same_content(path, content):
f = open(path)
existing_content = f.read()
f.close()
return existing_content == content
def _rename_path(path):
new_name = path + '.OLD.%s' % time.time()
log.warn('Renaming %s to %s', path, new_name)
os.rename(path, new_name)
return new_name
def _remove_flat_installation(placeholder):
if not os.path.isdir(placeholder):
        log.warn('Unknown installation at %s', placeholder)
return False
found = False
for file in os.listdir(placeholder):
if fnmatch.fnmatch(file, 'setuptools*.egg-info'):
found = True
break
if not found:
log.warn('Could not locate setuptools*.egg-info')
return
log.warn('Moving elements out of the way...')
pkg_info = os.path.join(placeholder, file)
if os.path.isdir(pkg_info):
patched = _patch_egg_dir(pkg_info)
else:
patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO)
if not patched:
log.warn('%s already patched.', pkg_info)
return False
# now let's move the files out of the way
for element in ('setuptools', 'pkg_resources.py', 'site.py'):
element = os.path.join(placeholder, element)
if os.path.exists(element):
_rename_path(element)
else:
log.warn('Could not find the %s element of the '
'Setuptools distribution', element)
return True
_remove_flat_installation = _no_sandbox(_remove_flat_installation)
def _after_install(dist):
log.warn('After install bootstrap.')
placeholder = dist.get_command_obj('install').install_purelib
_create_fake_setuptools_pkg_info(placeholder)
def _create_fake_setuptools_pkg_info(placeholder):
if not placeholder or not os.path.exists(placeholder):
log.warn('Could not find the install location')
return
pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1])
setuptools_file = 'setuptools-%s-py%s.egg-info' % \
(SETUPTOOLS_FAKED_VERSION, pyver)
pkg_info = os.path.join(placeholder, setuptools_file)
if os.path.exists(pkg_info):
log.warn('%s already exists', pkg_info)
return
log.warn('Creating %s', pkg_info)
try:
f = open(pkg_info, 'w')
except EnvironmentError:
log.warn("Don't have permissions to write %s, skipping", pkg_info)
return
try:
f.write(SETUPTOOLS_PKG_INFO)
finally:
f.close()
pth_file = os.path.join(placeholder, 'setuptools.pth')
log.warn('Creating %s', pth_file)
f = open(pth_file, 'w')
try:
f.write(os.path.join(os.curdir, setuptools_file))
finally:
f.close()
_create_fake_setuptools_pkg_info = _no_sandbox(
_create_fake_setuptools_pkg_info
)
def _patch_egg_dir(path):
# let's check if it's already patched
pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
if os.path.exists(pkg_info):
if _same_content(pkg_info, SETUPTOOLS_PKG_INFO):
log.warn('%s already patched.', pkg_info)
return False
_rename_path(path)
os.mkdir(path)
os.mkdir(os.path.join(path, 'EGG-INFO'))
pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
f = open(pkg_info, 'w')
try:
f.write(SETUPTOOLS_PKG_INFO)
finally:
f.close()
return True
_patch_egg_dir = _no_sandbox(_patch_egg_dir)
def _before_install():
log.warn('Before install bootstrap.')
_fake_setuptools()
def _under_prefix(location):
if 'install' not in sys.argv:
return True
args = sys.argv[sys.argv.index('install') + 1:]
for index, arg in enumerate(args):
for option in ('--root', '--prefix'):
if arg.startswith('%s=' % option):
                top_dir = arg.split('%s=' % option, 1)[-1]
return location.startswith(top_dir)
elif arg == option:
if len(args) > index:
top_dir = args[index + 1]
return location.startswith(top_dir)
if arg == '--user' and USER_SITE is not None:
return location.startswith(USER_SITE)
return True
def _fake_setuptools():
log.warn('Scanning installed packages')
try:
import pkg_resources
except ImportError:
# we're cool
log.warn('Setuptools or Distribute does not seem to be installed.')
return
ws = pkg_resources.working_set
try:
setuptools_dist = ws.find(
pkg_resources.Requirement.parse('setuptools', replacement=False)
)
except TypeError:
# old distribute API
setuptools_dist = ws.find(
pkg_resources.Requirement.parse('setuptools')
)
if setuptools_dist is None:
log.warn('No setuptools distribution found')
return
# detecting if it was already faked
setuptools_location = setuptools_dist.location
log.warn('Setuptools installation detected at %s', setuptools_location)
    # if --root or --prefix was provided, and if
# setuptools is not located in them, we don't patch it
if not _under_prefix(setuptools_location):
log.warn('Not patching, --root or --prefix is installing Distribute'
' in another location')
return
# let's see if its an egg
if not setuptools_location.endswith('.egg'):
log.warn('Non-egg installation')
res = _remove_flat_installation(setuptools_location)
if not res:
return
else:
log.warn('Egg installation')
pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO')
if (os.path.exists(pkg_info) and
_same_content(pkg_info, SETUPTOOLS_PKG_INFO)):
log.warn('Already patched.')
return
log.warn('Patching...')
# let's create a fake egg replacing setuptools one
res = _patch_egg_dir(setuptools_location)
if not res:
return
log.warn('Patching complete.')
_relaunch()
def _relaunch():
log.warn('Relaunching...')
# we have to relaunch the process
# pip marker to avoid a relaunch bug
_cmd1 = ['-c', 'install', '--single-version-externally-managed']
_cmd2 = ['-c', 'install', '--record']
if sys.argv[:3] == _cmd1 or sys.argv[:3] == _cmd2:
sys.argv[0] = 'setup.py'
args = [sys.executable] + sys.argv
sys.exit(subprocess.call(args))
def _extractall(self, path=".", members=None):
"""Extract all members from the archive to the current working
directory and set owner, modification time and permissions on
directories afterwards. `path' specifies a different directory
to extract to. `members' is optional and must be a subset of the
list returned by getmembers().
"""
import copy
import operator
from tarfile import ExtractError
directories = []
if members is None:
members = self
for tarinfo in members:
if tarinfo.isdir():
# Extract directories with a safe mode.
directories.append(tarinfo)
tarinfo = copy.copy(tarinfo)
tarinfo.mode = 448 # decimal for oct 0700
self.extract(tarinfo, path)
# Reverse sort directories.
if sys.version_info < (2, 4):
def sorter(dir1, dir2):
return cmp(dir1.name, dir2.name)
directories.sort(sorter)
directories.reverse()
else:
directories.sort(key=operator.attrgetter('name'), reverse=True)
# Set correct owner, mtime and filemode on directories.
for tarinfo in directories:
dirpath = os.path.join(path, tarinfo.name)
try:
self.chown(tarinfo, dirpath)
self.utime(tarinfo, dirpath)
self.chmod(tarinfo, dirpath)
except ExtractError:
e = sys.exc_info()[1]
if self.errorlevel > 1:
raise
else:
self._dbg(1, "tarfile: %s" % e)
def _build_install_args(options):
"""
Build the arguments to 'python setup.py install' on the distribute package
"""
install_args = []
if options.user_install:
if sys.version_info < (2, 6):
log.warn("--user requires Python 2.6 or later")
raise SystemExit(1)
install_args.append('--user')
return install_args
def _parse_args():
"""
Parse the command line for options
"""
parser = optparse.OptionParser()
parser.add_option(
'--user', dest='user_install', action='store_true', default=False,
help='install in user site package (requires Python 2.6 or later)')
parser.add_option(
'--download-base', dest='download_base', metavar="URL",
default=DEFAULT_URL,
help='alternative URL from where to download the distribute package')
options, args = parser.parse_args()
# positional arguments are ignored
return options
def main(version=DEFAULT_VERSION):
"""Install or upgrade setuptools and EasyInstall"""
options = _parse_args()
tarball = download_setuptools(download_base=options.download_base)
return _install(tarball, _build_install_args(options))
if __name__ == '__main__':
sys.exit(main()) | yumwat | /yumwat-1.0.1.tar.gz/yumwat-1.0.1/distribute_setup.py | distribute_setup.py |
# Yuna DB: `dict`-like semantics for LMDB
## Introduction
Yuna is a key/value store that's built upon LMDB (the Symas Lightning
Memory-mapped Database). LMDB really is lightning-fast, but, as a C
library, it only lets you look up a byte string value using either
an integer key or a byte string key.
Yuna provides semantics similar to a `dict`. You can specify a serialization
format, and optionally a compression format to use, and Yuna will serialize
values and write them to the database as byte strings. It will
automatically recover the value from the byte string when you read it back.
For example, if `my_value` is any Python value that can be serialized by the
serialization format chosen for table `foo`, this would work:
`db.tables.foo.put(my_key, my_value)`
After putting the value you can get it again:
`my_value = db.tables.foo.get(my_key)`
And of course you can delete it:
`db.tables.foo.delete(my_key)`
If you want to use `dict` syntax, that's supported as well:
```
db.tables.foo[my_key] = my_value
my_value = db.tables.foo[my_key]
del db.tables.foo[my_key]
```
## Example
```
from yuna import Yuna, SERIALIZE_JSON, SERIALIZE_STR
with Yuna("/tmp/test.ydb", create=True) as db:
db.new_table("names", serialize=SERIALIZE_JSON)
key = "feynman"
value = dict(first_name="Richard", last_name="Feynman")
db.tables.names.put(key, value)
temp = db.tables.names.get(key)
assert value == temp
tbl_abbrevs = db.new_table("abbrevs", serialize=SERIALIZE_STR)
key = "l8r"
value = "see you later"
tbl_abbrevs[key] = value
assert value == tbl_abbrevs[key]
```
## Planned new features to come:
* Support integer keys with a `.insert()` method providing autoincrement
* Finish the Zstandard compression support
* Add a REPL (Python with the yuna module already loaded and the DB open)
* Add lots more unit tests
## Advantages of Yuna over writing your own serialization code by hand
* **Much more convenient.** Using LMDB directly and writing your own serialization code
means having to write some very similar boilerplate code, over and over. For example,
for a table where the data is serialized as JSON and compressed with LZ4:
```
# See LMDB documentation for details of transaction and get
with lmdb_env.begin(db="foo") as txn:
temp = txn.get(my_key, default=None)
if temp is None:
result = my_default_value
else:
result = deserialize_json(decompress_lz4(temp))
```
The above is replaced by one line:
```
result = db.tables.foo.get(my_key, my_default_value)
```
* **Looser coupling of code and data.** Yuna reads the database file
to find out what form of serialization is being used, and what form
of compression (if any) is being used. If you change your mind, you
only have to change the code that generates the Yuna file; the code
that uses the database doesn't have to change.
For example, if last week you were serializing in JSON and not compressing,
and this week you are serializing in MesgPack and compressing with LZ4,
the code that uses the database doesn't change at all. And in fact
you can switch between loading this week's file and last week's file
without having to change your code.
Even if you wrote your own wrapper functions to reduce the amount of
boilerplate in your code, you would have to make sure to call the correct
wrapper for each table. For example, if one table is compressed and
another is not, you would need a different wrapper for each, even if
they both used the same serialization format.
* **Standardized metadata allows standardized tools.** Yuna will soon
include a tool that will read the metadata and write a terse summary of
what's in the file. Yuna also offers a standard "name" and "version" feature,
which Yuna will check if you use them. If you accidentally load the wrong
file, it's better to get an immediate failure with a useful error message
instead of getting a runtime error because an expected table wasn't present in
the database file. Yuna raises an exception with a message like this:
`LMDB file 'name' mismatch: expected 'foo', got 'bar'`
When you make a breaking change in your database file format, you can
change the 'version' number, and get a similar exception if you accidentally
load an old, outdated file:
`LMDB file 'version' mismatch: expected 3, got 2`
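To make the last two points concrete, here is a minimal sketch based on the
`Yuna` calls shown in the example earlier. The file path, table name, and
record contents are hypothetical; the mismatch behaviour is the one described
above.

```
from yuna import Yuna, SERIALIZE_JSON, COMPRESS_LZ4

# Create a database whose metadata records name="people" and version=1,
# with one table serialized as JSON and compressed with LZ4.
with Yuna("/tmp/people.ydb", "people", 1, create=True) as db:
    db.new_table("persons", serialize=SERIALIZE_JSON, compress=COMPRESS_LZ4)
    db.tables.persons.put("feynman", {"first": "Richard", "last": "Feynman"})

# Reopen the same file: the serialize/compress choices are not repeated,
# because Yuna reads them from the file's metadata.
with Yuna("/tmp/people.ydb", "people", 1) as db:
    assert db.tables.persons.get("feynman")["last"] == "Feynman"

# Opening the file while expecting a different 'name' or 'version' raises
# an exception whose message looks like the mismatch examples quoted above.
```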
| yuna-db | /yuna-db-0.2.5.tar.gz/yuna-db-0.2.5/README.md | README.md |
import os
import sys
# Hack sys.path so that this file will run against Yuna from this directory tree,
# even if someone ran "pip install yuna-db" before running this.
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, os.path.abspath(os.path.join(THIS_DIR, "src")))
import yuna
from yuna import Yuna
from yuna.lmdb_util import delete_file_or_dir
TEST_FILE = "/tmp/junk.ydb"
delete_file_or_dir(TEST_FILE)
with Yuna(TEST_FILE, "test", 1, create=True) as db:
db.new_table("a26", serialize=yuna.SERIALIZE_STR, compress=yuna.COMPRESS_LZ4)
tbl_a26 = db.tables.a26
tbl_a26.put("a", "1")
tbl_a26.put("b", "2")
#tbl_a26.put("c", "3")
tbl_a26["d"] = "4"
tbl_a26["e"] = "5"
lst = list(tbl_a26.keys())
print(f"a26 keys: {lst}")
lst = list(tbl_a26.keys(start='c'))
print(f"a26 keys from 'c': {lst}")
lst = list(tbl_a26.keys(start='c', stop='e'))
print(f"a26 keys from 'c' to < 'e': {lst}")
lst = list(tbl_a26.items())
print(f"a26 items: {lst}")
lst = list(tbl_a26.items(start='c'))
print(f"a26 items from 'c': {lst}")
lst = list(tbl_a26.items(start='c', stop='e'))
print(f"a26 items from 'c' to < 'e': {lst}")
lst = list(tbl_a26.values())
print(f"a26 values: {lst}")
lst = list(tbl_a26.values(start='c'))
print(f"a26 values from 'c': {lst}")
lst = list(tbl_a26.values(start='c', stop='e'))
print(f"a26 values from 'c' to < 'e': {lst}")
assert tbl_a26.get("a", None) == "1"
assert tbl_a26.get("b", None) == "2"
assert tbl_a26.get("c", None) is None
tbl_j = db.new_table("jjj", serialize=yuna.SERIALIZE_JSON)
D_FOO = {"f": 0, "o": 1}
D_BAR = {"b": 9, "a": 8, "r": 7}
tbl_j.put("foo", D_FOO)
tbl_j.put("bar", D_BAR)
assert tbl_j.get("foo", None) == D_FOO
assert tbl_j.get("bar", None) == D_BAR
assert tbl_j.get("baz", None) is None
db.reserved.put("foo", D_FOO)
assert db.reserved.get("foo", None) == D_FOO
temp = db.reserved.raw_get(b"foo")
db.reserved.raw_put(b"bar", temp)
temp = db.reserved.get("bar")
assert temp == D_FOO
db.reserved.raw_delete(b"bar")
assert db.reserved.get("bar", None) is None
print("reserved raw_keys:")
for bytes_key in db.reserved.raw_keys():
print(bytes_key)
print()
print("reserved raw_items:")
for bytes_key, bytes_value in db.reserved.raw_items():
print(bytes_key, "->", bytes_value)
print()
print("reserved raw_values:")
for bytes_value in db.reserved.raw_values():
print(bytes_value)
print()
db.reserved.delete("foo")
assert db.reserved.get("foo", None) is None
with Yuna(TEST_FILE, "test", 1, read_only=False) as db:
assert db.tables.a26.get("a", None) == "1"
assert db.tables.a26.get("b", None) == "2"
assert db.tables.a26.get("c", None) is None
assert db.tables.jjj.get("foo", None) == D_FOO
assert db.tables.jjj.get("bar", None) == D_BAR
assert db.tables.jjj.get("baz", None) is None
db.tables.jjj.truncate()
assert db.tables.jjj.get("foo", None) is None
assert db.tables.jjj.get("bar", None) is None
assert db.tables.jjj.get("baz", None) is None
db.tables.jjj.drop()
db.tables.a26.drop() | yuna-db | /yuna-db-0.2.5.tar.gz/yuna-db-0.2.5/example.py | example.py |
import json
import os
import shutil
from typing import Optional
import lmdb
# Create a unique object used to detect if optional arg not provided.
# Can't use None because user might want to provide None.
_LMDB_UTIL_NOT_PROVIDED = object()
# NOTES about terminology
#
# The LMDB documentation uses some unusual terms. The Yuna library uses some
# different terms.
#
# An LMDB file may contain multiple key/value stores. LMDB refers to these
# as "databases" or "named databases". Yuna will simply call these "tables".
#
# Every LMDB file has one, default key/value store that the user can store
# data in, but that LMDB also stores some data in. LMDB calls this
# the "unnamed database". Yuna calls this the "reserved table".
#
# LMDB calls the open LMDB file an "environment" and uses "env" as
# the variable name for what is returned. Yuna's low-level LMDB code
# uses this terminology but Yuna in general just refers to "the database".
# LMDB files have to be declared with limits: maximum number of tables,
# maximum total file size. In practice it works quite well to simply give
# very large limit numbers; the database will not actually take up the
# maximum declared size. On Linux, the file may appear to be that size,
# but in that case the "yuna_repack" utility can be used to make a copy
# of the database file that is minimum size.
#
# TODO: implement the "yuna_repack" utility
class YunaInvalidDB(ValueError):
pass
YUNA_FILE_EXTENSION = ".ydb"
YUNA_DB_META_KEY = "__YUNA_DB_META__"
YUNA_DEFAULT_MAX_TABLES = 100
YUNA_DEFAULT_MAX_DB_SIZE = 2**40 # one tebibyte (binary terabyte): 1024**4
_VALID_SAFETY_MODES = ('a', 'u')
def delete_file_or_dir(fname: str):
# If it doesn't exist, we don't complain, similar to /bin/rm -f on Linux
if not os.path.exists(fname):
return
# Something exists; delete it whether it is a file or a directory.
try:
os.remove(fname)
except IsADirectoryError:
shutil.rmtree(fname)
def _lmdb_reserved_delete(
env: lmdb.Environment,
bytes_key: bytes
) -> None:
with env.begin(write=True) as txn:
txn.delete(bytes_key)
def _lmdb_reserved_get(
env: lmdb.Environment,
bytes_key: bytes,
default: Optional[bytes]=_LMDB_UTIL_NOT_PROVIDED
) -> Optional[bytes]:
with env.begin() as txn:
result = txn.get(bytes_key, default=None)
if result is None:
if default is _LMDB_UTIL_NOT_PROVIDED:
                raise KeyError(bytes_key)
else:
return default
return result
def _lmdb_reserved_put(
env: lmdb.Environment,
bytes_key: bytes,
bytes_value: bytes
) -> None:
with env.begin(write=True) as txn:
txn.put(bytes_key, bytes_value)
def _lmdb_table_open(
env: lmdb.Environment,
name: str,
create: bool=False,
integerkey: bool=False
) -> None:
bytes_name = bytes(name, "utf-8")
return env.open_db(bytes_name, create=create, integerkey=integerkey)
def _lmdb_table_delete(
env: lmdb.Environment,
table: lmdb._Database,
bytes_key: bytes
) -> None:
with env.begin(db=table, write=True) as txn:
txn.delete(bytes_key)
def _lmdb_table_drop(
env: lmdb.Environment,
table: lmdb._Database
) -> None:
# With delete=True drops the entire table and all data contained inside it.
with env.begin(write=True) as txn:
txn.drop(table, delete=True)
def _lmdb_table_get(
env: lmdb.Environment,
table: lmdb._Database,
key: bytes,
default: Optional[bytes]=_LMDB_UTIL_NOT_PROVIDED,
) -> Optional[bytes]:
with env.begin(db=table) as txn:
result = txn.get(key, default=None)
if result is None:
if default is _LMDB_UTIL_NOT_PROVIDED:
raise KeyError(key)
else:
return default
return result
def _lmdb_table_put(
env: lmdb.Environment,
table: lmdb._Database,
key: bytes,
value: bytes,
) -> None:
with env.begin(db=table, write=True) as txn:
txn.put(key, value)
def _lmdb_table_truncate(
env: lmdb.Environment,
table: lmdb._Database
) -> None:
with env.begin(write=True) as txn:
# Drops every key/value pair, but with delete=False does not drop the table itself.
txn.drop(table, delete=False)
def _lmdb_sync(
env: lmdb.Environment
) -> None:
env.sync(force=True)
def _yuna_new_meta(
name: Optional[str]=None,
version: Optional[str]=None,
tables_map: Optional[dict]=None,
) -> dict:
if tables_map is None:
tables_map = {}
metadata = {}
if name is not None:
metadata["name"] = name
if version is not None:
metadata["version"] = version
metadata["tables"] = {}
metadata["yuna_version"] = 1
return metadata
def _yuna_get_meta(
env: lmdb.Environment,
name: Optional[str],
version: Optional[int],
) -> dict:
"""
Get the metadata stored in a Yuna DB file.
Given an open LMDB file, first check to see if it's a valid
Yuna DB file. If it is, return the metadata as a dict.
If @name is provided, check to see if the Yuna DB file has that name.
If @version is provided, check to see if the Yuna DB file has that version.
Raises YunaInvalidDB on any error.
"""
# Try to retrieve the metadata, always stored in the reserved table.
key = bytes(YUNA_DB_META_KEY, "utf-8")
value = _lmdb_reserved_get(env, key, default=None)
if value is None:
fname = env.path()
raise YunaInvalidDB(f"LMDB file is not a Yuna DB file: {fname!r}")
# We got something... does it decode as valid JSON?
try:
meta = json.loads(value)
if not isinstance(meta, dict):
# whatever was stored there wasn't a JSON-encoded metadata dictionary
raise ValueError
except (ValueError, TypeError, json.decoder.JSONDecodeError):
fname = env.path()
raise YunaInvalidDB(f"Yuna DB has corrupted metadata: {fname!r}")
# If user provided name and/or version, make appropriate checks.
if name is not None:
temp = meta.get("name", None)
if temp != name:
raise YunaInvalidDB(f"LMDB file 'name' mismatch: expected {name!r}, got {temp!r}")
if version is not None:
temp = meta.get("version", None)
if temp != version:
raise YunaInvalidDB(f"LMDB file 'version' mismatch: expected {version}, got {temp}")
return meta
def _yuna_put_meta(
env: lmdb.Environment,
meta: dict,
) -> None:
"""
Given @meta, a dictionary containing metadata, write it to the LMDB file.
"""
# Try to retrieve the metadata, always stored in the reserved table.
key = bytes(YUNA_DB_META_KEY, "ascii")
s = json.dumps(meta)
value = bytes(s, "utf-8")
_lmdb_reserved_put(env, key, value)
def _lmdb_open(
fname: str,
read_only: bool=True,
create: bool=False,
safety_mode: str='a',
single_file: bool=True,
max_tables: int=YUNA_DEFAULT_MAX_TABLES,
max_db_file_size: int=YUNA_DEFAULT_MAX_DB_SIZE,
extra_args: Optional[dict]=None,
):
"""
@safety_mode: legal values are 'a' (ACID safety) 'u' (unsafe; fastest)
"""
if safety_mode not in _VALID_SAFETY_MODES:
mesg = f"safety_mode must be one of {_VALID_SAFETY_MODES} "\
"but instead was {safety_mode!r}"
raise ValueError(mesg)
if not create:
if not os.path.exists(fname) and not fname.endswith(YUNA_FILE_EXTENSION):
temp = fname + YUNA_FILE_EXTENSION
            if os.path.exists(temp):
fname = temp
if not os.path.exists(fname):
raise FileNotFoundError(fname)
# Create implies we want to be able to write. Don't even check it, just make sure read_only is False.
if create:
read_only = False
delete_file_or_dir(fname)
try:
kwargs = {
"create": create,
"map_size": max_db_file_size,
"max_dbs": max_tables,
"readonly": read_only,
"subdir": not single_file,
}
if safety_mode == 'u':
# Change all the settings to their fastest and least safe value.
# This is ideal for creating a DB file that will later be used
# read-only, and is a terrible idea for a DB that will be "live"
# and will have data written to it when a service is running.
kwargs["metasync"] = False
kwargs["sync"] = False
kwargs["writemap"] = True
kwargs["map_async"] = True
if extra_args:
kwargs.update(extra_args)
env = lmdb.open(fname, **kwargs)
return env
except Exception:
# currently Yuna is just letting LMDB exceptions be raised.
raise | yuna-db | /yuna-db-0.2.5.tar.gz/yuna-db-0.2.5/src/yuna/lmdb_util.py | lmdb_util.py |
from json import dumps as json_dumps
from json import loads as json_loads
from typing import Any, Callable, Dict, Iterator, List, Optional
import lmdb
from .lmdb_util import YUNA_DEFAULT_MAX_DB_SIZE, YUNA_DEFAULT_MAX_TABLES
from .lmdb_util import YUNA_DB_META_KEY, YUNA_FILE_EXTENSION
from .lmdb_util import _lmdb_open, _yuna_get_meta, _yuna_put_meta
from .lmdb_util import _lmdb_table_delete, _lmdb_table_get, _lmdb_table_put
# Create a unique object used to detect if optional arg not provided.
# Can't use None because user might want to provide None.
_YUNA_NOT_PROVIDED = object()
class SerializePlugins:
def __init__(self,
serialize: Callable,
deserialize: Callable,
init: Optional[Callable] = None,
options: Optional[Callable] = None,
):
self.serialize = serialize
self.deserialize = deserialize
self.init = init
self.options = options
class CompressPlugins:
def __init__(self,
compress: Callable,
decompress: Callable,
init: Optional[Callable] = None,
options: Optional[Callable] = None,
train: Optional[Callable] = None,
):
self.compress = compress
self.decompress = decompress
self.init = init
self.options = options
self.train = train
YUNA_SERIALIZE_CACHE: Dict[str, SerializePlugins] = {}
def _not_implemented(*args, **kwargs) -> None:
    raise NotImplementedError("not implemented yet")
def _empty_string_key_check(x: str) -> None:
# LMDB doesn't allow zero-length keys. For now, raise when one is seen. TODO: consider adding a workaround.
if not isinstance(x, int) and x is not None:
if not x:
raise ValueError("key cannot be empty string")
INTEGER_KEY = "integer_key"
SERIALIZE_JSON = "json"
def serialize_json(x: Any) -> bytes:
return json_dumps(x).encode("utf-8")
deserialize_json = json_loads
def _import_json() -> None:
import json
plugins = SerializePlugins(
serialize=serialize_json,
deserialize=deserialize_json,
)
YUNA_SERIALIZE_CACHE[SERIALIZE_JSON] = plugins
SERIALIZE_MSGPACK = "msgpack"
def _import_msgpack() -> None:
import msgpack
plugins = SerializePlugins(
serialize=msgpack.dumps,
deserialize=msgpack.loads,
)
YUNA_SERIALIZE_CACHE[SERIALIZE_MSGPACK] = plugins
SERIALIZE_STR = "str"
def serialize_str(s: str) -> bytes:
return s.encode('utf-8')
def deserialize_str(bytes_s: bytes) -> str:
return str(bytes_s, 'utf-8')
def _import_str() -> None:
# nothing to import; strings are built-in
plugins = SerializePlugins(
serialize=serialize_str,
deserialize=deserialize_str,
)
YUNA_SERIALIZE_CACHE[SERIALIZE_STR] = plugins
_SERIALIZE_IMPORT_FUNCTIONS = {
SERIALIZE_JSON: _import_json,
SERIALIZE_MSGPACK: _import_msgpack,
SERIALIZE_STR: _import_str,
}
YUNA_COMPRESS_CACHE: Dict[str, CompressPlugins] = {}
COMPRESS_LZ4 = "lz4"
def _import_lz4() -> None:
import lz4
import lz4.block
plugins = CompressPlugins(
compress=lz4.block.compress,
decompress=lz4.block.decompress,
)
YUNA_COMPRESS_CACHE[COMPRESS_LZ4] = plugins
COMPRESS_ZLIB = "zlib"
def _import_zlib() -> None:
import zlib
plugins = CompressPlugins(
init=None,
options=None, # TODO: add an options function
compress=zlib.compress,
decompress=zlib.decompress,
)
YUNA_COMPRESS_CACHE[COMPRESS_ZLIB] = plugins
COMPRESS_ZSTD = "zstd"
def _init_zstd():
raise RuntimeError("init not implemented yet but is coming")
def _options_zstd(*args, **kwargs):
raise RuntimeError("options not implemented yet but is coming")
def _train_zstd_factory() -> Callable:
    # Import here (mirroring _import_zstd) and build the function inside this
    # factory so that it closes over the module reference.
    import zstandard
    fn_train_dictionary = zstandard.train_dictionary
def _train_zstd(size: int, samples: List[Any]) -> bytes:
"""
@size: how many bytes the dictionary should be
@samples: list of training data records
Builds a compression dictionary of size @size from data in @samples
"""
# TODO: see if an iterator works for @samples and update docs if it does
        compression_dictionary = fn_train_dictionary(size, samples)
bytes_data = compression_dictionary.as_bytes()
return bytes_data
return _train_zstd
def _import_zstd() -> None:
import zstandard
plugins = CompressPlugins(
init=None, # TODO: add init()
options=None, # TODO: add an options function
compress=_not_implemented,
decompress=_not_implemented,
train=_train_zstd_factory(),
)
YUNA_COMPRESS_CACHE[COMPRESS_ZSTD] = plugins
_COMPRESS_IMPORT_FUNCTIONS = {
COMPRESS_LZ4: _import_lz4,
COMPRESS_ZLIB: _import_zlib,
COMPRESS_ZSTD: _import_zstd,
}
def get_serialize_plugins(tag: Optional[str]) -> Optional[SerializePlugins]:
if tag is None:
return None
plugins = YUNA_SERIALIZE_CACHE.get(tag, None)
if plugins is None:
fn_import = _SERIALIZE_IMPORT_FUNCTIONS.get(tag, None)
if fn_import is None:
raise ValueError(f"'{tag}': unknown serialization format")
fn_import()
plugins = YUNA_SERIALIZE_CACHE.get(tag, None)
if plugins is None:
# This error should be impossible...
# If it happens, check the import function and make sure it saves to the cache with the correct tag.
raise RuntimeError(f"'{tag}': serialize import succeeded but cannot get plugins")
return plugins
def get_compress_plugins(tag: Optional[str]) -> Optional[CompressPlugins]:
if tag is None:
return None
plugins = YUNA_COMPRESS_CACHE.get(tag, None)
if plugins is None:
fn_import = _COMPRESS_IMPORT_FUNCTIONS.get(tag, None)
if fn_import is None:
raise ValueError(f"'{tag}': unknown compression format")
fn_import()
plugins = YUNA_COMPRESS_CACHE.get(tag, None)
if plugins is None:
# This error should be impossible...
# If it happens, check the import function and make sure it saves to the cache with the correct tag.
raise RuntimeError(f"'{tag}': compress import succeeded but cannot get plugins")
return plugins
# Python strings are much more convenient as keys than Python byte strings.
# So, while it's legal to use byte strings as keys, Yuna doesn't really
# expect that case, so we will always have a key serialization function.
# This function does nothing, very quickly, to handle that case. If the
# user is passing a byte string anyway we can return it unchanged.
def _return_bytes_unchanged(x: bytes) -> bytes:
"""
Return a byte string unchanged.
Used as a key serialization function in cases where
no serialization is requested.
"""
return x
def _get_table_raw_factory(
env: lmdb.Environment,
table: lmdb._Database,
key_serialize_tag: Optional[str]
) -> Callable:
key_plugins = get_serialize_plugins(key_serialize_tag)
fn_key_serialize = key_plugins.serialize if key_plugins else _return_bytes_unchanged
def get(self, key: str, default: Optional[bytes]=_YUNA_NOT_PROVIDED) -> Optional[bytes]:
bytes_key = fn_key_serialize(key)
result = _lmdb_table_get(env, table, bytes_key, None)
if result is None:
if default is _YUNA_NOT_PROVIDED:
raise KeyError(key)
else:
return default
return result
return get
def _get_table_deserialize_factory(
env: lmdb.Environment,
table: lmdb._Database,
key_serialize_tag: Optional[str],
value_serialize_tag: str
) -> Callable:
key_plugins = get_serialize_plugins(key_serialize_tag)
fn_key_serialize = key_plugins.serialize if key_plugins else _return_bytes_unchanged
value_plugins = get_serialize_plugins(value_serialize_tag)
fn_value_deserialize = value_plugins.deserialize
def get(self, key: str, default: Any=_YUNA_NOT_PROVIDED) -> Any:
bytes_key = fn_key_serialize(key)
result = _lmdb_table_get(env, table, bytes_key, None)
if result is None:
if default is _YUNA_NOT_PROVIDED:
raise KeyError(key)
else:
return default
return fn_value_deserialize(result)
return get
def _get_table_decompress_factory(
env: lmdb.Environment,
table: lmdb._Database,
key_serialize_tag: Optional[str],
value_compress_tag: str
) -> Callable:
key_plugins = get_serialize_plugins(key_serialize_tag)
fn_key_serialize = key_plugins.serialize if key_plugins else _return_bytes_unchanged
value_plugins = get_compress_plugins(value_compress_tag)
fn_value_decompress = value_plugins.decompress
def get(self, key: str, default: Any=_YUNA_NOT_PROVIDED) -> Any:
bytes_key = fn_key_serialize(key)
result = _lmdb_table_get(env, table, bytes_key, None)
if result is None:
if default is _YUNA_NOT_PROVIDED:
raise KeyError(key)
else:
return default
return fn_value_decompress(result)
return get
def _get_table_deserialize_decompress_factory(
env: lmdb.Environment,
table: lmdb._Database,
key_serialize_tag: Optional[str],
value_serialize_tag: str,
value_compress_tag: str
) -> Callable:
key_plugins = get_serialize_plugins(key_serialize_tag)
fn_key_serialize = key_plugins.serialize if key_plugins else _return_bytes_unchanged
value_serialize_plugins = get_serialize_plugins(value_serialize_tag)
fn_value_deserialize = value_serialize_plugins.deserialize
value_compress_plugins = get_compress_plugins(value_compress_tag)
fn_value_decompress = value_compress_plugins.decompress
def get(self, key: str, default: Any=_YUNA_NOT_PROVIDED) -> Any:
bytes_key = fn_key_serialize(key)
result = _lmdb_table_get(env, table, bytes_key, None)
if result is None:
if default is _YUNA_NOT_PROVIDED:
raise KeyError(key)
else:
return default
return fn_value_deserialize(fn_value_decompress(result))
return get
def get_factory(
env: lmdb.Environment,
table: lmdb._Database,
key_serialize_tag: Optional[str],
value_serialize_tag: Optional[str],
value_compress_tag: Optional[str]
) -> Callable:
if value_serialize_tag and value_compress_tag:
return _get_table_deserialize_decompress_factory(
env, table, key_serialize_tag, value_serialize_tag, value_compress_tag)
elif not value_serialize_tag and value_compress_tag:
return _get_table_decompress_factory(
env, table, key_serialize_tag, value_compress_tag)
elif value_serialize_tag and not value_compress_tag:
return _get_table_deserialize_factory(
env, table, key_serialize_tag, value_serialize_tag)
else:
return _get_table_raw_factory(env, table, key_serialize_tag)
def _put_table_raw_factory(
env: lmdb.Environment,
table: lmdb._Database,
key_serialize_tag: Optional[str]
) -> Callable:
key_plugins = get_serialize_plugins(key_serialize_tag)
fn_key_serialize = key_plugins.serialize if key_plugins else _return_bytes_unchanged
def put(self, key: str, bytes_value: bytes) -> None:
bytes_key = fn_key_serialize(key)
_lmdb_table_put(env, table, bytes_key, bytes_value)
return put
def _put_table_serialize_factory(
env: lmdb.Environment,
table: lmdb._Database,
key_serialize_tag: Optional[str],
value_serialize_tag: str
) -> Callable:
key_plugins = get_serialize_plugins(key_serialize_tag)
fn_key_serialize = key_plugins.serialize if key_plugins else _return_bytes_unchanged
value_plugins = get_serialize_plugins(value_serialize_tag)
fn_value_serialize = value_plugins.serialize
def put(self, key: str, value: Any) -> None:
bytes_key = fn_key_serialize(key)
bytes_value = fn_value_serialize(value)
_lmdb_table_put(env, table, bytes_key, bytes_value)
return put
def _put_table_compress_factory(
env: lmdb.Environment,
table: lmdb._Database,
key_serialize_tag: Optional[str],
value_compress_tag: str
) -> Callable:
key_plugins = get_serialize_plugins(key_serialize_tag)
fn_key_serialize = key_plugins.serialize if key_plugins else _return_bytes_unchanged
value_plugins = get_compress_plugins(value_compress_tag)
fn_value_compress = value_plugins.compress
def put(self, key: str, value: Any) -> None:
bytes_key = fn_key_serialize(key)
        bytes_value = fn_value_compress(value)
_lmdb_table_put(env, table, bytes_key, bytes_value)
return put
def _put_table_serialize_compress_factory(
env: lmdb.Environment,
table: lmdb._Database,
key_serialize_tag: Optional[str],
value_serialize_tag: str,
value_compress_tag: str
) -> Callable:
key_plugins = get_serialize_plugins(key_serialize_tag)
fn_key_serialize = key_plugins.serialize if key_plugins else _return_bytes_unchanged
value_serialize_plugins = get_serialize_plugins(value_serialize_tag)
fn_value_serialize = value_serialize_plugins.serialize
value_compress_plugins = get_compress_plugins(value_compress_tag)
fn_value_compress = value_compress_plugins.compress
def put(self, key: str, value: Any) -> None:
bytes_key = fn_key_serialize(key)
bytes_value = fn_value_compress(fn_value_serialize(value))
_lmdb_table_put(env, table, bytes_key, bytes_value)
return put
def put_factory(
env: lmdb.Environment,
table: lmdb._Database,
key_serialize_tag: Optional[str],
value_serialize_tag: Optional[str],
value_compress_tag: Optional[str]
) -> Callable:
if value_serialize_tag and value_compress_tag:
return _put_table_serialize_compress_factory(
env, table, key_serialize_tag, value_serialize_tag, value_compress_tag)
elif not value_serialize_tag and value_compress_tag:
return _put_table_compress_factory(
env, table, key_serialize_tag, value_compress_tag)
elif value_serialize_tag and not value_compress_tag:
return _put_table_serialize_factory(
env, table, key_serialize_tag, value_serialize_tag)
else:
return _put_table_raw_factory(env, table, key_serialize_tag)
def delete_factory(
env: lmdb.Environment,
table: lmdb._Database,
key_serialize_tag: Optional[str]
) -> Callable:
key_plugins = get_serialize_plugins(key_serialize_tag)
fn_key_serialize = key_plugins.serialize if key_plugins else _return_bytes_unchanged
def delete(self, key: str) -> None:
bytes_key = fn_key_serialize(key)
_lmdb_table_delete(env, table, bytes_key)
return delete
# TODO: change the types on keys from str to Any
def keys_factory(
env: lmdb.Environment,
table: lmdb._Database,
key_serialize_tag: Optional[str]
) -> Callable:
key_plugins = get_serialize_plugins(key_serialize_tag)
fn_key_serialize = key_plugins.serialize if key_plugins else _return_bytes_unchanged
fn_key_deserialize = key_plugins.deserialize if key_plugins else _return_bytes_unchanged
def keys(self, start: Optional[str]=None, stop: Optional[str]=None) -> Iterator:
_empty_string_key_check(start)
_empty_string_key_check(stop)
bytes_start = fn_key_serialize(start) if (start is not None) else None
bytes_stop = fn_key_serialize(stop) if (stop is not None) else None
with env.begin() as txn:
with txn.cursor(table) as cursor:
if bytes_start is not None:
cursor.set_range(bytes_start)
if bytes_stop is None:
for bytes_key, _ in cursor:
key = fn_key_deserialize(bytes_key)
yield key
else:
for bytes_key, _ in cursor:
if bytes_key >= bytes_stop:
break
key = fn_key_deserialize(bytes_key)
yield key
return keys
def _items_table_raw_factory(
env: lmdb.Environment,
table: lmdb._Database,
key_serialize_tag: Optional[str]
) -> Callable:
key_plugins = get_serialize_plugins(key_serialize_tag)
fn_key_serialize = key_plugins.serialize if key_plugins else _return_bytes_unchanged
fn_key_deserialize = key_plugins.deserialize if key_plugins else _return_bytes_unchanged
def items(self, start: Optional[str]=None, stop: Optional[str]=None) -> Iterator:
_empty_string_key_check(start)
_empty_string_key_check(stop)
bytes_start = fn_key_serialize(start) if (start is not None) else None
bytes_stop = fn_key_serialize(stop) if (stop is not None) else None
with env.begin() as txn:
with txn.cursor(table) as cursor:
if bytes_start is not None:
cursor.set_range(bytes_start)
if bytes_stop is None:
if fn_key_deserialize is _return_bytes_unchanged:
# The very fastest possible case: return byte keys and byte values, just use yield from!
yield from cursor
else:
for bytes_key, bytes_value in cursor:
key = fn_key_deserialize(bytes_key)
yield key, bytes_value
else:
for bytes_key, bytes_value in cursor:
if bytes_key >= bytes_stop:
break
key = fn_key_deserialize(bytes_key)
yield key, bytes_value
return items
def _items_table_deserialize_factory(
env: lmdb.Environment,
table: lmdb._Database,
key_serialize_tag: Optional[str],
value_serialize_tag: str
) -> Callable:
key_plugins = get_serialize_plugins(key_serialize_tag)
fn_key_serialize = key_plugins.serialize if key_plugins else _return_bytes_unchanged
fn_key_deserialize = key_plugins.deserialize if key_plugins else _return_bytes_unchanged
value_plugins = get_serialize_plugins(value_serialize_tag)
fn_value_deserialize = value_plugins.deserialize
def items(self, start: Optional[str]=None, stop: Optional[str]=None) -> Iterator:
_empty_string_key_check(start)
_empty_string_key_check(stop)
bytes_start = fn_key_serialize(start) if (start is not None) else None
bytes_stop = fn_key_serialize(stop) if (stop is not None) else None
with env.begin() as txn:
with txn.cursor(table) as cursor:
if bytes_start is not None:
cursor.set_range(bytes_start)
if bytes_stop is None:
for bytes_key, bytes_value in cursor:
key = fn_key_deserialize(bytes_key)
value = fn_value_deserialize(bytes_value)
yield key, value
else:
for bytes_key, bytes_value in cursor:
if bytes_key >= bytes_stop:
break
key = fn_key_deserialize(bytes_key)
value = fn_value_deserialize(bytes_value)
yield key, value
return items
def _items_table_decompress_factory(
env: lmdb.Environment,
table: lmdb._Database,
key_serialize_tag: Optional[str],
value_compress_tag: str
) -> Callable:
key_plugins = get_serialize_plugins(key_serialize_tag)
fn_key_serialize = key_plugins.serialize if key_plugins else _return_bytes_unchanged
fn_key_deserialize = key_plugins.deserialize if key_plugins else _return_bytes_unchanged
value_plugins = get_compress_plugins(value_compress_tag)
fn_value_decompress = value_plugins.decompress
def items(self, start: Optional[str]=None, stop: Optional[str]=None) -> Iterator:
_empty_string_key_check(start)
_empty_string_key_check(stop)
bytes_start = fn_key_serialize(start) if (start is not None) else None
bytes_stop = fn_key_serialize(stop) if (stop is not None) else None
with env.begin() as txn:
with txn.cursor(table) as cursor:
if bytes_start is not None:
cursor.set_range(bytes_start)
if bytes_stop is None:
for bytes_key, bytes_value in cursor:
key = fn_key_deserialize(bytes_key)
value = fn_value_decompress(bytes_value)
yield key, value
else:
for bytes_key, bytes_value in cursor:
if bytes_key >= bytes_stop:
break
key = fn_key_deserialize(bytes_key)
value = fn_value_decompress(bytes_value)
yield key, value
return items
def _items_table_deserialize_decompress_factory(
env: lmdb.Environment,
table: lmdb._Database,
key_serialize_tag: Optional[str],
value_serialize_tag: str,
value_compress_tag: str
) -> Callable:
key_plugins = get_serialize_plugins(key_serialize_tag)
fn_key_serialize = key_plugins.serialize if key_plugins else _return_bytes_unchanged
fn_key_deserialize = key_plugins.deserialize if key_plugins else _return_bytes_unchanged
value_serialize_plugins = get_serialize_plugins(value_serialize_tag)
fn_value_deserialize = value_serialize_plugins.deserialize
value_compress_plugins = get_compress_plugins(value_compress_tag)
fn_value_decompress = value_compress_plugins.decompress
def items(self, start: Optional[str]=None, stop: Optional[str]=None) -> Iterator:
_empty_string_key_check(start)
_empty_string_key_check(stop)
bytes_start = fn_key_serialize(start) if (start is not None) else None
bytes_stop = fn_key_serialize(stop) if (stop is not None) else None
with env.begin() as txn:
with txn.cursor(table) as cursor:
if bytes_start is not None:
cursor.set_range(bytes_start)
if bytes_stop is None:
for bytes_key, bytes_value in cursor:
key = fn_key_deserialize(bytes_key)
value = fn_value_deserialize(fn_value_decompress(bytes_value))
yield key, value
else:
for bytes_key, bytes_value in cursor:
if bytes_key >= bytes_stop:
break
key = fn_key_deserialize(bytes_key)
value = fn_value_deserialize(fn_value_decompress(bytes_value))
yield key, value
return items
def items_factory(
env: lmdb.Environment,
table: lmdb._Database,
key_serialize_tag: Optional[str],
value_serialize_tag: Optional[str],
value_compress_tag: Optional[str]
) -> Callable:
if value_serialize_tag and value_compress_tag:
return _items_table_deserialize_decompress_factory(
env, table, key_serialize_tag, value_serialize_tag, value_compress_tag)
elif not value_serialize_tag and value_compress_tag:
return _items_table_decompress_factory(
env, table, key_serialize_tag, value_compress_tag)
elif value_serialize_tag and not value_compress_tag:
return _items_table_deserialize_factory(
env, table, key_serialize_tag, value_serialize_tag)
else:
return _items_table_raw_factory(env, table, key_serialize_tag)
def _values_table_raw_factory(
env: lmdb.Environment,
table: lmdb._Database,
key_serialize_tag: Optional[str]
) -> Callable:
key_plugins = get_serialize_plugins(key_serialize_tag)
fn_key_serialize = key_plugins.serialize if key_plugins else _return_bytes_unchanged
def values(self, start: Optional[str]=None, stop: Optional[str]=None) -> Iterator:
_empty_string_key_check(start)
_empty_string_key_check(stop)
bytes_start = fn_key_serialize(start) if (start is not None) else None
bytes_stop = fn_key_serialize(stop) if (stop is not None) else None
with env.begin() as txn:
with txn.cursor(table) as cursor:
if bytes_start is not None:
cursor.set_range(bytes_start)
if bytes_stop is None:
for _, bytes_value in cursor:
yield bytes_value
else:
for bytes_key, bytes_value in cursor:
if bytes_key >= bytes_stop:
break
yield bytes_value
return values
def _values_table_deserialize_factory(
env: lmdb.Environment,
table: lmdb._Database,
key_serialize_tag: Optional[str],
value_serialize_tag: str
) -> Callable:
key_plugins = get_serialize_plugins(key_serialize_tag)
fn_key_serialize = key_plugins.serialize if key_plugins else _return_bytes_unchanged
value_plugins = get_serialize_plugins(value_serialize_tag)
fn_value_deserialize = value_plugins.deserialize
def values(self, start: Optional[str]=None, stop: Optional[str]=None) -> Iterator:
_empty_string_key_check(start)
_empty_string_key_check(stop)
bytes_start = fn_key_serialize(start) if (start is not None) else None
bytes_stop = fn_key_serialize(stop) if (stop is not None) else None
with env.begin() as txn:
with txn.cursor(table) as cursor:
if bytes_start is not None:
cursor.set_range(bytes_start)
if bytes_stop is None:
for _, bytes_value in cursor:
value = fn_value_deserialize(bytes_value)
yield value
else:
for bytes_key, bytes_value in cursor:
if bytes_key >= bytes_stop:
break
value = fn_value_deserialize(bytes_value)
yield value
return values
def _values_table_decompress_factory(
env: lmdb.Environment,
table: lmdb._Database,
key_serialize_tag: Optional[str],
value_compress_tag: str
) -> Callable:
key_plugins = get_serialize_plugins(key_serialize_tag)
fn_key_serialize = key_plugins.serialize if key_plugins else _return_bytes_unchanged
value_plugins = get_compress_plugins(value_compress_tag)
fn_value_decompress = value_plugins.decompress
def values(self, start: Optional[str]=None, stop: Optional[str]=None) -> Iterator:
_empty_string_key_check(start)
_empty_string_key_check(stop)
bytes_start = fn_key_serialize(start) if (start is not None) else None
bytes_stop = fn_key_serialize(stop) if (stop is not None) else None
with env.begin() as txn:
with txn.cursor(table) as cursor:
if bytes_start is not None:
cursor.set_range(bytes_start)
if bytes_stop is None:
for _, bytes_value in cursor:
value = fn_value_decompress(bytes_value)
yield value
else:
for bytes_key, bytes_value in cursor:
if bytes_key >= bytes_stop:
break
value = fn_value_decompress(bytes_value)
yield value
return values
def _values_table_deserialize_decompress_factory(
env: lmdb.Environment,
table: lmdb._Database,
key_serialize_tag: Optional[str],
value_serialize_tag: str,
value_compress_tag: str
) -> Callable:
key_plugins = get_serialize_plugins(key_serialize_tag)
fn_key_serialize = key_plugins.serialize if key_plugins else _return_bytes_unchanged
value_serialize_plugins = get_serialize_plugins(value_serialize_tag)
fn_value_deserialize = value_serialize_plugins.deserialize
value_compress_plugins = get_compress_plugins(value_compress_tag)
fn_value_decompress = value_compress_plugins.decompress
def values(self, start: Optional[str]=None, stop: Optional[str]=None) -> Iterator:
_empty_string_key_check(start)
_empty_string_key_check(stop)
bytes_start = fn_key_serialize(start) if (start is not None) else None
bytes_stop = fn_key_serialize(stop) if (stop is not None) else None
with env.begin() as txn:
with txn.cursor(table) as cursor:
if bytes_start is not None:
cursor.set_range(bytes_start)
if bytes_stop is None:
for _, bytes_value in cursor:
value = fn_value_deserialize(fn_value_decompress(bytes_value))
yield value
else:
for bytes_key, bytes_value in cursor:
if bytes_key >= bytes_stop:
break
value = fn_value_deserialize(fn_value_decompress(bytes_value))
yield value
return values
def values_factory(
env: lmdb.Environment,
table: lmdb._Database,
key_serialize_tag: Optional[str],
value_serialize_tag: Optional[str],
value_compress_tag: Optional[str]
) -> Callable:
if value_serialize_tag and value_compress_tag:
return _values_table_deserialize_decompress_factory(
env, table, key_serialize_tag, value_serialize_tag, value_compress_tag)
elif not value_serialize_tag and value_compress_tag:
return _values_table_decompress_factory(
env, table, key_serialize_tag, value_compress_tag)
elif value_serialize_tag and not value_compress_tag:
return _values_table_deserialize_factory(
env, table, key_serialize_tag, value_serialize_tag)
else:
return _values_table_raw_factory(env, table, key_serialize_tag) | yuna-db | /yuna-db-0.2.5.tar.gz/yuna-db-0.2.5/src/yuna/plugins.py | plugins.py |
__version__ = "0.2.5"
import os
import types
from typing import Any, Iterator, Optional
import lmdb
from .lmdb_util import YUNA_DEFAULT_MAX_DB_SIZE, YUNA_DEFAULT_MAX_TABLES
from .lmdb_util import YUNA_DB_META_KEY, YUNA_FILE_EXTENSION
from .lmdb_util import _lmdb_open, _yuna_get_meta, _yuna_new_meta, _yuna_put_meta
from .lmdb_util import _lmdb_reserved_delete, _lmdb_reserved_get, _lmdb_reserved_put
from .lmdb_util import _lmdb_table_drop, _lmdb_table_open, _lmdb_table_truncate
from .lmdb_util import delete_file_or_dir
from . import plugins
from .plugins import _YUNA_NOT_PROVIDED
from .plugins import SERIALIZE_JSON, SERIALIZE_MSGPACK, SERIALIZE_STR
from .plugins import COMPRESS_LZ4, COMPRESS_ZLIB, COMPRESS_ZSTD
from .plugins import serialize_json, deserialize_json
from .plugins import serialize_str, deserialize_str
from .plugins import _empty_string_key_check
class YunaSharedData:
"""
Private data for Yuna, in a class by itself so it can be shared
among the multiple classes implementing Yuna.
"""
def __init__(self, env: lmdb.Environment, tables_map: dict, metadata: dict, read_only: bool):
self.env = env
self.tables_map = tables_map
self.metadata = metadata
self.read_only = read_only
self.is_dirty = False
class YunaReservedTable:
"""
This class provides method functions to get/put values from the
LMDB reserved table.
This will be .reserved in the open Yuna instance.
"""
# This is opinionated code. You cannot specify a serialization or compression format.
# If you need to do anything that requires a specific serialization or compression
# format, create a table and use that. The reserved table should be mostly left alone.
# Note that LMDB stores things in the reserved table, and Bad Things would happen if
# you clobbered one of their special values. In particular, any name used for a table
# must not be clobbered.
#
# If you somehow have a real need to put something other than JSON into the reserved
# table, serialize it yourself and use .raw_put() to store it.
#
# LMDB lets you have any number of tables; use those and leave the reserved table alone.
def __init__(self, env: lmdb.Environment):
self.env = env
def delete(self, key: str) -> None:
"""
delete a key/value pair from the reserved table.
"""
bytes_key = serialize_str(key)
_lmdb_reserved_delete(self.env, bytes_key)
def get(self, key: str, default: Any=_YUNA_NOT_PROVIDED) -> Any:
bytes_key = serialize_str(key)
bytes_value = _lmdb_reserved_get(self.env, bytes_key, None)
if bytes_value is None:
if default is _YUNA_NOT_PROVIDED:
raise KeyError(key)
else:
return default
value = deserialize_json(bytes_value)
return value
def put(self, key: str, value: Any) -> None:
bytes_key = serialize_str(key)
bytes_value = serialize_json(value)
_lmdb_reserved_put(self.env, bytes_key, bytes_value)
def keys(self, start: Optional[str]=None, stop: Optional[str]=None) -> Iterator:
_empty_string_key_check(start)
_empty_string_key_check(stop)
bytes_start = serialize_str(start) if (start is not None) else None
bytes_stop = serialize_str(stop) if (stop is not None) else None
with self.env.begin() as txn:
with txn.cursor() as cursor:
if bytes_start is not None:
cursor.set_range(bytes_start)
if bytes_stop is None:
for bytes_key, _ in cursor:
key = deserialize_str(bytes_key)
yield key
else:
for bytes_key, _ in cursor:
if bytes_key >= bytes_stop:
break
key = deserialize_str(bytes_key)
yield key
def raw_delete(self, bytes_key: bytes) -> None:
_lmdb_reserved_delete(self.env, bytes_key)
def raw_get(self, bytes_key: bytes, default: Any=_YUNA_NOT_PROVIDED) -> Any:
bytes_value = _lmdb_reserved_get(self.env, bytes_key, None)
if bytes_value is None:
if default is _YUNA_NOT_PROVIDED:
raise KeyError(bytes_key)
else:
return default
return bytes_value
def raw_put(self, bytes_key: bytes, bytes_value: bytes) -> None:
_lmdb_reserved_put(self.env, bytes_key, bytes_value)
def raw_keys(self, bytes_start: Optional[bytes]=None, bytes_stop: Optional[bytes]=None) -> Iterator:
_empty_string_key_check(bytes_start)
_empty_string_key_check(bytes_stop)
with self.env.begin() as txn:
with txn.cursor() as cursor:
if bytes_start is not None:
cursor.set_range(bytes_start)
if bytes_stop is None:
for bytes_key, _ in cursor:
yield bytes_key
else:
for bytes_key, _ in cursor:
if bytes_key >= bytes_stop:
break
yield bytes_key
def raw_items(self, bytes_start: Optional[bytes]=None, bytes_stop: Optional[bytes]=None) -> Iterator:
_empty_string_key_check(bytes_start)
_empty_string_key_check(bytes_stop)
with self.env.begin() as txn:
with txn.cursor() as cursor:
if bytes_start is not None:
cursor.set_range(bytes_start)
if bytes_stop is None:
for bytes_key, bytes_value in cursor:
yield bytes_key, bytes_value
else:
for bytes_key, bytes_value in cursor:
if bytes_key >= bytes_stop:
break
yield bytes_key, bytes_value
def raw_values(self, bytes_start: Optional[bytes]=None, bytes_stop: Optional[bytes]=None) -> Iterator:
_empty_string_key_check(bytes_start)
_empty_string_key_check(bytes_stop)
with self.env.begin() as txn:
with txn.cursor() as cursor:
if bytes_start is not None:
cursor.set_range(bytes_start)
if bytes_stop is None:
for _, bytes_value in cursor:
yield bytes_value
else:
for bytes_key, bytes_value in cursor:
if bytes_key >= bytes_stop:
break
yield bytes_value
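# Illustrative sketch, not part of the library: how the reserved table above might be used
# from an already-open Yuna instance. The variable name `db` and the keys below are made up;
# values round-trip through JSON, while raw_put() stores caller-serialized bytes untouched.
def _example_reserved_table_usage(db):
    db.reserved.put("schema_version", 3)               # stored as JSON
    assert db.reserved.get("schema_version") == 3
    print(db.reserved.get("missing", default=None))    # default instead of KeyError
    db.reserved.raw_put(b"blob", b"\x00\x01")          # pre-serialized bytes, stored as-is
    for key in db.reserved.keys(start="a", stop="z"):  # keys in byte order, half-open range
        print(key)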
class YunaTablesMap:
"""
A trivial class, just used as a container for instances of YunaTable.
This will be .tables in the open Yuna instance.
"""
def __iter__(self):
return iter(vars(self).values())
class YunaTableMetadata:
def __init__(self,
name: str,
key_serialize: Optional[str] = None,
serialize: Optional[str] = None,
compress: Optional[str] = None,
):
self.name = name
self.key_serialize = key_serialize
self.serialize = serialize
self.compress = compress
class YunaTableBase:
# This class exists to document the method functions of a YunaTable.
#
# Most of the functions in YunaTable are made by a factory, and will
# be set to override these functions. But Python will find and use
# the docstrings from these functions. So this class is mainly
# to provide docstrings for all the functions.
def raw_delete(self, bytes_key: bytes) -> None:
"""
Delete a key/value pair from the table using the exact bytes key.
No key serialization will be performed.
"""
raise NotImplementedError("must override")
def delete(self, key: str) -> None:
"""
Delete a key/value pair from the table.
"""
raise NotImplementedError("must override")
def raw_put(self, bytes_key: bytes, bytes_value: bytes) -> None:
"""
Put a bytes value to the table using the bytes key.
No key serialization will be performed. No value serialization
or compression will be performed. The exact bytes key will be used
to put the exact bytes value into the table.
If there's already a value in the table it will be overwritten.
"""
raise NotImplementedError("must override")
def put(self, key: str, value: Any) -> None:
"""
Put a value to the table using the key.
If there's already a value in the table it will be overwritten.
"""
raise NotImplementedError("must override")
def get(self, key: str, default: Any=_YUNA_NOT_PROVIDED) -> Any:
"""
Get a value from the table using the key.
If the key is not present in the table, and a default value
was provided, returns the default value.
If the key is not present in the table, and no default value
was provided, raises KeyError.
"""
raise NotImplementedError("must override")
def raw_get(self, bytes_key: bytes, default: Optional[bytes]=_YUNA_NOT_PROVIDED) -> Any:
"""
Get a value from the table using the bytes_key. This must be the
exact bytes key value; no key serialization will be performed.
If the key is not present in the table, and a default value
was provided, returns the default value.
If the key is not present in the table, and no default value
was provided, raises KeyError.
"""
raise NotImplementedError("must override")
def keys(self, start: Optional[str]=None, stop: Optional[str]=None) -> Iterator:
"""
Get an iterator that yields up keys from the table.
Keys will be yielded in the order of their byte key values
(i.e. the values of the keys after any serialization).
If start was provided, the iterator will start on the first key that
is equal to or greater than the provided start value.
If stop was provided, the iterator will stop before yielding
a key that is equal to or greater than the provided stop value.
For example, if a table included the following key/value pairs:
a=1, b=2, d=4, e=5
Then keys(start='c', stop='e') would only yield one key: 'd'
"""
raise NotImplementedError("must override")
def raw_keys(self, bytes_start: Optional[bytes]=None, bytes_stop: Optional[bytes]=None) -> Iterator:
"""
Get an iterator that yields up raw keys from the table. These will
be the actual byte strings of the keys; no key deserialization
will be performed.
If bytes_start was provided, the iterator will start on the first key that
is equal to or greater than the provided start value.
If bytes_stop was provided, the iterator will stop before yielding
a key that is equal to or greater than the provided stop value.
For example, if a table included the following key/value pairs:
b'a'=0x01, b'b'=0x02, b'd'=0x04, b'e'=0x05
Then raw_keys(start=b'c', stop=b'e') would only yield one key: b'd'
"""
raise NotImplementedError("must override")
def items(self, start: Optional[str]=None, stop: Optional[str]=None) -> Iterator:
"""
Get an iterator that yields up key/value pairs from the table.
Each item will be a tuple of the form (key, value)
Tuples will be yielded in the order of their keys after serialization.
If start was provided, the iterator will start on the first key that
is equal to or greater than the provided start value.
If stop was provided, the iterator will stop before yielding
a key that is equal to or greater than the provided stop value.
For example, if a table included the following key/value pairs:
a=1, b=2, d=4, e=5
Then items(start='c', stop='e') would only yield one tuple: ('d', 4)
"""
raise NotImplementedError("must override")
def raw_items(self, bytes_start: Optional[bytes]=None, bytes_stop: Optional[bytes]=None) -> Iterator:
"""
Get an iterator that yields up raw key/value pairs from the table.
Each item will be a tuple of the form (bytes_key, bytes_value)
Tuples will be yielded in the order of their byte keys.
No key deserialization will be performed. No value deserialization
or decompression will be performed.
If start was provided, the iterator will start on the first key that
is equal to or greater than the provided start value.
If stop was provided, the iterator will stop before yielding
a key that is equal to or greater than the provided stop value.
For example, if a table included the following key/value pairs:
b'a'=0x01, b'b'=0x02, b'd'=0x04, b'e'=0x05
raw_items(start=b'c', stop=b'e') would only yield one tuple: (b'd', 0x04)
"""
raise NotImplementedError("must override")
def values(self, start: Optional[str]=None, stop: Optional[str]=None) -> Iterator:
"""
Get an iterator that yields up values from the table.
Values will be yielded in the order of their keys after serialization
(but the keys themselves will not be yielded).
If start was provided, the iterator will start on the value for the
first key that is equal to or greater than the provided start value.
If stop was provided, the iterator will stop before yielding
the value for the first key that is equal to or greater than the
provided stop value.
For example, if a table included the following key/value pairs:
a=1, b=2, d=4, e=5
Then values(start='c', stop='e') would only yield one value: 4
"""
raise NotImplementedError("must override")
def raw_values(self, bytes_start: Optional[bytes]=None, bytes_stop: Optional[bytes]=None) -> Iterator:
"""
Get an iterator that yields up actual byte values from the table.
Values will be yielded in the order of their byte keys
(but the keys themselves will not be yielded).
No deserialization or decompression of values will be performed.
If start was provided, the iterator will start on the value for the
first key that is equal to or greater than the provided start value.
If stop was provided, the iterator will stop before yielding
the value for the first key that is equal to or greater than the
provided stop value.
For example, if a table included the following key/value pairs:
b'a'=0x01, b'b'=0x02, b'd'=0x04, b'e'=0x05
Then raw_values(start=b'c', stop=b'e') would only yield one value: 0x04
"""
raise NotImplementedError("must override")
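# Illustrative sketch, not part of the library: the range semantics documented above, using a
# hypothetical table `tbl` whose keys are strings and whose values round-trip through a
# serializer such as JSON.
def _example_range_queries(tbl):
    for k, v in (("a", 1), ("b", 2), ("d", 4), ("e", 5)):
        tbl.put(k, v)
    assert list(tbl.keys(start="c", stop="e")) == ["d"]
    assert list(tbl.items(start="c", stop="e")) == [("d", 4)]
    assert list(tbl.values(start="c", stop="e")) == [4]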
class YunaTable(YunaTableBase):
"""
This class implements a table for Yuna.
Provides method functions for delete, get, put, etc.
"""
def __init__(self,
shared: YunaSharedData,
name: str,
key_serialize: Optional[str],
serialize: Optional[str],
compress: Optional[str],
):
if name in shared.tables_map:
raise ValueError(f"table '{name}' is already open in this database")
# TODO: check key_serialize to see if we are doing integer keys here
try:
temp = plugins.get_serialize_plugins(key_serialize)
except ValueError:
raise ValueError("unknown serialization format for key_serialize: {key_serialize!r}")
try:
temp = plugins.get_serialize_plugins(serialize)
except ValueError:
raise ValueError("unknown serialization format for serialize: {serialize!r}")
try:
temp = plugins.get_compress_plugins(compress)
except ValueError:
raise ValueError("unknown compression format for compress: {compress!r}")
meta = YunaTableMetadata(
name=name,
key_serialize=key_serialize, serialize=serialize, compress=compress
)
self._shared = shared
# Check whether the table name is in the metadata. If it is there, assume the table already exists
# in the LMDB file, so we don't need to create it. If it's not there, we need to create it.
create = name not in self._shared.metadata["tables"]
# integerkey forced false for now
self.lmdb_table = _lmdb_table_open(self._shared.env, name, create=create, integerkey=False)
self.name = name
self.meta = meta
key_serialize = meta.key_serialize
# add method functions based on what's documented in the metadata
env = self._shared.env
lmdb_table = self.lmdb_table
temp = plugins.delete_factory(env, lmdb_table, key_serialize)
self.delete = types.MethodType(temp, self)
temp = plugins.delete_factory(env, lmdb_table, None)
temp.__name__ = "raw_delete"
self.raw_delete = types.MethodType(temp, self)
temp = plugins.get_factory(env, lmdb_table, key_serialize, serialize, compress)
self.get = types.MethodType(temp, self)
temp = plugins.get_factory(env, lmdb_table, None, None, None)
temp.__name__ = "raw_get"
self.raw_get = types.MethodType(temp, self)
temp = plugins.put_factory(env, lmdb_table, key_serialize, serialize, compress)
self.put = types.MethodType(temp, self)
temp = plugins.put_factory(env, lmdb_table, None, None, None)
temp.__name__ = "raw_put"
self.raw_put = types.MethodType(temp, self)
temp = plugins.items_factory(env, lmdb_table, key_serialize, serialize, compress)
self.items = types.MethodType(temp, self)
temp = plugins.items_factory(env, lmdb_table, None, None, None)
temp.__name__ = "raw_items"
self.raw_items = types.MethodType(temp, self)
temp = plugins.keys_factory(env, lmdb_table, key_serialize)
self.keys = types.MethodType(temp, self)
temp = plugins.keys_factory(env, lmdb_table, None)
temp.__name__ = "raw_keys"
self.raw_keys = types.MethodType(temp, self)
temp = plugins.values_factory(env, lmdb_table, key_serialize, serialize, compress)
self.values = types.MethodType(temp, self)
temp = plugins.values_factory(env, lmdb_table, None, None, None)
temp.__name__ = "raw_values"
self.raw_values = types.MethodType(temp, self)
# Table instance fully created so keep track of it
self._shared.tables_map[name] = self
if create:
self._shared.metadata["tables"][name] = vars(self.meta)
_yuna_put_meta(self._shared.env, self._shared.metadata)
else:
assert self._shared.metadata["tables"][name] == vars(self.meta)
def __delitem__(self, key):
return self.delete(key)
def __getitem__(self, key):
return self.get(key)
def __setitem__(self, key, value):
return self.put(key, value)
def drop(self):
"""
Drop a table. Delete all key/value pairs and the table itself.
"""
if self._shared.read_only:
raise RuntimeError("database was opened read-only; cannot drop table")
self._shared.is_dirty = True
_lmdb_table_drop(self._shared.env, self.lmdb_table)
del self._shared.tables_map[self.name]
del self._shared.metadata["tables"][self.name]
self._shared = self.lmdb_table = self.name = self.meta = None
def truncate(self):
"""
Delete all key/value pairs from table.
"""
_lmdb_table_truncate(self._shared.env, self.lmdb_table)
class Yuna:
"""
Key/value store with dict-like semantics. A wrapper around LMDB.
"""
def __init__(self,
fname: str,
# YunaDB name and version
name: Optional[str]=None,
version: Optional[int]=None,
# details of an LMDB file follow
read_only: bool=True,
create: bool=False,
safety_mode: str='a',
single_file: bool=True,
max_tables: int=YUNA_DEFAULT_MAX_TABLES,
max_db_file_size: int=YUNA_DEFAULT_MAX_DB_SIZE,
**kwargs
):
if create:
read_only = False
env = _lmdb_open(fname,
read_only=read_only, create=create, safety_mode=safety_mode, single_file=single_file,
max_tables=max_tables, max_db_file_size=max_db_file_size,
extra_args=kwargs)
if create:
metadata = _yuna_new_meta(name=name, version=version)
else:
metadata = _yuna_get_meta(env, name=name, version=version)
tables = YunaTablesMap()
reserved = YunaReservedTable(env=env)
self.pathname = os.path.abspath(fname)
self.metadata = metadata
self.reserved = reserved
self._shared = YunaSharedData(env=env, tables_map=vars(tables), metadata=metadata, read_only=read_only)
self.tables = tables
# Set up an entry in .tables for each table listed in metadata, with delete/get/put functions ready to use.
for meta in metadata["tables"].values():
YunaTable(self._shared, meta["name"], meta["key_serialize"], meta["serialize"], meta["compress"])
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
# if .close() was already called we have no work to do
if "_shared" in vars(self):
self.close()
return False # if there was an exception, do not suppress it
def sync(self):
"""
Ensure that all data is flushed to disk.
Useful when Yuna was opened in "unsafe" mode.
"""
if self._shared.is_dirty and not self._shared.read_only:
_yuna_put_meta(self._shared.env, self._shared.metadata)
self._shared.is_dirty = False
self._shared.env.sync()  # flush LMDB buffers to disk
def close(self):
"""
Close the Yuna instance.
Ensures that all data is flushed to disk.
"""
if "_shared" not in vars(self):
return
if self._shared.is_dirty and not self._shared.read_only:
_yuna_put_meta(self._shared.env, self._shared.metadata)
self._shared.is_dirty = False
self._shared.env.close()
del self.tables
del self._shared
@property
def table_names(self):
return sorted(vars(self.tables))
def new_table(self,
name: str,
key_serialize: Optional[str]=SERIALIZE_STR,
serialize: Optional[str]=None,
compress: Optional[str]=None
):
"""
Open a new Yuna table.
Creates the table in the LMDB file, updates the Yuna metadata,
and sets up serialization and optional compression as requested.
"""
if self._shared.read_only:
raise RuntimeError("database was opened read-only; cannot make new table")
tbl = YunaTable(self._shared, name, key_serialize, serialize, compress)
# YunaTable takes care of adding the new table to self.tables
assert name in vars(self.tables)
# YunaTable also updates the metadata
assert name in self._shared.metadata["tables"]
self._shared.is_dirty = True
return tbl
def new_table_like(self,
tbl: YunaTable,
name: Optional[str],
):
"""
Open a new Yuna table that's like another table that's already open.
Looks at the metadata in the table to find how the already-open table
was set up, then calls .new_table() with the same settings to make
a new table set up exactly the same as the already-open table.
If name is given as None, the new table will be given the same name
as the already-open table. This only makes sense if the new table is
in a different Yuna database file than the already-open table.
"""
# just copy all the metadata
meta = vars(tbl.meta).copy()
# if we have a new table name, set it in now
if name is not None:
meta["name"] = name
return self.new_table(**meta)
def repack(self):
"""
Repack the database file to be minimal size.
Can only be done after you call the .close() method, to make
sure that all the data is safely written and the database
is in a clean state.
This actually makes a copy of the database file, then deletes
the original file and renames the copy to the original filename.
"""
# first, check to see if this instance was properly closed
if "_shared" in vars(self):
raise RuntimeError("must call call .close() before calling .repack()")
# Use LMDB copy operation with compact=True for most efficient repacking
pathname_repack = self.pathname + ".yuna_repack"
# If someone interrupted an attempt to repack, clean up old repack attempt file now.
delete_file_or_dir(pathname_repack)
with YunaReadOnly(self.pathname, None, None) as db_old:
db_old._shared.env.copy(pathname_repack, compact=True)
# If no exception was raised, we have a new compacted database file! Rename old to new.
delete_file_or_dir(self.pathname)
# If there's a lockfile, just delete it along with original file.
temp = self.pathname + "-lock"
if os.path.exists(temp):
os.remove(temp)
os.rename(pathname_repack, self.pathname)
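# Illustrative quickstart sketch, not part of the library. The file name, table name and keys
# are made up; SERIALIZE_JSON and YUNA_FILE_EXTENSION are the constants imported at the top of
# this module (compress=COMPRESS_ZSTD could also be passed if that plugin is available).
def _example_quickstart():
    fname = "example" + YUNA_FILE_EXTENSION
    with Yuna(fname, name="example", version=1, create=True) as db:
        tbl = db.new_table("docs", serialize=SERIALIZE_JSON)
        tbl.put("greeting", {"text": "hello"})
        assert tbl.get("greeting") == {"text": "hello"}
        tbl["farewell"] = {"text": "bye"}   # __setitem__ forwards to put()
        print(sorted(tbl.keys()))           # ['farewell', 'greeting']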
class YunaReadOnly(Yuna):
def __init__(self, *args, **kwargs):
kwargs["read_only"] = True
kwargs["create"] = False
kwargs["safety_mode"] = 'u'
super().__init__(*args, **kwargs) | yuna-db | /yuna-db-0.2.5.tar.gz/yuna-db-0.2.5/src/yuna/__init__.py | __init__.py |
# yunbi
A Python wrapper for the yunbi.com api
 
## Install
You can install and upgrade this wrapper from pip:
```
$ pip install -U yunbi
```
## Docs
This wrapper is based on the API list on yunbi.com; you can check it out at [YUNBI EXCHANGE API LIST](https://yunbi.com/swagger/#/default).
Note that in the `get_trades` and `get_trades_my` functions, the official API uses `from` as a key, which is also a reserved word in Python.
To work around this, use `from_id` instead of `from`.
For example,
```
get_trades('ethcny', from_id=123456)
```
## Example
```
from yunbi import Yunbi
y = Yunbi() # Access to public API
y.get_tickers_market('ethcny') # Get the ETH/CNY market ticker
y = Yunbi('YOUR API KEY', 'YOUR SECRET KEY') # Access to public and private API
y.get_members_me() # Get your account information
```
## License
[MIT License](https://github.com/imlonghao/yunbi/blob/master/LICENSE)
| yunbi | /yunbi-0.2.2.tar.gz/yunbi-0.2.2/README.md | README.md |
import json
import cv2
from collections import Counter
import numpy as np
david_config = {"3":2.5,"4":5,"5":10,"6":15,"7":20,"8":25,"9":40,"10":65,"11":80}
david_index = [2.5,5,10,15,20,25,40,65,80]
ct_config = {"T":"2","C":"1"}
color_type = [1,2,3]
def imshow(imdata):
h,w,c = imdata.shape
if h > 400:
w = 400*w//h
h = 400
imdata2 = cv2.resize(imdata,[w,h])
cv2.imshow("ws",imdata2)
cv2.waitKey(0)
cv2.destroyAllWindows()
def read_label(label_path):
'''
Read a labelme annotation file for an image.
:param label_path: path to the labelme JSON annotation
:return: {"C":[[],[]],"T":[[],[]],"label":[positions of each marked point, ordered from smallest to largest]}
'''
with open(label_path,'r') as f:
dataStr = f.read()
dataObj = json.loads(dataStr)
shapes = dataObj['shapes']
shapesDir = {}
result = {}
for i in shapes:
shapesDir[i['label']] = i['points']
result['T'] = shapesDir[ct_config['T']]
result['C'] = shapesDir[ct_config['C']]
result['label'] = []
for i in david_config:
result['label'].append(shapesDir[i])
return result
def get_deepest_y_loc(imc):
im = cv2.cvtColor(imc, cv2.COLOR_BGR2LAB)
im = im[:, :, 0]
im = np.sum(im, axis=0)
deep_loc = np.where(im==np.min(im))[0][0]
return deep_loc
def get_deepest_y_ave(imdata,loc,type=1):
'''
Get the mean value at the darkest-pixel column within the specified region.
:param imdata: image data read with OpenCV, in BGR format
:param loc: [[x,y],[lengthx,lengthy]]
:param type: which colour space the returned value is computed in
:return:
'''
check_color = imdata[int(loc[0][1]):int(loc[1][1]),int(loc[0][0]):int(loc[1][0]),:]
deepst_loc = get_deepest_y_loc(check_color)
if type == 1:
# RGB
check_color = cv2.cvtColor(check_color,cv2.COLOR_BGR2RGB)
check_color = check_color[:,:,0]
im = np.average(check_color,axis=0)
return im[deepst_loc]
elif type == 2:
check_color = cv2.cvtColor(check_color,cv2.COLOR_BGR2GRAY)
im = np.average(check_color,axis=0)
return im[deepst_loc]
elif type == 3:
check_color = cv2.cvtColor(check_color,cv2.COLOR_BGR2LAB)
check_color1 = check_color[:,:,0]
im = np.average(check_color1,axis=0)
check_color2 = check_color[:, :, 1]
im2 = np.average(check_color2, axis=0)
check_color3 = check_color[:, :, 2]
im3 = np.average(check_color3, axis=0)
# return np.sum(im)/len(im)
return im[deepst_loc] + im2[deepst_loc] + im3[deepst_loc]
return 0
def check(picpath,label_path,type,fig=None):
imdata = cv2.imread(picpath)
label_info = read_label(label_path)
T = label_info['T']
label = label_info["label"]
T_ave = get_deepest_y_ave(imdata,T,type)
label_ave = []
for i in label:
ave = get_deepest_y_ave(imdata,i,type)
label_ave.append(ave)
if fig is not None:
fig.plot([i for i in range(len(label_ave))],label_ave)
fig.plot([i for i in range(len(label_ave))],[T_ave for i in range(len(label_ave))])
for x, y in enumerate(label_ave):
fig.text(x, y, f'{round(y,3)}')  # fig is the pyplot module passed in by the caller
fig.show()
print(f"T:{T_ave},label:{label_ave}")
T_ave = [T_ave] * len(label_ave)
d = abs(np.subtract(T_ave, label_ave))
index = np.where(d == np.min(d))[0][0]
print(np.min(d))
return david_index[index]
if __name__ == "__main__":
from matplotlib import pyplot as plt
p = r"D:\BaiduNetdiskDownload\picture\picture\iphone\iphone_pos1_"
pics = [ p + f"{i}.JPG" for i in range(21,41)]
labels = [ p + f"{i}.json" for i in range(21,41)]
z = zip(pics,labels)
res = []
for i in z:
res.append(check(i[0],i[1],3,plt))
print(Counter(res))
# picpath = r"D:\BaiduNetdiskDownload\picture\picture\android\android_pos1_1.JPG"
# label_path = r"D:\BaiduNetdiskDownload\picture\picture\android\android_pos1_1.json" | yuncheng-util-pkg | /yuncheng_util_pkg-1.3-py3-none-any.whl/yuncheng_util_pkg/check_color_board.py | check_color_board.py |
UNDEFINED_RESULT = -2
import shutil, os
import json
import cv2
import requests
import matplotlib.pyplot as plt
from .yuncheng_al_class import *
from .util_file import *
class Seral():
def toJson(self):
return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True, indent=4)
class YunchengAlResult(Seral):
def __init__(self,filepath,jsonObj):
self.filepath = filepath
if jsonObj is None:
self.code = 404
else:
self.lhClineRight = jsonObj.get("lhClineRight",UNDEFINED_RESULT)
self.barcode = jsonObj.get("barcode",UNDEFINED_RESULT)
self.lhCheckUrl = jsonObj.get("lhCheckUrl",UNDEFINED_RESULT)
self.lhPaperAlType = jsonObj.get("lhPaperAlType",UNDEFINED_RESULT)
self.lhClineLeft = jsonObj.get("lhClineLeft",UNDEFINED_RESULT)
self.reverse = jsonObj.get("reverse",0)
self.lhTlineLeft = jsonObj.get("lhTlineLeft",UNDEFINED_RESULT)
self.errorCode = jsonObj.get("errorCode", 0)
self.cLocation = jsonObj.get("cLocation", UNDEFINED_RESULT)
self.code = jsonObj.get("code", 0)
self.lhValue = jsonObj.get("lhValue", UNDEFINED_RESULT)
self.lhTlineRight = jsonObj.get("lhTlineRight", UNDEFINED_RESULT)
self.lhRatio = jsonObj.get("lhRatio", UNDEFINED_RESULT)
self.tLocation = jsonObj.get("tLocation",UNDEFINED_RESULT)
class Compare(Seral):
def __init__(self,filepath:str,result1:YunchengAlResult,result2:YunchengAlResult):
self.filepath = filepath
self.result1 = result1
self.result2 = result2
def compare_pre(self):
if self.result1.code == 404 and self.result2.code != 404:
return False
if self.result1.code != 404 and self.result2.code == 404:
return False
return True
def compare_value(self):
key = "lhValue"
v1 = self.result1.lhValue
v2 = self.result2.lhValue
return int(v1 != v2)
def compare_line(self):
'''
Compare the detected line positions of the two results. The return value is a bit mask:
0x01 - one result has a T line and the other does not
0x02 - one result has a C line and the other does not
0x04 - both have a T line but the T line ranges do not overlap
0x08 - both have a C line but the C line ranges do not overlap
:return: bitwise OR of the flags above; 0 means no difference
'''
key1 = "lhTlineLeft"
key2 = "lhTlineRight"
key3 = "lhClineLeft"
key4 = "lhClineRight"
v1_key1 = self.result1.lhTlineLeft
v2_key1 = self.result2.lhTlineLeft
v1_key2 = self.result1.lhTlineRight
v2_key2 = self.result2.lhTlineRight
v1_key3 = self.result1.lhClineLeft
v2_key3 = self.result2.lhClineLeft
v1_key4 = self.result1.lhClineRight
v2_key4 = self.result2.lhClineRight
result = 0
if (v1_key2 == 0 and v2_key2 != 0) or (v1_key2 != 0 and v2_key2 == 0):
result = result ^ 0x01
if (v1_key4 == 0 and v2_key4 != 0) or (v1_key4 != 0 and v2_key4 == 0):
result = result ^ 0x02
if result != 0:
return result
if v1_key2 != 0:
if v1_key1 > v2_key2 or v1_key2 < v2_key1:
result = result ^ 0x04
if v1_key3 != 0:
if v1_key3 > v2_key4 or v1_key4 < v2_key3:
result = result ^ 0x08
return result
def compare_brand(self):
key = "lhPaperAlType"
v1 = self.result1.lhPaperAlType
v2 = self.result2.lhPaperAlType
return int(v1 != v2)
def compare_direction(self):
key = "reverse"
v1 = self.result1.reverse
v2 = self.result2.reverse
return int(v1 != v2)
def compare_al_result(self) -> 'Compare_result':
compare_result = {"wrong": 0}
if not self.compare_pre():
compare_result['wrong'] = 1
return compare_result
brand_result = self.compare_brand()
line_result = self.compare_line()
direction_result = self.compare_direction()
value_result = self.compare_value()
return Compare_result(self.filepath,line_result,brand_result,direction_result,value_result,self.result1,self.result2)
def save_compare_result(self):
pass
class Compare_result(Seral):
def __init__(self,file,line_result,brand_result,dirction_result,value_result,result1:YunchengAlResult,result2:YunchengAlResult):
self.file = file
self.line_result = line_result
self.brand_result = brand_result
self.dirction_result = dirction_result
self.value_result = value_result
self.result1 = result1
self.result2 = result2
def show_dif_brand_result(self,saveResult:[],sameResult:[]=None):
if self.brand_result != 0:
saveResult.append({"file":self.file,"url1-result":self.result1.lhPaperAlType,"url2-result":self.result2.lhPaperAlType})
else:
if sameResult is not None:
sameResult.append({"file": self.file, "url1-result": self.result1.lhPaperAlType,
"url2-result": self.result2.lhPaperAlType})
def show_dif_line_result(self,saveResult:[],sameResult:[]=None):
if self.line_result != 0:
saveResult.append({"file":self.file,"url1-result":[self.result1.lhTlineLeft,self.result1.lhTlineRight,self.result1.lhClineLeft,self.result1.lhClineRight,self.result1.reverse],
"url2-result":[self.result2.lhTlineLeft,self.result2.lhTlineRight,self.result2.lhClineLeft,self.result2.lhClineRight,self.result2.reverse]})
else:
if sameResult is not None:
sameResult.append({"file": self.file,
"url1-result": [self.result1.lhTlineLeft, self.result1.lhTlineRight,
self.result1.lhClineLeft, self.result1.lhClineRight,
self.result1.reverse],
"url2-result": [self.result2.lhTlineLeft, self.result2.lhTlineRight,
self.result2.lhClineLeft, self.result2.lhClineRight,
self.result2.reverse]})
else:
pass
def show_dif_value_result(self,saveResult:[],sameResult:[]=None):
if self.value_result != 0:
saveResult.append({"file":self.file,"url1-result":self.result1.lhValue,"url2-result":self.result2.lhValue})
elif sameResult is not None:
sameResult.append({"file":self.file,"url1-result":self.result1.lhValue,"url2-result":self.result2.lhValue})
def show_dif_direction_result(self,saveResult:[],sameResult:[]=None):
if self.dirction_result != 0:
saveResult.append({"file":self.file,"url1-result":self.result1.reverse,"url2-result":self.result2.reverse})
elif sameResult is not None:
sameResult.append({"file":self.file,"url1-result":self.result1.reverse,"url2-result":self.result2.reverse})
class Get_result_and_compare():
def __init__(self,url1,url2,picLocals,saveFile,useCache,url1ResultSavePath,url2ResultSavePath):
print(f"test files:{len(picLocals)}")
self.url1 = url1
self.url2 = url2
self.url1ResultSavePath = url1ResultSavePath
self.url2ResultSavePath = url2ResultSavePath
self.picLocals = picLocals
self.saveFile = saveFile
self.useCache = useCache
if useCache:
self.cache_init()
def cache_init(self):
try:
with open(self.url1ResultSavePath,'r') as f:
self.result1Cache = json.loads(f.read())
except Exception as e:
self.result1Cache = {}
try:
with open(self.url2ResultSavePath, 'r') as f:
self.result2Cache = json.loads(f.read())
except Exception as e:
self.result2Cache = {}
def cache_save(self):
if self.useCache is True and self.url1ResultSavePath is not None and self.url2ResultSavePath is not None:
with open(self.url1ResultSavePath,'w') as f:
f.write(json.dumps(self.result1Cache))
with open(self.url2ResultSavePath,'w') as f:
f.write(json.dumps(self.result2Cache))
def cache_insert(self,key,v,index):
cache = self.result1Cache
if index == 2:
cache = self.result2Cache
cache[key] = v
def cache_get(self,key,index):
cache = self.result1Cache
if index == 2:
cache = self.result2Cache
return cache.get(key,None)
def get_result(self,session,url,jsonData,index=1):
result = None
if self.useCache:
id = jsonData['id']
result = self.cache_get(id,index)
if result is None:
result = post_for_request(url, jsonData,session)
if self.useCache:
id = jsonData['id']
self.cache_insert(id,result,index)
return result
def compare_two_pic_local(self):
session = requests.session()
lastResult = []
for index,i in enumerate(self.picLocals):
try:
print(f"test:{index}")
imdata = read_pic_to_base64(i)
id = i
jsonData = make_al_input(imdata,id)
result1 = self.get_result(session, self.url1, jsonData,1)
result2 = self.get_result(session, self.url2, jsonData,2)
com = Compare(i,YunchengAlResult(i, result1),YunchengAlResult(i, result2))
lastResult.append(com.compare_al_result())
except Exception as e:
print("error:{}".format(e))
self.cache_save()
self.lastResult = lastResult
def summary(self,saveSameData=False):
value_dif = []
line_dif = []
brand_dif = []
direct_dif = []
value_same = None
line_same = None
brand_same = None
direct_same = None
if saveSameData:
value_same = []
line_same = []
brand_same = []
direct_same = []
for i in self.lastResult:
i.show_dif_direction_result(direct_dif,direct_same)
i.show_dif_line_result(line_dif,line_same)
i.show_dif_value_result(value_dif,value_same)
i.show_dif_brand_result(brand_dif,brand_same)
cou = 0
for i in self.lastResult:
if i.result1.lhPaperAlType == i.result2.lhPaperAlType and i.result1.lhPaperAlType in (7,8,9):
cou += 1
print("7,8,9 相同的共有:{}".format(cou))
print(f"sum:{len(self.lastResult)},brand_dif:{len(brand_dif)},direct_dif:{len(direct_dif)},line_dif:{len(line_dif)},value_dif:{len(value_dif)}")
lastResult = {"v":value_dif,"line":line_dif,"brand":brand_dif,"rever":direct_dif}
if saveSameData:
lastResult['value_same'] = value_same
lastResult['line_same'] = line_same
lastResult['brand_same'] = brand_same
lastResult['rever_same'] = direct_same
with open(self.saveFile,'w') as f:
f.write(json.dumps(lastResult))
class DecodeTheSummaryFile():
def __init__(self,summaryFile,savePath,sameDataSave = None):
self.fig_number = 10
self.summaryFile = summaryFile
with open(summaryFile,'r') as f:
data = f.read()
data = json.loads(data)
self.brand = data['brand']
self.line = data['line']
self.rever = data['rever']
self.value = data['v']
self.savePath = savePath
self.sameDataSave = sameDataSave
if sameDataSave is not None and data.__contains__("brand_same") is True:
self.brand_same = data['brand_same']
self.line_same = data['line_same']
self.rever_same = data['rever_same']
self.value_same = data['value_same']
def show_brand_dif(self):
count = []
for i in self.brand:
if i['url1-result'] in (7,8,9) or i['url2-result'] in (7,8,9):
count.append(i)
figIndex = 0
for index,i in enumerate(count):
if index % self.fig_number == 0:
plt.figure(figsize=(10, 20))
filepath = i['file']
title = 'url1:{},url2:{},{}'.format(i['url1-result'],i['url2-result'],os.path.basename(filepath))
data = cv2.imread(filepath)
rows = index % self.fig_number + 1
plt.subplot(self.fig_number,1,rows)
plt.imshow(data[:,:,::-1])
plt.title(title)
if rows == 10 or index == len(count) - 1:
plt.savefig(f'{self.savePath}/brand-{figIndex}.png')
figIndex += 1
# plt.show()
def show_value_dif(self):
figIndex = 0
for index, i in enumerate(self.value):
filepath = i['file']
title = 'url1:{},url2:{},{}'.format(i['url1-result'], i['url2-result'],os.path.basename(filepath))
data = cv2.imread(filepath)
if index % self.fig_number == 0:
plt.figure(figsize=(10, 20))
rows = index % self.fig_number + 1
plt.subplot(self.fig_number, 1, rows)
plt.imshow(data[:, :, ::-1])
plt.title(title)
if rows == self.fig_number or index == len(self.value) - 1:
plt.savefig(f'{self.savePath}/value-{figIndex}.png')
figIndex += 1
plt.show()
def show_direction_dif(self):
figIndex = 0
for index,i in enumerate(self.rever):
if index % self.fig_number == 0:
plt.figure(figsize=(10, 20))
rows = index % self.fig_number + 1
plt.subplot(self.fig_number,1,rows)
filepath = i['file']
title = 'url1:{},url2:{},{}'.format(i['url1-result'],i['url2-result'],os.path.basename(i['file']))
data = cv2.imread(filepath)
plt.imshow(data[:,:,::-1])
plt.title(title)
if rows == 10 or index == len(self.rever) - 1:
plt.savefig(f'{self.savePath}/dire-{figIndex}.png')
figIndex += 1
plt.show()
def draw_line(self,filepath,line):
imdata = cv2.imread(filepath)
if line[4] == 1:
imdata = setRotateJust180(imdata)
h,w,_ = imdata.shape
if line[0] > 0.1:
t1 = int(w*line[0])
t2 = int(w*line[1])
imdata[h*3//4:,t1:t2,:] = (255,0,0)
if line[2] > 0.1:
c1 = int(w*line[2])
c2 = int(w*line[3])
imdata[h*3//4:,c1:c2,:] = (0,0,255)
return imdata
def show_line_dir(self):
figIndex = 0
for index,i in enumerate(self.line):
filepath = i['file']
if index % self.fig_number == 0:
fig = plt.figure(figsize=(10, 25))
rows = index % self.fig_number + 1
plt.subplot(self.fig_number*2,1,rows*2-1)
imdata1 = self.draw_line(filepath,i['url1-result'])
plt.imshow(imdata1[:,:,::-1])
plt.title("url1 result,reverse:{}".format(i['url1-result'][4]))
plt.subplot(self.fig_number*2,1,rows*2)
imdata2 = self.draw_line(filepath,i['url2-result'])
plt.imshow(imdata2[:,:,::-1])
plt.title("url2 result,reverse:{}".format(i['url2-result'][4]))
if rows == self.fig_number or index == len(self.line)-1:
plt.savefig(f'{self.savePath}/line-{figIndex}.png')
plt.show()
figIndex+=1
def get_brand_dif(self,save_dir):
if os.path.exists(save_dir) is False:
os.makedirs(save_dir)
for i in self.brand:
filepath = i['file']
name = os.path.join(save_dir,os.path.basename(filepath))
shutil.copy(filepath,name)
def get_concrete_brand(self,id):
# print(self.brand)
for i in self.brand:
if i['file'].find(id) != -1:
print(i)
break
def summary_brand_dif(self):
count = 0
for i in self.brand:
if i['url1-result'] in (7,8,9) or i['url2-result'] in (7,8,9):
count += 1
print(count)
def get_brand_same(self):
if self.sameDataSave is None or self.brand_same is None:
return
figIndex = 0
for index,i in enumerate(self.brand_same):
if index % self.fig_number == 0:
plt.figure(figsize=(10, 20))
filepath = i['file']
title = 'brand:url1-result:{},url2-result:{},path:{}'.format(i['url1-result'],i['url2-result'],os.path.basename(filepath))
data = cv2.imread(filepath)
rows = index % self.fig_number + 1
plt.subplot(self.fig_number,1,rows)
plt.imshow(data[:,:,::-1])
plt.title(title)
if rows == 10 or index == len(self.brand_same) - 1:
plt.savefig(f'{self.sameDataSave}/brand-{figIndex}.png')
figIndex += 1
def get_line_same(self):
if self.sameDataSave is None or self.line_same is None:
return
figIndex = 0
for index, i in enumerate(self.line_same):
filepath = i['file']
if index % self.fig_number == 0:
fig = plt.figure(figsize=(10, 25))
rows = index % self.fig_number + 1
plt.subplot(self.fig_number * 2, 1, rows * 2 - 1)
imdata1 = self.draw_line(filepath, i['url1-result'])
plt.imshow(imdata1[:, :, ::-1])
plt.title("url1 result")
plt.subplot(self.fig_number * 2, 1, rows * 2)
imdata2 = self.draw_line(filepath, i['url2-result'])
plt.imshow(imdata2[:, :, ::-1])
plt.title("url2 result")
if rows == self.fig_number or index == len(self.line_same) - 1:
plt.savefig(f'{self.sameDataSave}/line-{figIndex}.png')
figIndex += 1
def get_value_same(self):
figIndex = 0
for index,i in enumerate(self.value_same):
filepath = i['file']
title = 'url1-result:{}-url2-result:{}'.format(i['url1-result'],i['url2-result'])
data = cv2.imread(filepath)
if index % self.fig_number == 0:
plt.figure(figsize=(10, 20))
rows = index % self.fig_number + 1
plt.subplot(self.fig_number, 1, rows)
plt.imshow(data[:,:,::-1])
plt.title(title)
if rows == self.fig_number or index == len(self.value_same) - 1:
plt.savefig(f'{self.sameDataSave}/value-{figIndex}.png')
figIndex += 1
def get_direction_same(self):
figIndex = 0
for index,i in enumerate(self.rever_same):
if index % self.fig_number == 0:
plt.figure(figsize=(10, 20))
rows = index % self.fig_number + 1
plt.subplot(self.fig_number,1,rows)
filepath = i['file']
title = 'url1:{},url2:{},{}'.format(i['url1-result'],i['url2-result'],os.path.basename(i['file']))
data = cv2.imread(filepath)
plt.imshow(data[:,:,::-1])
plt.title(title)
if rows == 10 or index == len(self.rever_same) - 1:
plt.savefig(f'{self.sameDataSave}/dire-{figIndex}.png')
figIndex += 1
def com_two_pics_test(url1,savePath1,url2,savePath2,pics,summaryFile):
'''
Compare the algorithm results returned by two endpoints. Results are cached, so a request
whose result has already been computed will not be repeated.
:param url1: first endpoint address
:param savePath1: where to cache the results from url1
:param url2: second endpoint address
:param savePath2: where to cache the results from url2
:param pics: list of local image paths, e.g. ['path1','path2']
:param summaryFile: where to save the comparison summary
:return: None
'''
g = Get_result_and_compare(url1, url2, pics, summaryFile, True, savePath1, savePath2)
g.compare_two_pic_local()
g.summary()
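# Illustrative driver, not part of the module: wiring com_two_pics_test together with
# analysis_summary_result (defined below). The endpoint URLs, image paths and output
# locations are placeholders.
def _example_compare_run():
    pics = ["/data/pics/a.jpg", "/data/pics/b.jpg"]
    com_two_pics_test("http://host-a/al", "cache_a.json",
                      "http://host-b/al", "cache_b.json",
                      pics, "summary.json")
    analysis_summary_result("summary.json", "diff_out")  # diff_out must be an existing directory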
def analysis_summary_result(summaryFile,difSavePath):
'''
Interpret a comparison summary file.
:param summaryFile: the comparison summary produced by com_two_pics_test
:param difSavePath: directory where annotated images of the differing results are saved
:return: None
'''
dif = DecodeTheSummaryFile(summaryFile,difSavePath)
dif.show_brand_dif()
dif.show_line_dir()
dif.show_direction_dif()
dif.show_value_dif() | yuncheng-util-pkg | /yuncheng_util_pkg-1.3-py3-none-any.whl/yuncheng_util_pkg/compare_two_al_result.py | compare_two_al_result.py |
from yuncheng_util_pkg.util_net import get_for_request
from yuncheng_util_pkg.util_net import post_for_request
from yuncheng_util_pkg.util_file import down_pic
from yuncheng_util_pkg.yuncheng_al_class import ask_for_yuncheng_al
import cv2
import copy
import numpy as np
import matplotlib.pyplot as plt
import logging
logger = logging.getLogger("t")
def get_pipeline_error_info(id,savePath,url="http://192.168.1.222:12010/get-wrong-info/",checkUrl = 'http://192.168.1.222:8001/lh/lhRawdataAnalysis'):
'''
Fetch the stage-two algorithm pipeline results, re-run the misrecognized images against the
algorithm endpoint, and return the results for display.
:param id: primary key of the local run recorded in the database
:param savePath: where to save the downloaded images
:param url: address of the pipeline service
:param checkUrl: address of the algorithm endpoint
:return: the algorithm results
'''
ur = url+str(id)
res = get_for_request(ur)
_down_pipeline_info(res,savePath)
return _ask_for_yuncheng(res,checkUrl)
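# Illustrative call, not part of the module: the run id and save directory are placeholders;
# the default service URLs from the signature above are used.
def _example_pipeline_error_check():
    results = get_pipeline_error_info(123, "wrong_pics")
    for r in results:
        print(r["id"], r["result"])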
def _down_pipeline_info(res:object,savepath:str):
'''
Download the images referenced in the stage-two pipeline "wrong" response to the given path.
Not intended to be used on its own.
:param res:
:param savepath:
:return:
'''
for i in res['result']:
url = i['url']
picpath = down_pic(url,savepath)
i['path'] = picpath
def _ask_for_yuncheng(res:object,al_url):
'''
Used together with the function above that fetches the stage-two misrecognition results.
:param res:
:param al_url:
:return:
'''
pics = [i['path'] for i in res['result']]
result = ask_for_yuncheng_al(al_url,pics)
for i in result:
for k in res['result']:
if i['id'] == k['path']:
i['pipeinfo'] = k
continue
return result
def make_model(platform,sdkversion,modelVersion):
return {"platform":platform,"sdkVersion":sdkversion,"modelVersion":modelVersion}
def drawbox(imdata,title,points,anno_start,anno_end,color=(0,0,255)):
'''
Mark the given points on the image.
:param imdata:
:param title:
:param points: coordinates of the 4 corner points
:param anno_start: start point of the annotation arrow
:param anno_end: end point of the annotation arrow
:param color:
:return:
'''
if points is not None and len(points) > 0:
try:
# rect = cv2.minAreaRect(np.array(points))
# print(rect)
# box = cv2.boxPoints(rect)
# print(box)
boxArray = np.int0(points)
print(boxArray)
cv2.drawContours(imdata, [boxArray], 0, color, 5)
plt.annotate(title, xy=anno_end, xytext=anno_start,
xycoords='data',
arrowprops=dict(facecolor='black', shrink=0.05)
)
except Exception as e:
print(e)
else:
plt.annotate(title, xy=anno_end, xytext=anno_start,
xycoords='data',
arrowprops=dict(facecolor='black', shrink=0.05)
)
def label_pic(result,pic):
imdata = cv2.imread(pic)
v1 = result['v1']
v2 = result['v2']
title1 = "v1-error:{}".format(v1['errorCode'])
point1 = v1.get("points",[])
title2 = "v2-error:{}".format(v2['errorCode'])
point2 = v2.get("points", [])
anno_start1 = (100,100)
anno_start2 = (200,150)
anno_end1 = (50,50)
anno_end2 = (100,150)
if point1 != []:
anno_end1 = (point1[0][0],point1[0][1])
if point2 != []:
anno_end2 = (point2[0][0],point2[0][1])
drawbox(imdata, title1, point1,anno_start1,anno_end1,color=(0,0,255))
drawbox(imdata, title2, point2,anno_start2,anno_end2,color=(0,255,0))
plt.title(pic)
plt.imshow(imdata[:, :, ::-1])
plt.show()
def get_cut_pipeline_info(model1,model2,savePath="pics",url="http://192.168.1.222:12010/get-paper-cut-compare-detail"):
'''
Download the stage-one recognition results and visualize them.
:param model1:
:param model2:
:param savePath:
:param url:
:return:
'''
result = post_for_request(url,{"model1":model1,"model2":model2})
'''
{
"code": 200,
"data": [{"url": "下载地址",
"v1": {"errorCode": 0,"batch": 361,"type": 1,"points": [[72,154],[71,106],[182,103],[183,151]]},
"v2": {"errorCode": 0,"batch": 361,"type": 1,"points": [[121,259],[119,178],[305,172],[308,253]]}
}]
}
'''
if result is None:
return "error no info"
for i in result['data'][20:40]:
pic = down_pic(i['url'],savePath)
label_pic(i,pic) | yuncheng-util-pkg | /yuncheng_util_pkg-1.3-py3-none-any.whl/yuncheng_util_pkg/analysis_pipeline.py | analysis_pipeline.py |
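# Illustrative call for get_cut_pipeline_info above, not part of the module. The platform,
# SDK and model version strings are placeholders for the two model builds being compared.
def _example_cut_compare():
    m1 = make_model("android", "1.0.0", "model-v1")
    m2 = make_model("android", "1.1.0", "model-v2")
    get_cut_pipeline_info(m1, m2, savePath="pics")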
import json
from yuncheng_util_pkg.util_net import *
from yuncheng_util_pkg.util_file import *
from yuncheng_util_pkg.util import *
import os
import requests
import cv2
# class Serialize():
# def jsonTran(self):
# return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True, indent=4)
# class AlInput(Serialize):
# def __init__(self,imdata,id):
# self.imdata = imdata
# self.id = id
#
# class AlOutput(Serialize):
# def __init__(self, entries: dict = {}):
# for k, v in entries.items():
# if isinstance(v, dict):
# self.__dict__[k] = AlOutput(v)
# else:
# self.__dict__[k] = v
def make_al_input(imdata,id):
requestbody = {"file":imdata,"id":os.path.basename(id),"filepath":id}
return requestbody
def make_pano_al_input(imdata,id,points,ossUrl):
requestbody = {"file":imdata,"id":os.path.basename(id),"filepath":id,
'points':points,'ossUrl':ossUrl,'bucketName':'yunchenguslh'}
return requestbody
def use_cache_for_request(url,id,session,useCache=False,savePath=None):
if useCache is False or savePath is None:
return post_for_request(url,id,session)
make_dirs(savePath)
filename = os.path.join(savePath,os.path.basename(id['id']).split('.')[0]+'.json')
result = read_json(filename)
if result is not None:
return result
result = post_for_request(url, id, session)
if result == 'error':
return None
write_json(filename,result)
return result
def ask_for_yuncheng_al(url,pics,useCache=False,savePath=None):
'''
Send each local image in pics to the yuncheng algorithm endpoint and collect the responses.
:param url: algorithm endpoint address
:param pics: list of local image paths
:param useCache: reuse cached responses stored under savePath when available
:param savePath: directory used for the response cache
:return: [{'id': image path, 'result': response}, ...]
'''
session = requests.session()
bodys = []
for i in pics:
try:
imdata = read_pic_to_base64(i)
id = i
body = make_al_input(imdata,id)
bodys.append(body)
except Exception as e:
print(e)
results = []
for i in bodys:
try:
result = use_cache_for_request(url,i,session,useCache,savePath)
results.append({
'id':i['filepath'],
'result':result
})
except Exception as e:
print(e)
return results
def show_yuncheng_result(results):
for i in results:
path = i['id']
result = i['result']
imdata =cv2.imread(path)
label_yc_line(imdata[:,:,::-1],result,title=os.path.basename(path))
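# Illustrative usage, not part of the module: the endpoint URL, image paths and cache
# directory below are placeholders.
def _example_ask_and_show():
    pics = ["/data/strips/1.jpg", "/data/strips/2.jpg"]
    results = ask_for_yuncheng_al("http://host/lh/lhRawdataAnalysis", pics,
                                  useCache=True, savePath="al_cache")
    show_yuncheng_result(results)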
def ask_for_yuncheng_pano_point_al(url,datas,useCache=False,savePath=None):
'''
Test helper for a special endpoint that accepts a panorama image plus several corner points;
the endpoint crops the small strip image out of the panorama and runs the computation on it.
:param url:
:param datas: [{'pic': path, 'points': '[[1,1],[2,2],[1,2],[2,1]]',
'ossUrl': '****.jpg'}]
:return:
'''
session = requests.session()
bodys = []
for i in datas:
try:
imdata = read_pic_to_base64(i['pic'])
point = i['points']
ossUrl = i['ossUrl']
id = i['pic']
body = make_pano_al_input(imdata,id,point,ossUrl)
bodys.append(body)
except Exception as e:
print(e)
results = []
for i in bodys:
try:
result = use_cache_for_request(url,i,session,useCache,savePath)
results.append({
'id':i['filepath'],
'result':result
})
except Exception as e:
print(e)
return results | yuncheng-util-pkg | /yuncheng_util_pkg-1.3-py3-none-any.whl/yuncheng_util_pkg/yuncheng_al_class.py | yuncheng_al_class.py |
import MySQLdb as mdb
class GetDbData:
def __init__(self,host,name,pw,db):
self.host = host
self.name = name
self.pw = pw
self.db = db
def connect(self):
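        '''Open the MySQL connection; return 0 on success and 1 on failure.'''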
connectResult = 0
try:
            self.con = mdb.connect(host=self.host, user=self.name, passwd=self.pw, db=self.db)
            if self.con is not None:
                print('db connection established')
            else:
                print('db connection could not be established')
                connectResult = 1
        except Exception as e:
            connectResult = 1
            print('db connection failed: {}'.format(e))
return connectResult
def excuteSql(self,sql):
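        '''Run a single SQL statement and return cursor.fetchall(), or None if the connection or the query fails.'''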
try:
connect = self.connect()
if connect == 0:
cur = self.con.cursor()
                # execute the statement and fetch all result rows
                cur.execute(sql)
                rows = cur.fetchall()
            else:
                print('db connection failed, the sql cannot be executed!')
                rows = None
        except Exception as e:
            rows = None
            print('something wrong happened: {}'.format(e))
        finally:
            if hasattr(self, 'con') is True:
                self.con.close()
                print('db connection closed')
            else:
                print('no open connection, nothing to close')
            print('sql result returned')
return rows
def excuteManySql(self,sqls):
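        '''Run several SQL statements on one connection and return a list with each statement's fetchall() result.'''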
results = []
try:
connect = self.connect()
if connect == 0:
cur = self.con.cursor()
for sql in sqls:
cur.execute(sql)
rows = cur.fetchall()
results.append(rows)
            else:
                print('db connection failed, the sql statements cannot be executed!')
                rows = None
        except Exception as e:
            rows = None
            print('something wrong happened: {}'.format(e))
        finally:
            if hasattr(self, 'con') is True:
                self.con.close()
                print('db connection closed')
            else:
                print('no open connection, nothing to close')
            print('sql results returned')
return results | yuncheng-util-pkg | /yuncheng_util_pkg-1.3-py3-none-any.whl/yuncheng_util_pkg/db_util.py | db_util.py |
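# Example usage (hypothetical credentials and table):
#   db = GetDbData('127.0.0.1', 'user', 'password', 'yuncheng')
#   rows = db.excuteSql('SELECT id, url FROM strip_record LIMIT 10')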
import requests
from yuncheng_util_pkg.util_net import get_for_request
from yuncheng_util_pkg.util_file import *
import matplotlib.pyplot as plt
import cv2
import os
import copy
import json
def _show_value_change(result,savePath,save=False,groupsize=10):
    '''
    Display the records whose value was changed by users.
    If save is False each strip is shown one by one; if save is True the strips are
    written to disk in batches of `groupsize` per output figure.
    :param result: response dict whose 'data' list holds the changed records
    :param savePath: directory the combined figures are written to
    :param save: False = show interactively, True = save grouped figures
    :param groupsize: number of strip images per saved figure
    :return:
    '''
def show_image(picpath,title1,title2,show=True):
        if picpath is None:
            print("error: picpath is None")
            return
imdata = cv2.imread(picpath)
plt.imshow(imdata[:,:,::-1])
plt.title(title1+','+title2)
plt.xlabel(os.path.basename(picpath))
if show:
plt.show()
def save_pic(results,groupsize,savePath):
        groups = len(results)//groupsize
        if groups*groupsize < len(results):
            groups += 1
        for i in range(groups):
plt.figure(figsize=(10,40))
for index,k in enumerate(results[i*groupsize:(i+1)*groupsize]):
plt.subplot(groupsize,1,index+1)
show_image(k['path'],"al:{}".format(k['lhValue']),'manual:{}'.format(k['lhValueManual']),False)
filename = os.path.join(savePath,"value-{}.jpg".format(i))
plt.savefig(filename)
plt.show()
if save is False:
[show_image(i['path'],"al:{}".format(i['lhValue']),'manual:{}'.format(i['lhValueManual'])) for i in result['data']]
print(len(result['data']))
else:
save_pic(result['data'],groupsize,savePath)
def _show_line_change(results,savePath,save=False,groupsize=10):
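    '''
    Display the records whose C/T positions were changed: for each record the strip is
    shown twice, once with the algorithm's bands and once with the manually corrected
    ones. save and groupsize behave as in _show_value_change.
    '''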
def make_line_label(imdata,cl,cr,tl,tr):
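        '''Mark the C band (cl:cr) and the T band (tl:tr) on the bottom quarter of a copy of imdata.'''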
imdata2 = copy.deepcopy(imdata)
h, w, _ = imdata.shape
if cl > 10:
imdata2[h*3//4:,cl:cr,:] = (255,0,0)
if tl > 10:
imdata2[h * 3 // 4:, tl:tr, :] = (0, 0, 255)
return imdata2
def get_al_ct(info:str):
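        '''Parse the algorithm's `info` JSON string into cl/cr/tl/tr floats, treating empty strings as 0.'''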
info2 = json.loads(info)
al_ct = {'cl':0,'cr':0,'tl':0,'tr':0}
if info2['cl'] != '':
al_ct['cl'] = float(info2['cl'])
if info2['cr'] != '':
al_ct['cr'] = float(info2['cr'])
if info2['tl'] != '':
al_ct['tl'] = float(info2['tl'])
if info2['tr'] != '':
al_ct['tr'] = float(info2['tr'])
return al_ct
def get_ma_ct(c,t):
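        '''Turn the manual single C/T positions into narrow ±0.005 bands so they can be drawn like the algorithm's.'''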
newc = float(c)
newt = float(t)
ma_ct = {'cl':newc-0.005,'cr':newc+0.005,'tl':newt-0.005,'tr':newt+0.005}
return ma_ct
def show_image(picpath,alct,manualct,show=True,sum=2,col=1,row=1):
'''
:param picpath:
:param alct: {'cl':0.44,'cr':0.45,'tl':0.35,'tr':0.36}
:param manualct: {'cl':0.44,'cr':0.45,'tl':0.35,'tr':0.36}
:return:
'''
imdata = cv2.imread(picpath)
h,w, _ = imdata.shape
al_cl,al_cr,al_tl,al_tr = int(alct['cl']*w),int(alct['cr']*w),int(alct['tl']*w),int(alct['tr']*w)
ma_cl, ma_cr, ma_tl, ma_tr = int(manualct['cl'] * w), int(manualct['cr'] * w), \
int(manualct['tl'] * w), int(manualct['tr'] * w)
imdata2 = make_line_label(imdata,al_cl,al_cr,al_tl,al_tr)
imdata3 = make_line_label(imdata,ma_cl, ma_cr, ma_tl, ma_tr)
plt.subplot(sum,col,row)
plt.title("al")
plt.imshow(imdata2[:,:,::-1])
plt.subplot(sum,col,row+1)
plt.title("manual")
plt.imshow(imdata3[:, :, ::-1])
plt.xlabel(os.path.basename(picpath))
if show:
plt.show()
def save_pic(results,groupsize,savePath):
        groups = len(results)//groupsize
        if groups*groupsize < len(results):
            groups += 1
        for i in range(groups):
plt.figure(figsize=(10,40))
for index,k in enumerate(results[i*groupsize:(i+1)*groupsize]):
# plt.subplot(groupsize,1,index+1)
show_image(k['path'],k['alct'],k['mact'],False,groupsize*2,1,(index+1)*2)
filename = os.path.join(savePath,"ct-{}.jpg".format(i))
plt.savefig(filename)
plt.show()
    # the raw records don't match what show_image expects, so build adapter dicts first
adapters = []
for i in results['data']:
adapter = {"path":i['path'],'alct':get_al_ct(i['info']),'mact':get_ma_ct(i['cLoc'],i['tLoc'])}
adapters.append(adapter)
if save is False:
for i in adapters:
show_image(i['path'],i['alct'],i['mact'])
return
save_pic(adapters,groupsize,savePath)
def get_user_change(url,savePath,changeType=0,save=False,groupsize=10):
    '''
    Fetch the user-change records from the statistics service, download the related
    pictures into savePath and annotate them.
    :param url: address of the statistics endpoint
    :param savePath: directory the pictures are downloaded to
    :param changeType: type of comparison: 0 = C/T position changes, 1 = value changes
    :param save: whether the annotated result is saved; False shows each picture
                 interactively, True saves them in batches according to groupsize
    :param groupsize: how many strip images go into each saved figure
    :return:
    '''
result = get_for_request(url)
if result == 'error':
return result
else:
# 都会进行图片的下载
for i in result['data']:
picUrl = i['url']
path = down_pic(picUrl, savePath)
i['path'] = path
if i.get("reverse",0) != 0:
setPathRotateJust180(i['path'])
if changeType == 0:
_show_line_change(result, savePath, save, groupsize)
elif changeType == 1:
_show_value_change(result, savePath, save, groupsize)
return result | yuncheng-util-pkg | /yuncheng_util_pkg-1.3-py3-none-any.whl/yuncheng_util_pkg/yuncheng_label.py | yuncheng_label.py |
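# Example (hypothetical statistics endpoint):
#   get_user_change('http://<stats-host>/userChange', '/tmp/user_change',
#                   changeType=1, save=True, groupsize=10)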
import os
import requests
import base64
import cv2
import json
import matplotlib.pyplot as plt
import numpy as np
from yuncheng_util_pkg import util
def down_pic(url,savePath):
    '''
    Download the picture at `url` into the given directory.
    :param url: address of the picture
    :param savePath: directory the picture is saved into
    :return: full path of the saved file, or None if the download fails
    '''
if os.path.exists(savePath) is False:
os.makedirs(savePath)
name = os.path.basename(url)
picPath = os.path.join(savePath,name)
if os.path.exists(picPath):
return picPath
data = requests.get(url)
if data.status_code != 200:
return None
with open(picPath,'wb') as f:
f.write(data.content)
return picPath
def read_pic_to_base64(path):
    '''
    Read the file at `path` and return its content as a base64-encoded string.
    :param path: file path
    :return: base64 string
    '''
with open(path, 'rb') as f:
data = f.read()
imagedata = base64.b64encode(data).decode()
return imagedata
def setRotateJust180(image,angle = 180):
    '''
    Rotate the image by the given angle (180 degrees by default):
    1. take the centre point of the image,
    2. build a rotation matrix for the requested angle,
    3. warp the image around that centre,
    4. return the rotated image.
    :param image: cv2 image (numpy array)
    :return: rotated image
    '''
h, w = image.shape[:2]
x1 = w // 2
y1 = h // 2
    # build the rotation matrix around the image centre
    M = cv2.getRotationMatrix2D((x1, y1), angle, 1.0)
    # apply it to get the rotated image
    rotated = cv2.warpAffine(image, M, (w, h))
return rotated
def setPathRotateJust180(path,angle = 180):
    '''
    Rotate the image file at `path` in place by `angle` degrees and overwrite it.
    :param path: path of the image file
    :return:
    '''
image = cv2.imread(path)
image = setRotateJust180(image,angle)
cv2.imwrite(path,image)
def label_yc_line(imdata, result,title=""):
    '''
    Draw the C/T bands reported by the yuncheng algorithm onto the image and show it.
    :param imdata: image as a cv2/numpy array
    :param result: result dict returned by the yuncheng algorithm
    :param title: title used when the image is shown
    :return: the annotated array
    '''
h, w, _ = imdata.shape
if result.get('reverse',0) == 1:
imdata = setRotateJust180(imdata,180)
if result.get('cLocation', 0) > 0.1:
t1 = int(w * result['lhClineLeft'])
t2 = int(w * result['lhClineRight'])
imdata[h * 3 // 4:, t1:t2, :] = (255, 0, 0)
if result.get('tLocation', 0) > 0.1:
t1 = int(w * result['lhTlineLeft'])
t2 = int(w * result['lhTlineRight'])
imdata[h * 3 // 4:, t1:t2, :] = (0, 0, 255)
show(imdata,title)
return imdata
def show(imdata,title=""):
plt.title(title)
plt.imshow(imdata)
plt.show()
def read_str(file):
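    '''Return the content of `file` as a string, or "" if reading fails.'''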
try:
with open(file,'r') as f:
data = f.read()
return data
except Exception as e:
print(e)
return ""
def read_json(file):
data = read_str(file)
try:
result = json.loads(data)
return result
except Exception as e:
        print(e)
return None
def write_str(file,data)->bool:
try:
with open(file, 'w') as f:
f.write(data)
return True
except Exception as e:
print(e)
return False
def write_json(file,data)->bool:
try:
data = json.dumps(data)
with open(file,'w') as f:
f.write(data)
return True
except Exception as e:
print(e)
return False
def label_yc_line_v2(imdata_bgr, result,position,saveName=None,title=""):
    '''
    Draw the C/T positions reported by the yuncheng algorithm as thin vertical lines on a
    copy of the image and stack that copy on top of the original.
    :param imdata_bgr: image as a cv2/numpy array (BGR)
    :param result: result dict returned by the yuncheng algorithm
    :param position: fraction of the height at which the marker lines start (they stop at h - start)
    :param saveName: optional output path; when given the stacked image is also written there
    :return:
    '''
h, w, _ = imdata_bgr.shape
height_start = int(h * position)
height_end = h - height_start
    new_image = np.zeros((h*2, w, 3), dtype=imdata_bgr.dtype)
new_image[h:,:,:] = imdata_bgr
if result.get('reverse',0) == 1:
imdata_bgr = setRotateJust180(imdata_bgr,180)
imdata2 = imdata_bgr.copy()
if result.get('cLocation', 0) > 0.1:
t1 = int(w * result['cLocation'])
t2 = t1 + 1
imdata2[height_start:height_end, t1:t2, :] = (255, 0, 0)
if result.get('tLocation', 0) > 0.1:
t1 = int(w * result['tLocation'])
t2 = t1+1
imdata2[height_start:height_end, t1:t2, :] = (0, 0, 255)
new_image[:h,:,:] = imdata2
show(new_image[:,:,::-1],title)
if saveName is not None:
savePath = os.path.dirname(saveName)
util.make_dirs(savePath)
cv2.imwrite(saveName,new_image) | yuncheng-util-pkg | /yuncheng_util_pkg-1.3-py3-none-any.whl/yuncheng_util_pkg/util_file.py | util_file.py |
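# The module below is protoc-generated descriptor code for dtp.api.proto (note the
# @@protoc_insertion_point marker); it is normally regenerated from the .proto file
# rather than edited by hand.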
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from pb import type_pb2 as dtp_dot_type__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='dtp.api.proto',
package='dwjk.dtp',
syntax='proto3',
serialized_pb=_b('\n\rdtp.api.proto\x12\x08\x64wjk.dtp\x1a\x0e\x64tp.type.proto\"\x9f\x01\n\rRequestHeader\x12\x0e\n\x06\x61pi_id\x18\x01 \x01(\r\x12\x12\n\nrequest_id\x18\x02 \x01(\t\x12\r\n\x05token\x18\x03 \x01(\t\x12\x0f\n\x07user_id\x18\x04 \x01(\t\x12\x12\n\naccount_no\x18\x05 \x01(\t\x12\n\n\x02ip\x18\x06 \x01(\t\x12\x0b\n\x03mac\x18\x07 \x01(\t\x12\x10\n\x08harddisk\x18\x08 \x01(\t\x12\x0b\n\x03tag\x18\t \x01(\t\"q\n\x0eResponseHeader\x12\x0e\n\x06\x61pi_id\x18\x01 \x01(\r\x12\x12\n\nrequest_id\x18\x02 \x01(\t\x12*\n\x04\x63ode\x18\x03 \x01(\x0e\x32\x1c.dwjk.dtp.ResponseStatusCode\x12\x0f\n\x07message\x18\x04 \x01(\t\"\xcc\x01\n\x0cReportHeader\x12\x0e\n\x06\x61pi_id\x18\x01 \x01(\r\x12*\n\x04\x63ode\x18\x02 \x01(\x0e\x32\x1c.dwjk.dtp.ResponseStatusCode\x12\x0f\n\x07message\x18\x03 \x01(\t\x12\x12\n\nrequest_id\x18\x04 \x01(\t\x12\x0f\n\x07user_id\x18\x05 \x01(\t\x12\x12\n\naccount_no\x18\x06 \x01(\t\x12\n\n\x02ip\x18\x07 \x01(\t\x12\x0b\n\x03mac\x18\x08 \x01(\t\x12\x10\n\x08harddisk\x18\t \x01(\t\x12\x0b\n\x03tag\x18\n \x01(\t\"O\n\x13LoginAccountRequest\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12\x10\n\x08password\x18\x02 \x01(\t\x12\x12\n\nlogin_name\x18\x03 \x01(\t\"%\n\x14LoginAccountResponse\x12\r\n\x05token\x18\x01 \x01(\t\"*\n\x14LogoutAccountRequest\x12\x12\n\naccount_no\x18\x01 \x01(\t\"\x17\n\x15LogoutAccountResponse\"\xc5\x02\n\nPlaceOrder\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12\x19\n\x11order_original_id\x18\x02 \x01(\t\x12$\n\x08\x65xchange\x18\x03 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x04 \x01(\t\x12\r\n\x05price\x18\x05 \x01(\t\x12\x10\n\x08quantity\x18\x06 \x01(\r\x12\'\n\norder_side\x18\x07 \x01(\x0e\x32\x13.dwjk.dtp.OrderSide\x12\'\n\norder_type\x18\x08 \x01(\x0e\x32\x13.dwjk.dtp.OrderType\x12\x10\n\x08\x63ontacts\x18\t \x01(\t\x12\r\n\x05phone\x18\n \x01(\t\x12\x15\n\rconvention_no\x18\x0b \x01(\t\x12)\n\x0border_limit\x18\x0c \x01(\x0e\x32\x14.dwjk.dtp.OrderLimit\"p\n\x0b\x43\x61ncelOrder\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12$\n\x08\x65xchange\x18\x02 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x19\n\x11order_exchange_id\x18\x03 \x01(\t\x12\x0c\n\x04\x63ode\x18\x04 \x01(\t\"\x81\x01\n\x0e\x43\x61ncelResponse\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12$\n\x08\x65xchange\x18\x02 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x19\n\x11order_exchange_id\x18\x03 \x01(\t\x12\x1a\n\x12\x63\x61ncel_exchange_id\x18\x04 \x01(\t\"\xdf\x01\n\tPlaceVote\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12\x19\n\x11order_original_id\x18\x02 \x01(\t\x12$\n\x08\x65xchange\x18\x03 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x04 \x01(\t\x12\x12\n\nplacard_id\x18\x05 \x01(\t\x12\x11\n\tmotion_id\x18\x06 \x01(\t\x12\x14\n\x0c\x66\x61vour_count\x18\x07 \x01(\r\x12\x18\n\x10opposition_count\x18\x08 \x01(\r\x12\x18\n\x10\x61\x62stention_count\x18\t \x01(\r\"\xdc\x01\n\x0cPlaceDeclare\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12\x19\n\x11order_original_id\x18\x02 \x01(\t\x12$\n\x08\x65xchange\x18\x03 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x04 \x01(\t\x12-\n\rbusiness_type\x18\x05 \x01(\x0e\x32\x16.dwjk.dtp.BusinessType\x12+\n\x0c\x64\x65\x63lare_type\x18\x06 \x01(\x0e\x32\x15.dwjk.dtp.DeclareType\x12\r\n\x05\x63ount\x18\x07 \x01(\r\"\x95\x03\n\x10MarginPlaceOrder\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12\x19\n\x11order_original_id\x18\x02 \x01(\t\x12$\n\x08\x65xchange\x18\x03 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x04 \x01(\t\x12\r\n\x05price\x18\x05 \x01(\t\x12\x10\n\x08quantity\x18\x06 \x01(\r\x12\'\n\norder_side\x18\x07 
\x01(\x0e\x32\x13.dwjk.dtp.OrderSide\x12\'\n\norder_type\x18\x08 \x01(\x0e\x32\x13.dwjk.dtp.OrderType\x12\x10\n\x08\x63ontacts\x18\t \x01(\t\x12\r\n\x05phone\x18\n \x01(\t\x12\x15\n\rconvention_no\x18\x0b \x01(\t\x12)\n\x0border_limit\x18\x0c \x01(\x0e\x32\x14.dwjk.dtp.OrderLimit\x12\x12\n\ncompact_no\x18\r \x01(\t\x12\x34\n\rposition_type\x18\x0e \x01(\x0e\x32\x1d.dwjk.dtp.CompactPositionType\"v\n\x11MarginCancelOrder\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12$\n\x08\x65xchange\x18\x02 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x19\n\x11order_exchange_id\x18\x03 \x01(\t\x12\x0c\n\x04\x63ode\x18\x04 \x01(\t\"\x87\x01\n\x14MarginCancelResponse\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12$\n\x08\x65xchange\x18\x02 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x19\n\x11order_exchange_id\x18\x03 \x01(\t\x12\x1a\n\x12\x63\x61ncel_exchange_id\x18\x04 \x01(\t\"\xa7\x01\n\x16MarginRepayAmountOrder\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12\x19\n\x11order_original_id\x18\x02 \x01(\t\x12\x12\n\ncompact_no\x18\x03 \x01(\t\x12\x14\n\x0crepay_amount\x18\x04 \x01(\t\x12\x34\n\rposition_type\x18\x05 \x01(\x0e\x32\x1d.dwjk.dtp.CompactPositionType\"\xd9\x01\n\x18MarginRepaySecurityOrder\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12\x19\n\x11order_original_id\x18\x02 \x01(\t\x12$\n\x08\x65xchange\x18\x03 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x04 \x01(\t\x12\x10\n\x08quantity\x18\x05 \x01(\r\x12\x12\n\ncompact_no\x18\x06 \x01(\t\x12\x34\n\rposition_type\x18\x07 \x01(\x0e\x32\x1d.dwjk.dtp.CompactPositionType\"\xf7\x02\n\x0fPlaceBatchOrder\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12\x12\n\nall_failed\x18\x02 \x01(\x08\x12<\n\norder_list\x18\x03 \x03(\x0b\x32(.dwjk.dtp.PlaceBatchOrder.BatchOrderItem\x1a\xfd\x01\n\x0e\x42\x61tchOrderItem\x12$\n\x08\x65xchange\x18\x01 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x02 \x01(\t\x12\'\n\norder_side\x18\x03 \x01(\x0e\x32\x13.dwjk.dtp.OrderSide\x12\r\n\x05price\x18\x04 \x01(\t\x12\x10\n\x08quantity\x18\x05 \x01(\r\x12\'\n\norder_type\x18\x06 \x01(\x0e\x32\x13.dwjk.dtp.OrderType\x12\x19\n\x11order_original_id\x18\x07 \x01(\t\x12)\n\x0border_limit\x18\x0c \x01(\x0e\x32\x14.dwjk.dtp.OrderLimit\"j\n\x12PlaceBatchResponse\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12\x13\n\x0btotal_count\x18\x03 \x01(\r\x12\x15\n\rsuccess_count\x18\x04 \x01(\r\x12\x14\n\x0c\x66\x61iled_count\x18\x05 \x01(\r\":\n\x10\x43\x61ncelBatchOrder\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12\x12\n\nexchangeID\x18\x02 \x03(\t\"k\n\x13\x43\x61ncelBatchResponse\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12\x13\n\x0btotal_count\x18\x02 \x01(\r\x12\x15\n\rsuccess_count\x18\x03 \x01(\r\x12\x14\n\x0c\x66\x61iled_count\x18\x04 \x01(\r\"\xa9\x02\n\x0cPlacedReport\x12\x19\n\x11order_exchange_id\x18\x01 \x01(\t\x12\x13\n\x0bplaced_time\x18\x02 \x01(\t\x12\x15\n\rfreeze_amount\x18\x03 \x01(\t\x12%\n\x06status\x18\x04 \x01(\x0e\x32\x15.dwjk.dtp.OrderStatus\x12\x19\n\x11order_original_id\x18\x05 \x01(\t\x12\x12\n\naccount_no\x18\x06 \x01(\t\x12$\n\x08\x65xchange\x18\x07 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x08 \x01(\t\x12\x10\n\x08quantity\x18\t \x01(\r\x12\'\n\norder_side\x18\n \x01(\x0e\x32\x13.dwjk.dtp.OrderSide\x12\r\n\x05price\x18\x0b \x01(\t\"\xf8\x03\n\nFillReport\x12\x18\n\x10\x66ill_exchange_id\x18\x01 \x01(\t\x12\x11\n\tfill_time\x18\x02 \x01(\t\x12)\n\x0b\x66ill_status\x18\x03 \x01(\x0e\x32\x14.dwjk.dtp.FillStatus\x12\x12\n\nfill_price\x18\x04 \x01(\t\x12\x15\n\rfill_quantity\x18\x05 \x01(\r\x12\x13\n\x0b\x66ill_amount\x18\x06 \x01(\t\x12\x14\n\x0c\x63lear_amount\x18\x07 
\x01(\t\x12\x1b\n\x13total_fill_quantity\x18\x08 \x01(\r\x12\x19\n\x11total_fill_amount\x18\t \x01(\t\x12 \n\x18total_cancelled_quantity\x18\n \x01(\r\x12\x19\n\x11order_exchange_id\x18\x0b \x01(\t\x12\x19\n\x11order_original_id\x18\x0c \x01(\t\x12\x12\n\naccount_no\x18\r \x01(\t\x12$\n\x08\x65xchange\x18\x0e \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x0f \x01(\t\x12\r\n\x05price\x18\x10 \x01(\t\x12\x10\n\x08quantity\x18\x11 \x01(\r\x12\'\n\norder_side\x18\x12 \x01(\x0e\x32\x13.dwjk.dtp.OrderSide\x12\x1a\n\x12total_clear_amount\x18\x13 \x01(\t\"\xc4\x02\n\x12\x43\x61ncellationReport\x12\x19\n\x11order_exchange_id\x18\x01 \x01(\t\x12\x19\n\x11order_original_id\x18\x02 \x01(\t\x12\x12\n\naccount_no\x18\x03 \x01(\t\x12$\n\x08\x65xchange\x18\x04 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x05 \x01(\t\x12\x10\n\x08quantity\x18\x06 \x01(\r\x12\'\n\norder_side\x18\x07 \x01(\x0e\x32\x13.dwjk.dtp.OrderSide\x12%\n\x06status\x18\x08 \x01(\x0e\x32\x15.dwjk.dtp.OrderStatus\x12\x1b\n\x13total_fill_quantity\x18\t \x01(\r\x12\x1a\n\x12\x63\x61ncelled_quantity\x18\n \x01(\r\x12\x15\n\rfreeze_amount\x18\x0b \x01(\t\"\xb7\x02\n\nVoteReport\x12\x19\n\x11order_exchange_id\x18\x01 \x01(\t\x12\x13\n\x0bplaced_time\x18\x02 \x01(\t\x12%\n\x06status\x18\x03 \x01(\x0e\x32\x15.dwjk.dtp.OrderStatus\x12\x19\n\x11order_original_id\x18\x04 \x01(\t\x12\x12\n\naccount_no\x18\x05 \x01(\t\x12$\n\x08\x65xchange\x18\x06 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x07 \x01(\t\x12\x12\n\nplacard_id\x18\x08 \x01(\t\x12\x11\n\tmotion_id\x18\t \x01(\t\x12\x14\n\x0c\x66\x61vour_count\x18\n \x01(\r\x12\x18\n\x10opposition_count\x18\x0b \x01(\r\x12\x18\n\x10\x61\x62stention_count\x18\x0c \x01(\r\"\xb4\x02\n\rDeclareReport\x12\x19\n\x11order_exchange_id\x18\x01 \x01(\t\x12\x13\n\x0bplaced_time\x18\x02 \x01(\t\x12%\n\x06status\x18\x03 \x01(\x0e\x32\x15.dwjk.dtp.OrderStatus\x12\x19\n\x11order_original_id\x18\x04 \x01(\t\x12\x12\n\naccount_no\x18\x05 \x01(\t\x12$\n\x08\x65xchange\x18\x06 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x07 \x01(\t\x12-\n\rbusiness_type\x18\x08 \x01(\x0e\x32\x16.dwjk.dtp.BusinessType\x12+\n\x0c\x64\x65\x63lare_type\x18\t \x01(\x0e\x32\x15.dwjk.dtp.DeclareType\x12\r\n\x05\x63ount\x18\n \x01(\r\"\xf9\x02\n\x12MarginPlacedReport\x12\x19\n\x11order_exchange_id\x18\x01 \x01(\t\x12\x13\n\x0bplaced_time\x18\x02 \x01(\t\x12\x15\n\rfreeze_amount\x18\x03 \x01(\t\x12%\n\x06status\x18\x04 \x01(\x0e\x32\x15.dwjk.dtp.OrderStatus\x12\x19\n\x11order_original_id\x18\x05 \x01(\t\x12\x12\n\naccount_no\x18\x06 \x01(\t\x12$\n\x08\x65xchange\x18\x07 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x08 \x01(\t\x12\x10\n\x08quantity\x18\t \x01(\r\x12\'\n\norder_side\x18\n \x01(\x0e\x32\x13.dwjk.dtp.OrderSide\x12\r\n\x05price\x18\x0b \x01(\t\x12\x12\n\ncompact_no\x18\x0c \x01(\t\x12\x34\n\rposition_type\x18\r \x01(\x0e\x32\x1d.dwjk.dtp.CompactPositionType\"\xe4\x01\n\x17MarginRepayAmountReport\x12\x13\n\x0bplaced_time\x18\x01 \x01(\t\x12%\n\x06status\x18\x02 \x01(\x0e\x32\x15.dwjk.dtp.OrderStatus\x12\x19\n\x11order_original_id\x18\x03 \x01(\t\x12\x12\n\naccount_no\x18\x04 \x01(\t\x12\x14\n\x0crepay_amount\x18\x05 \x01(\t\x12\x12\n\ncompact_no\x18\x06 \x01(\t\x12\x34\n\rposition_type\x18\x07 \x01(\x0e\x32\x1d.dwjk.dtp.CompactPositionType\"\xb1\x02\n\x19MarginRepaySecurityReport\x12\x19\n\x11order_exchange_id\x18\x01 \x01(\t\x12\x13\n\x0bplaced_time\x18\x02 \x01(\t\x12%\n\x06status\x18\x03 
\x01(\x0e\x32\x15.dwjk.dtp.OrderStatus\x12\x19\n\x11order_original_id\x18\x04 \x01(\t\x12\x12\n\naccount_no\x18\x05 \x01(\t\x12$\n\x08\x65xchange\x18\x06 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x07 \x01(\t\x12\x10\n\x08quantity\x18\x08 \x01(\r\x12\x12\n\ncompact_no\x18\t \x01(\t\x12\x34\n\rposition_type\x18\n \x01(\x0e\x32\x1d.dwjk.dtp.CompactPositionType\"\xb4\x04\n\x10MarginFillReport\x12\x18\n\x10\x66ill_exchange_id\x18\x01 \x01(\t\x12\x11\n\tfill_time\x18\x02 \x01(\t\x12)\n\x0b\x66ill_status\x18\x03 \x01(\x0e\x32\x14.dwjk.dtp.FillStatus\x12\x12\n\nfill_price\x18\x04 \x01(\t\x12\x15\n\rfill_quantity\x18\x05 \x01(\r\x12\x13\n\x0b\x66ill_amount\x18\x06 \x01(\t\x12\x14\n\x0c\x63lear_amount\x18\x07 \x01(\t\x12\x1b\n\x13total_fill_quantity\x18\x08 \x01(\r\x12\x19\n\x11total_fill_amount\x18\t \x01(\t\x12 \n\x18total_cancelled_quantity\x18\n \x01(\r\x12\x19\n\x11order_exchange_id\x18\x0b \x01(\t\x12\x19\n\x11order_original_id\x18\x0c \x01(\t\x12\x12\n\naccount_no\x18\r \x01(\t\x12$\n\x08\x65xchange\x18\x0e \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x0f \x01(\t\x12\r\n\x05price\x18\x10 \x01(\t\x12\x10\n\x08quantity\x18\x11 \x01(\r\x12\'\n\norder_side\x18\x12 \x01(\x0e\x32\x13.dwjk.dtp.OrderSide\x12\x1a\n\x12total_clear_amount\x18\x13 \x01(\t\x12\x34\n\rposition_type\x18\x14 \x01(\x0e\x32\x1d.dwjk.dtp.CompactPositionType\"\x80\x03\n\x18MarginCancellationReport\x12\x19\n\x11order_exchange_id\x18\x01 \x01(\t\x12\x19\n\x11order_original_id\x18\x02 \x01(\t\x12\x12\n\naccount_no\x18\x03 \x01(\t\x12$\n\x08\x65xchange\x18\x04 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x05 \x01(\t\x12\x10\n\x08quantity\x18\x06 \x01(\r\x12\'\n\norder_side\x18\x07 \x01(\x0e\x32\x13.dwjk.dtp.OrderSide\x12%\n\x06status\x18\x08 \x01(\x0e\x32\x15.dwjk.dtp.OrderStatus\x12\x1b\n\x13total_fill_quantity\x18\t \x01(\r\x12\x1a\n\x12\x63\x61ncelled_quantity\x18\n \x01(\r\x12\x15\n\rfreeze_amount\x18\x0b \x01(\t\x12\x34\n\rposition_type\x18\x0c \x01(\x0e\x32\x1d.dwjk.dtp.CompactPositionType\"\xa0\x02\n\x12QueryOrdersRequest\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12\x19\n\x11order_exchange_id\x18\x02 \x01(\t\x12\x34\n\x0equery_criteria\x18\x03 \x01(\x0e\x32\x1c.dwjk.dtp.QueryOrderCriteria\x12-\n\npagination\x18\x04 \x01(\x0b\x32\x19.dwjk.dtp.QueryPagination\x12$\n\x08\x65xchange\x18\x05 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x06 \x01(\t\x12\x19\n\x11order_original_id\x18\x07 \x01(\t\x12\'\n\norder_side\x18\x08 \x01(\x0e\x32\x13.dwjk.dtp.OrderSide\"\xfc\x04\n\x13QueryOrdersResponse\x12\x37\n\norder_list\x18\x01 \x03(\x0b\x32#.dwjk.dtp.QueryOrdersResponse.Order\x12-\n\npagination\x18\x02 \x01(\x0b\x32\x19.dwjk.dtp.QueryPagination\x1a\xfc\x03\n\x05Order\x12\x19\n\x11order_exchange_id\x18\x01 \x01(\t\x12\x19\n\x11order_original_id\x18\x02 \x01(\t\x12$\n\x08\x65xchange\x18\x03 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x04 \x01(\t\x12\x0c\n\x04name\x18\x05 \x01(\t\x12\r\n\x05price\x18\x06 \x01(\t\x12\x10\n\x08quantity\x18\x07 \x01(\r\x12\'\n\norder_side\x18\x08 \x01(\x0e\x32\x13.dwjk.dtp.OrderSide\x12\'\n\norder_type\x18\t \x01(\x0e\x32\x13.dwjk.dtp.OrderType\x12%\n\x06status\x18\n \x01(\x0e\x32\x15.dwjk.dtp.OrderStatus\x12\x12\n\norder_time\x18\x0b \x01(\t\x12\x12\n\naccount_no\x18\x0c \x01(\t\x12\x1a\n\x12\x61verage_fill_price\x18\r \x01(\t\x12\x1b\n\x13total_fill_quantity\x18\x0e \x01(\r\x12\x19\n\x11total_fill_amount\x18\x0f \x01(\t\x12\x15\n\rfreeze_amount\x18\x10 \x01(\t\x12\x14\n\x0c\x63lear_amount\x18\x11 \x01(\t\x12 
\n\x18total_cancelled_quantity\x18\x12 \x01(\r\x12\x16\n\x0estatus_message\x18\x13 \x01(\t\"\x86\x02\n\x11QueryFillsRequest\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12\x19\n\x11order_exchange_id\x18\x02 \x01(\t\x12-\n\npagination\x18\x03 \x01(\x0b\x32\x19.dwjk.dtp.QueryPagination\x12$\n\x08\x65xchange\x18\x04 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x05 \x01(\t\x12\x19\n\x11order_original_id\x18\x06 \x01(\t\x12\'\n\norder_side\x18\x07 \x01(\x0e\x32\x13.dwjk.dtp.OrderSide\x12\x1b\n\x13include_cancel_fill\x18\x08 \x01(\x08\"\xd1\x03\n\x12QueryFillsResponse\x12\x34\n\tfill_list\x18\x01 \x03(\x0b\x32!.dwjk.dtp.QueryFillsResponse.Fill\x12-\n\npagination\x18\x02 \x01(\x0b\x32\x19.dwjk.dtp.QueryPagination\x1a\xd5\x02\n\x04\x46ill\x12\x18\n\x10\x66ill_exchange_id\x18\x01 \x01(\t\x12\x11\n\tfill_time\x18\x02 \x01(\t\x12)\n\x0b\x66ill_status\x18\x03 \x01(\x0e\x32\x14.dwjk.dtp.FillStatus\x12\x12\n\nfill_price\x18\x04 \x01(\t\x12\x15\n\rfill_quantity\x18\x05 \x01(\x05\x12\x13\n\x0b\x66ill_amount\x18\x06 \x01(\t\x12\x14\n\x0c\x63lear_amount\x18\x07 \x01(\t\x12\x19\n\x11order_exchange_id\x18\x08 \x01(\t\x12\x19\n\x11order_original_id\x18\t \x01(\t\x12$\n\x08\x65xchange\x18\n \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x0b \x01(\t\x12\x0c\n\x04name\x18\x0c \x01(\t\x12\'\n\norder_side\x18\r \x01(\x0e\x32\x13.dwjk.dtp.OrderSide\")\n\x13QueryCapitalRequest\x12\x12\n\naccount_no\x18\x01 \x01(\t\"\x97\x01\n\x14QueryCapitalResponse\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12\x0f\n\x07\x62\x61lance\x18\x02 \x01(\t\x12\x11\n\tavailable\x18\x03 \x01(\t\x12\x0e\n\x06\x66reeze\x18\x04 \x01(\t\x12\x12\n\nsecurities\x18\x05 \x01(\t\x12\r\n\x05total\x18\x06 \x01(\t\x12\x14\n\x0c\x63lear_amount\x18\x07 \x01(\t\"\x8d\x01\n\x14QueryPositionRequest\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12$\n\x08\x65xchange\x18\x02 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x03 \x01(\t\x12-\n\npagination\x18\x04 \x01(\x0b\x32\x19.dwjk.dtp.QueryPagination\"\xcb\x03\n\x15QueryPositionResponse\x12\x45\n\rposition_list\x18\x01 \x03(\x0b\x32..dwjk.dtp.QueryPositionResponse.PositionDetail\x12-\n\npagination\x18\x02 \x01(\x0b\x32\x19.dwjk.dtp.QueryPagination\x1a\xbb\x02\n\x0ePositionDetail\x12$\n\x08\x65xchange\x18\x01 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x0f\n\x07\x62\x61lance\x18\x04 \x01(\x03\x12\x1a\n\x12\x61vailable_quantity\x18\x05 \x01(\x03\x12\x17\n\x0f\x66reeze_quantity\x18\x06 \x01(\x05\x12\x14\n\x0c\x62uy_quantity\x18\x07 \x01(\x03\x12\x15\n\rsell_quantity\x18\x08 \x01(\x03\x12\x14\n\x0cmarket_value\x18\t \x01(\t\x12\x0c\n\x04\x63ost\x18\n \x01(\t\x12\x18\n\x10initial_quantity\x18\x0b \x01(\x03\x12\x19\n\x11purchase_quantity\x18\x0c \x01(\x03\x12\x1b\n\x13redemption_quantity\x18\r \x01(\x03\"(\n\x12QueryRationRequest\x12\x12\n\naccount_no\x18\x01 \x01(\t\"\xc0\x01\n\x13QueryRationResponse\x12\x39\n\x0bration_list\x18\x01 \x03(\x0b\x32$.dwjk.dtp.QueryRationResponse.Ration\x1an\n\x06Ration\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12$\n\x08\x65xchange\x18\x02 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x10\n\x08quantity\x18\x03 \x01(\r\x12\x18\n\x10science_quantity\x18\x04 \x01(\r\"\x82\x01\n\x17QueryBondStorageRequest\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12$\n\x08\x65xchange\x18\x02 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12-\n\npagination\x18\x03 \x01(\x0b\x32\x19.dwjk.dtp.QueryPagination\"\xf7\x01\n\x18QueryBondStorageResponse\x12I\n\x0b\x64\x65tail_list\x18\x01 
\x03(\x0b\x32\x34.dwjk.dtp.QueryBondStorageResponse.BondStorageDetail\x12-\n\npagination\x18\x02 \x01(\x0b\x32\x19.dwjk.dtp.QueryPagination\x1a\x61\n\x11\x42ondStorageDetail\x12$\n\x08\x65xchange\x18\x01 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x02 \x01(\t\x12\x18\n\x10storage_quantity\x18\x03 \x01(\x03\"\x82\x01\n\x17QueryPendingRepoRequest\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12$\n\x08\x65xchange\x18\x02 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12-\n\npagination\x18\x03 \x01(\x0b\x32\x19.dwjk.dtp.QueryPagination\"\x9f\x03\n\x18QueryPendingRepoResponse\x12I\n\x0b\x64\x65tail_list\x18\x01 \x03(\x0b\x32\x34.dwjk.dtp.QueryPendingRepoResponse.PendingRepoDetail\x12-\n\npagination\x18\x02 \x01(\x0b\x32\x19.dwjk.dtp.QueryPagination\x1a\x88\x02\n\x11PendingRepoDetail\x12\x19\n\x11order_exchange_id\x18\x01 \x01(\t\x12$\n\x08\x65xchange\x18\x02 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x10\n\x08\x63urrency\x18\x03 \x01(\t\x12\x0c\n\x04\x63ode\x18\x04 \x01(\t\x12\x0c\n\x04name\x18\x05 \x01(\t\x12\x18\n\x10transaction_type\x18\x06 \x01(\x03\x12\x11\n\tfill_date\x18\x07 \x01(\t\x12\x15\n\rfill_quantity\x18\x08 \x01(\t\x12\x12\n\nfill_price\x18\t \x01(\t\x12\x13\n\x0b\x66ill_amount\x18\n \x01(\t\x12\x17\n\x0finterest_amount\x18\x0b \x01(\t\"m\n#QueryRepoActualOccupationDayRequest\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12$\n\x08\x65xchange\x18\x02 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x03 \x01(\t\"\xe7\x01\n$QueryRepoActualOccupationDayResponse\x12P\n\ndetal_list\x18\x01 \x03(\x0b\x32<.dwjk.dtp.QueryRepoActualOccupationDayResponse.OccupationDay\x1am\n\rOccupationDay\x12$\n\x08\x65xchange\x18\x01 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x02 \x01(\t\x12\x12\n\nactual_day\x18\x03 \x01(\x03\x12\x14\n\x0cstandard_day\x18\x04 \x01(\x03\"M\n\x11QueryQuotaRequest\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12$\n\x08\x65xchange\x18\x02 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\"\xe9\x01\n\x12QueryQuotaResponse\x12\x36\n\nquota_list\x18\x01 \x03(\x0b\x32\".dwjk.dtp.QueryQuotaResponse.Quota\x1a\x9a\x01\n\x05Quota\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12$\n\x08\x65xchange\x18\x02 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x13\n\x0btotal_quota\x18\x03 \x01(\t\x12\x15\n\rsurplus_quota\x18\x04 \x01(\t\x12+\n\x0cquota_status\x18\x05 \x01(\x0e\x32\x15.dwjk.dtp.QuotaStatus\"\x8d\x01\n\x14QueryVoteInfoRequest\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12$\n\x08\x65xchange\x18\x02 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x03 \x01(\t\x12-\n\npagination\x18\x04 \x01(\x0b\x32\x19.dwjk.dtp.QueryPagination\"\x84\x03\n\x15QueryVoteInfoResponse\x12@\n\x0evote_info_list\x18\x01 \x03(\x0b\x32(.dwjk.dtp.QueryVoteInfoResponse.VoteInfo\x12-\n\npagination\x18\x02 \x01(\x0b\x32\x19.dwjk.dtp.QueryPagination\x1a\xf9\x01\n\x08VoteInfo\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12$\n\x08\x65xchange\x18\x02 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x03 \x01(\t\x12\x12\n\nplacard_id\x18\x04 \x01(\t\x12\x11\n\tmotion_id\x18\x05 \x01(\t\x12\x13\n\x0bmotion_name\x18\x06 \x01(\t\x12)\n\x0bmotion_type\x18\x07 \x01(\x0e\x32\x14.dwjk.dtp.MotionType\x12\x18\n\x10\x61\x62stention_allow\x18\x08 \x01(\x08\x12\x12\n\nstart_date\x18\t \x01(\t\x12\x10\n\x08\x65nd_date\x18\n \x01(\t\"\xbf\x01\n\x17QueryDeclareInfoRequest\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12$\n\x08\x65xchange\x18\x02 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x03 \x01(\t\x12-\n\rbusiness_type\x18\x04 \x01(\x0e\x32\x16.dwjk.dtp.BusinessType\x12-\n\npagination\x18\x05 
\x01(\x0b\x32\x19.dwjk.dtp.QueryPagination\"\xe8\x02\n\x18QueryDeclareInfoResponse\x12I\n\x11\x64\x65\x63lare_info_list\x18\x01 \x03(\x0b\x32..dwjk.dtp.QueryDeclareInfoResponse.DeclareInfo\x12-\n\npagination\x18\x02 \x01(\x0b\x32\x19.dwjk.dtp.QueryPagination\x1a\xd1\x01\n\x0b\x44\x65\x63lareInfo\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12$\n\x08\x65xchange\x18\x02 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x03 \x01(\t\x12-\n\rbusiness_type\x18\x04 \x01(\x0e\x32\x16.dwjk.dtp.BusinessType\x12\x16\n\x0e\x61uthority_code\x18\x05 \x01(\t\x12\r\n\x05price\x18\x06 \x01(\t\x12\x12\n\nstart_date\x18\x07 \x01(\t\x12\x10\n\x08\x65nd_date\x18\x08 \x01(\t\"-\n\x17QueryBankServiceRequest\x12\x12\n\naccount_no\x18\x01 \x01(\t\"\xad\x02\n\x18QueryBankServiceResponse\x12\x45\n\tinfo_list\x18\x01 \x03(\x0b\x32\x32.dwjk.dtp.QueryBankServiceResponse.BankServiceInfo\x1a\xc9\x01\n\x0f\x42\x61nkServiceInfo\x12\x12\n\nrequest_no\x18\x01 \x01(\t\x12\x12\n\naccount_no\x18\x02 \x01(\t\x12\x0c\n\x04\x64\x61te\x18\x03 \x01(\t\x12\x0c\n\x04time\x18\x04 \x01(\t\x12\x0e\n\x06result\x18\x05 \x01(\x08\x12\x0f\n\x07message\x18\x06 \x01(\t\x12/\n\x0cservice_code\x18\x07 \x01(\x0e\x32\x19.dwjk.dtp.BankServiceCode\x12\x0e\n\x06\x61mount\x18\x08 \x01(\t\x12\x10\n\x08\x63urrency\x18\t \x01(\t\"\xa6\x02\n\x18QueryMarginOrdersRequest\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12\x19\n\x11order_exchange_id\x18\x02 \x01(\t\x12\x34\n\x0equery_criteria\x18\x03 \x01(\x0e\x32\x1c.dwjk.dtp.QueryOrderCriteria\x12-\n\npagination\x18\x04 \x01(\x0b\x32\x19.dwjk.dtp.QueryPagination\x12$\n\x08\x65xchange\x18\x05 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x06 \x01(\t\x12\x19\n\x11order_original_id\x18\x07 \x01(\t\x12\'\n\norder_side\x18\x08 \x01(\x0e\x32\x13.dwjk.dtp.OrderSide\"\xbe\x05\n\x19QueryMarginOrdersResponse\x12=\n\norder_list\x18\x01 \x03(\x0b\x32).dwjk.dtp.QueryMarginOrdersResponse.Order\x12-\n\npagination\x18\x02 \x01(\x0b\x32\x19.dwjk.dtp.QueryPagination\x1a\xb2\x04\n\x05Order\x12\x19\n\x11order_exchange_id\x18\x01 \x01(\t\x12\x19\n\x11order_original_id\x18\x02 \x01(\t\x12$\n\x08\x65xchange\x18\x03 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x04 \x01(\t\x12\x0c\n\x04name\x18\x05 \x01(\t\x12\r\n\x05price\x18\x06 \x01(\t\x12\x10\n\x08quantity\x18\x07 \x01(\r\x12\'\n\norder_side\x18\x08 \x01(\x0e\x32\x13.dwjk.dtp.OrderSide\x12\'\n\norder_type\x18\t \x01(\x0e\x32\x13.dwjk.dtp.OrderType\x12%\n\x06status\x18\n \x01(\x0e\x32\x15.dwjk.dtp.OrderStatus\x12\x12\n\norder_time\x18\x0b \x01(\t\x12\x12\n\naccount_no\x18\x0c \x01(\t\x12\x1a\n\x12\x61verage_fill_price\x18\r \x01(\t\x12\x1b\n\x13total_fill_quantity\x18\x0e \x01(\r\x12\x19\n\x11total_fill_amount\x18\x0f \x01(\t\x12\x15\n\rfreeze_amount\x18\x10 \x01(\t\x12\x14\n\x0c\x63lear_amount\x18\x11 \x01(\t\x12 \n\x18total_cancelled_quantity\x18\x12 \x01(\r\x12\x16\n\x0estatus_message\x18\x13 \x01(\t\x12\x34\n\rposition_type\x18\x14 \x01(\x0e\x32\x1d.dwjk.dtp.CompactPositionType\"\x8c\x02\n\x17QueryMarginFillsRequest\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12\x19\n\x11order_exchange_id\x18\x02 \x01(\t\x12-\n\npagination\x18\x03 \x01(\x0b\x32\x19.dwjk.dtp.QueryPagination\x12$\n\x08\x65xchange\x18\x04 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x05 \x01(\t\x12\x19\n\x11order_original_id\x18\x06 \x01(\t\x12\'\n\norder_side\x18\x07 \x01(\x0e\x32\x13.dwjk.dtp.OrderSide\x12\x1b\n\x13include_cancel_fill\x18\x08 \x01(\x08\"\x93\x04\n\x18QueryMarginFillsResponse\x12:\n\tfill_list\x18\x01 
\x03(\x0b\x32\'.dwjk.dtp.QueryMarginFillsResponse.Fill\x12-\n\npagination\x18\x02 \x01(\x0b\x32\x19.dwjk.dtp.QueryPagination\x1a\x8b\x03\n\x04\x46ill\x12\x18\n\x10\x66ill_exchange_id\x18\x01 \x01(\t\x12\x11\n\tfill_time\x18\x02 \x01(\t\x12)\n\x0b\x66ill_status\x18\x03 \x01(\x0e\x32\x14.dwjk.dtp.FillStatus\x12\x12\n\nfill_price\x18\x04 \x01(\t\x12\x15\n\rfill_quantity\x18\x05 \x01(\x05\x12\x13\n\x0b\x66ill_amount\x18\x06 \x01(\t\x12\x14\n\x0c\x63lear_amount\x18\x07 \x01(\t\x12\x19\n\x11order_exchange_id\x18\x08 \x01(\t\x12\x19\n\x11order_original_id\x18\t \x01(\t\x12$\n\x08\x65xchange\x18\n \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x0b \x01(\t\x12\x0c\n\x04name\x18\x0c \x01(\t\x12\'\n\norder_side\x18\r \x01(\x0e\x32\x13.dwjk.dtp.OrderSide\x12\x34\n\rposition_type\x18\x0e \x01(\x0e\x32\x1d.dwjk.dtp.CompactPositionType\"/\n\x19QueryMarginCapitalRequest\x12\x12\n\naccount_no\x18\x01 \x01(\t\"\x82\x08\n\x1aQueryMarginCapitalResponse\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12\x14\n\x0c\x61ssure_asset\x18\x02 \x01(\t\x12\x17\n\x0ftotal_liability\x18\x03 \x01(\t\x12\x11\n\tnet_asset\x18\x04 \x01(\t\x12\x16\n\x0emaintain_value\x18\x05 \x01(\t\x12\x12\n\nsecurities\x18\x06 \x01(\t\x12\x18\n\x10\x61vailable_margin\x18\x07 \x01(\t\x12\x12\n\npay_margin\x18\x08 \x01(\t\x12\x1c\n\x14\x61vailable_collateral\x18\t \x01(\t\x12\x19\n\x11\x61vailable_finance\x18\n \x01(\t\x12\x1a\n\x12\x61vailable_security\x18\x0b \x01(\t\x12\x16\n\x0e\x61vailable_cash\x18\x0c \x01(\t\x12\x12\n\ncash_asset\x18\r \x01(\t\x12\x16\n\x0etransfer_asset\x18\x0e \x01(\t\x12\x1e\n\x16\x66inance_compact_amount\x18\x0f \x01(\t\x12\"\n\x1a\x66inance_compact_commission\x18\x10 \x01(\t\x12 \n\x18\x66inance_compact_interest\x18\x11 \x01(\t\x12\x1f\n\x17\x66inance_compact_revenue\x18\x12 \x01(\t\x12 \n\x18\x66inance_available_amount\x18\x13 \x01(\t\x12\x1a\n\x12\x66inance_pay_amount\x18\x14 \x01(\t\x12\x1a\n\x12\x66inance_max_amount\x18\x15 \x01(\t\x12\x1a\n\x12\x66inance_pay_margin\x18\x16 \x01(\t\x12\x1a\n\x12\x66inance_securities\x18\x17 \x01(\t\x12$\n\x1csecurity_loan_compact_amount\x18\x18 \x01(\t\x12(\n security_loan_compact_commission\x18\x19 \x01(\t\x12&\n\x1esecurity_loan_compact_interest\x18\x1a \x01(\t\x12%\n\x1dsecurity_loan_compact_revenue\x18\x1b \x01(\t\x12\x1f\n\x17security_loan_available\x18\x1c \x01(\t\x12 \n\x18security_loan_pay_amount\x18\x1d \x01(\t\x12 \n\x18security_loan_max_amount\x18\x1e \x01(\t\x12 \n\x18security_loan_pay_margin\x18\x1f \x01(\t\x12 \n\x18security_loan_securities\x18 \x01(\t\x12\x1b\n\x13security_loan_total\x18! 
\x01(\t\x12\x1d\n\x15security_loan_balance\x18\" \x01(\t\"\x93\x01\n\x1aQueryMarginPositionRequest\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12$\n\x08\x65xchange\x18\x02 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x03 \x01(\t\x12-\n\npagination\x18\x04 \x01(\x0b\x32\x19.dwjk.dtp.QueryPagination\"\x9f\x03\n\x1bQueryMarginPositionResponse\x12K\n\rposition_list\x18\x01 \x03(\x0b\x32\x34.dwjk.dtp.QueryMarginPositionResponse.PositionDetail\x12-\n\npagination\x18\x02 \x01(\x0b\x32\x19.dwjk.dtp.QueryPagination\x1a\x83\x02\n\x0ePositionDetail\x12$\n\x08\x65xchange\x18\x01 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x0f\n\x07\x62\x61lance\x18\x04 \x01(\x03\x12\x1a\n\x12\x61vailable_quantity\x18\x05 \x01(\x03\x12\x17\n\x0f\x66reeze_quantity\x18\x06 \x01(\x05\x12\x14\n\x0c\x62uy_quantity\x18\x07 \x01(\x03\x12\x15\n\rsell_quantity\x18\x08 \x01(\x03\x12\x14\n\x0cmarket_value\x18\t \x01(\t\x12\x0c\n\x04\x63ost\x18\n \x01(\t\x12\x18\n\x10initial_quantity\x18\x0b \x01(\x03\"\xe1\x01\n\x1cQueryMarginSecuritiesRequest\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12$\n\x08\x65xchange\x18\x02 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x03 \x01(\t\x12\x14\n\x0cinclude_zero\x18\x04 \x01(\x08\x12\x34\n\rposition_type\x18\x05 \x01(\x0e\x32\x1d.dwjk.dtp.CompactPositionType\x12-\n\npagination\x18\x06 \x01(\x0b\x32\x19.dwjk.dtp.QueryPagination\"\x80\x03\n\x1dQueryMarginSecuritiesResponse\x12M\n\rsecurity_list\x18\x01 \x03(\x0b\x32\x36.dwjk.dtp.QueryMarginSecuritiesResponse.SecurityDetail\x12-\n\npagination\x18\x02 \x01(\x0b\x32\x19.dwjk.dtp.QueryPagination\x1a\xe0\x01\n\x0eSecurityDetail\x12$\n\x08\x65xchange\x18\x01 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x16\n\x0esecurity_ratio\x18\x04 \x01(\t\x12,\n\x06status\x18\x05 \x01(\x0e\x32\x1c.dwjk.dtp.SecurityLoanStatus\x12\x10\n\x08quantity\x18\x06 \x01(\r\x12\x34\n\rposition_type\x18\x07 \x01(\x0e\x32\x1d.dwjk.dtp.CompactPositionType\"\xd5\x01\n\x19QueryMarginCompactRequest\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12\x12\n\ncompact_no\x18\x02 \x01(\t\x12+\n\x0c\x63ompact_type\x18\x03 \x01(\x0e\x32\x15.dwjk.dtp.CompactType\x12\x34\n\rposition_type\x18\x04 \x01(\x0e\x32\x1d.dwjk.dtp.CompactPositionType\x12-\n\npagination\x18\x05 \x01(\x0b\x32\x19.dwjk.dtp.QueryPagination\"\xc8\x05\n\x1aQueryMarginCompactResponse\x12H\n\x0c\x63ompact_list\x18\x01 \x03(\x0b\x32\x32.dwjk.dtp.QueryMarginCompactResponse.CompactDetail\x12-\n\npagination\x18\x02 \x01(\x0b\x32\x19.dwjk.dtp.QueryPagination\x1a\xb0\x04\n\rCompactDetail\x12\x14\n\x0c\x63ompact_date\x18\x01 \x01(\t\x12\x12\n\ncompact_no\x18\x02 \x01(\t\x12$\n\x08\x65xchange\x18\x03 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x04 \x01(\t\x12+\n\x0c\x63ompact_type\x18\x05 \x01(\x0e\x32\x15.dwjk.dtp.CompactType\x12\'\n\x06status\x18\x06 \x01(\x0e\x32\x17.dwjk.dtp.CompactStatus\x12\x13\n\x0bopen_amount\x18\x07 \x01(\t\x12\x15\n\ropen_quantity\x18\x08 \x01(\r\x12\x17\n\x0fopen_commission\x18\t \x01(\t\x12\x15\n\rno_pay_amount\x18\n \x01(\t\x12\x17\n\x0fno_pay_quantity\x18\x0b \x01(\r\x12\x19\n\x11no_pay_commission\x18\x0c \x01(\t\x12\x17\n\x0fno_pay_interest\x18\r \x01(\t\x12\x1d\n\x15return_total_interest\x18\x0e \x01(\t\x12\x1a\n\x12year_interest_rate\x18\x0f \x01(\t\x12\x19\n\x11return_close_date\x18\x10 \x01(\t\x12\x34\n\rposition_type\x18\x11 \x01(\x0e\x32\x1d.dwjk.dtp.CompactPositionType\x12\x17\n\x0f\x66inance_revenue\x18\x12 
\x01(\t\x12\x1d\n\x15security_loan_revenue\x18\x13 \x01(\t\"\xd8\x01\n\x1eQueryMarginCompactWaterRequest\x12\x12\n\naccount_no\x18\x01 \x01(\t\x12$\n\x08\x65xchange\x18\x02 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x03 \x01(\t\x12\x12\n\ncompact_no\x18\x04 \x01(\t\x12+\n\x0c\x63ompact_type\x18\x05 \x01(\x0e\x32\x15.dwjk.dtp.CompactType\x12-\n\npagination\x18\x06 \x01(\x0b\x32\x19.dwjk.dtp.QueryPagination\"\x8d\x04\n\x1fQueryMarginCompactWaterResponse\x12I\n\nwater_list\x18\x01 \x03(\x0b\x32\x35.dwjk.dtp.QueryMarginCompactWaterResponse.WaterDetail\x12-\n\npagination\x18\x02 \x01(\x0b\x32\x19.dwjk.dtp.QueryPagination\x1a\xef\x02\n\x0bWaterDetail\x12\x14\n\x0c\x63ompact_date\x18\x01 \x01(\t\x12\x12\n\ncompact_no\x18\x02 \x01(\t\x12$\n\x08\x65xchange\x18\x03 \x01(\x0e\x32\x12.dwjk.dtp.Exchange\x12\x0c\n\x04\x63ode\x18\x04 \x01(\t\x12\x19\n\x11order_exchange_id\x18\x05 \x01(\t\x12+\n\x0c\x63ompact_type\x18\x06 \x01(\x0e\x32\x15.dwjk.dtp.CompactType\x12\x12\n\npay_amount\x18\x07 \x01(\t\x12\x14\n\x0cpay_quantity\x18\x08 \x01(\r\x12\x16\n\x0epay_commission\x18\t \x01(\t\x12\x14\n\x0cpay_interest\x18\n \x01(\t\x12\x15\n\rno_pay_amount\x18\x0b \x01(\t\x12\x17\n\x0fno_pay_quantity\x18\x0c \x01(\r\x12\x19\n\x11no_pay_commission\x18\r \x01(\t\x12\x17\n\x0fno_pay_interest\x18\x0e \x01(\t\"/\n\x0fQueryPagination\x12\x0e\n\x06offset\x18\x01 \x01(\t\x12\x0c\n\x04size\x18\x02 \x01(\r\"=\n\x0f\x41lgorithmicSend\x12\x11\n\tclient_id\x18\x01 \x01(\t\x12\x17\n\x0fsub_original_id\x18\x02 \x01(\tb\x06proto3')
,
dependencies=[dtp_dot_type__pb2.DESCRIPTOR,])
_REQUESTHEADER = _descriptor.Descriptor(
name='RequestHeader',
full_name='dwjk.dtp.RequestHeader',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='api_id', full_name='dwjk.dtp.RequestHeader.api_id', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='request_id', full_name='dwjk.dtp.RequestHeader.request_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='token', full_name='dwjk.dtp.RequestHeader.token', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='dwjk.dtp.RequestHeader.user_id', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.RequestHeader.account_no', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ip', full_name='dwjk.dtp.RequestHeader.ip', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='mac', full_name='dwjk.dtp.RequestHeader.mac', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='harddisk', full_name='dwjk.dtp.RequestHeader.harddisk', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tag', full_name='dwjk.dtp.RequestHeader.tag', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=44,
serialized_end=203,
)
_RESPONSEHEADER = _descriptor.Descriptor(
name='ResponseHeader',
full_name='dwjk.dtp.ResponseHeader',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='api_id', full_name='dwjk.dtp.ResponseHeader.api_id', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='request_id', full_name='dwjk.dtp.ResponseHeader.request_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.ResponseHeader.code', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='message', full_name='dwjk.dtp.ResponseHeader.message', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=205,
serialized_end=318,
)
_REPORTHEADER = _descriptor.Descriptor(
name='ReportHeader',
full_name='dwjk.dtp.ReportHeader',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='api_id', full_name='dwjk.dtp.ReportHeader.api_id', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.ReportHeader.code', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='message', full_name='dwjk.dtp.ReportHeader.message', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='request_id', full_name='dwjk.dtp.ReportHeader.request_id', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='dwjk.dtp.ReportHeader.user_id', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.ReportHeader.account_no', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ip', full_name='dwjk.dtp.ReportHeader.ip', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='mac', full_name='dwjk.dtp.ReportHeader.mac', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='harddisk', full_name='dwjk.dtp.ReportHeader.harddisk', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tag', full_name='dwjk.dtp.ReportHeader.tag', index=9,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=321,
serialized_end=525,
)
_LOGINACCOUNTREQUEST = _descriptor.Descriptor(
name='LoginAccountRequest',
full_name='dwjk.dtp.LoginAccountRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.LoginAccountRequest.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='password', full_name='dwjk.dtp.LoginAccountRequest.password', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='login_name', full_name='dwjk.dtp.LoginAccountRequest.login_name', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=527,
serialized_end=606,
)
_LOGINACCOUNTRESPONSE = _descriptor.Descriptor(
name='LoginAccountResponse',
full_name='dwjk.dtp.LoginAccountResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='token', full_name='dwjk.dtp.LoginAccountResponse.token', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=608,
serialized_end=645,
)
_LOGOUTACCOUNTREQUEST = _descriptor.Descriptor(
name='LogoutAccountRequest',
full_name='dwjk.dtp.LogoutAccountRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.LogoutAccountRequest.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=647,
serialized_end=689,
)
_LOGOUTACCOUNTRESPONSE = _descriptor.Descriptor(
name='LogoutAccountResponse',
full_name='dwjk.dtp.LogoutAccountResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=691,
serialized_end=714,
)
_PLACEORDER = _descriptor.Descriptor(
name='PlaceOrder',
full_name='dwjk.dtp.PlaceOrder',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.PlaceOrder.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_original_id', full_name='dwjk.dtp.PlaceOrder.order_original_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.PlaceOrder.exchange', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.PlaceOrder.code', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='price', full_name='dwjk.dtp.PlaceOrder.price', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='quantity', full_name='dwjk.dtp.PlaceOrder.quantity', index=5,
number=6, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_side', full_name='dwjk.dtp.PlaceOrder.order_side', index=6,
number=7, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_type', full_name='dwjk.dtp.PlaceOrder.order_type', index=7,
number=8, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='contacts', full_name='dwjk.dtp.PlaceOrder.contacts', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='phone', full_name='dwjk.dtp.PlaceOrder.phone', index=9,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='convention_no', full_name='dwjk.dtp.PlaceOrder.convention_no', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_limit', full_name='dwjk.dtp.PlaceOrder.order_limit', index=11,
number=12, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=717,
serialized_end=1042,
)
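# Hedged usage sketch (comments only, so nothing executes while this generated
# module is still being loaded): the PlaceOrder message class built from the
# descriptor above can be filled via keyword arguments and serialized for the
# wire. The field values and the importing module's name are illustrative
# assumptions, not taken from this file; the enum fields (exchange, order_side,
# order_type, order_limit) are omitted because their value names are defined in
# the imported enum definitions, not here. Note that price is carried as a
# string (type=9) and quantity as a uint32 (type=13) per the descriptor.
#
#   from <this_generated_module> import PlaceOrder   # module name depends on the .proto file
#
#   order = PlaceOrder(
#       account_no='A0001',                 # illustrative account id
#       order_original_id='20190101-0001',  # illustrative client-side order id
#       code='600000',
#       price='10.50',
#       quantity=100,
#   )
#   payload = order.SerializeToString()     # bytes ready to send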
_CANCELORDER = _descriptor.Descriptor(
name='CancelOrder',
full_name='dwjk.dtp.CancelOrder',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.CancelOrder.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.CancelOrder.exchange', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_exchange_id', full_name='dwjk.dtp.CancelOrder.order_exchange_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.CancelOrder.code', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1044,
serialized_end=1156,
)
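# Hedged sketch (comments only): CancelOrder mirrors PlaceOrder but is keyed by
# the exchange-assigned order id. A typical flow, assumed here rather than
# documented in this file, copies order_exchange_id from an earlier PlacedReport:
#
#   cancel = CancelOrder(
#       account_no=report.account_no,
#       order_exchange_id=report.order_exchange_id,
#       code=report.code,
#   )
#   cancel.exchange = report.exchange   # enum field; value names live in the imported enum module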
_CANCELRESPONSE = _descriptor.Descriptor(
name='CancelResponse',
full_name='dwjk.dtp.CancelResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.CancelResponse.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.CancelResponse.exchange', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_exchange_id', full_name='dwjk.dtp.CancelResponse.order_exchange_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cancel_exchange_id', full_name='dwjk.dtp.CancelResponse.cancel_exchange_id', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1159,
serialized_end=1288,
)
_PLACEVOTE = _descriptor.Descriptor(
name='PlaceVote',
full_name='dwjk.dtp.PlaceVote',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.PlaceVote.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_original_id', full_name='dwjk.dtp.PlaceVote.order_original_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.PlaceVote.exchange', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.PlaceVote.code', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='placard_id', full_name='dwjk.dtp.PlaceVote.placard_id', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='motion_id', full_name='dwjk.dtp.PlaceVote.motion_id', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='favour_count', full_name='dwjk.dtp.PlaceVote.favour_count', index=6,
number=7, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='opposition_count', full_name='dwjk.dtp.PlaceVote.opposition_count', index=7,
number=8, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='abstention_count', full_name='dwjk.dtp.PlaceVote.abstention_count', index=8,
number=9, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1291,
serialized_end=1514,
)
_PLACEDECLARE = _descriptor.Descriptor(
name='PlaceDeclare',
full_name='dwjk.dtp.PlaceDeclare',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.PlaceDeclare.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_original_id', full_name='dwjk.dtp.PlaceDeclare.order_original_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.PlaceDeclare.exchange', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.PlaceDeclare.code', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='business_type', full_name='dwjk.dtp.PlaceDeclare.business_type', index=4,
number=5, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='declare_type', full_name='dwjk.dtp.PlaceDeclare.declare_type', index=5,
number=6, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='count', full_name='dwjk.dtp.PlaceDeclare.count', index=6,
number=7, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1517,
serialized_end=1737,
)
_MARGINPLACEORDER = _descriptor.Descriptor(
name='MarginPlaceOrder',
full_name='dwjk.dtp.MarginPlaceOrder',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.MarginPlaceOrder.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_original_id', full_name='dwjk.dtp.MarginPlaceOrder.order_original_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.MarginPlaceOrder.exchange', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.MarginPlaceOrder.code', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='price', full_name='dwjk.dtp.MarginPlaceOrder.price', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='quantity', full_name='dwjk.dtp.MarginPlaceOrder.quantity', index=5,
number=6, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_side', full_name='dwjk.dtp.MarginPlaceOrder.order_side', index=6,
number=7, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_type', full_name='dwjk.dtp.MarginPlaceOrder.order_type', index=7,
number=8, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='contacts', full_name='dwjk.dtp.MarginPlaceOrder.contacts', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='phone', full_name='dwjk.dtp.MarginPlaceOrder.phone', index=9,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='convention_no', full_name='dwjk.dtp.MarginPlaceOrder.convention_no', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_limit', full_name='dwjk.dtp.MarginPlaceOrder.order_limit', index=11,
number=12, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='compact_no', full_name='dwjk.dtp.MarginPlaceOrder.compact_no', index=12,
number=13, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='position_type', full_name='dwjk.dtp.MarginPlaceOrder.position_type', index=13,
number=14, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1740,
serialized_end=2145,
)
_MARGINCANCELORDER = _descriptor.Descriptor(
name='MarginCancelOrder',
full_name='dwjk.dtp.MarginCancelOrder',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.MarginCancelOrder.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.MarginCancelOrder.exchange', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_exchange_id', full_name='dwjk.dtp.MarginCancelOrder.order_exchange_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.MarginCancelOrder.code', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2147,
serialized_end=2265,
)
_MARGINCANCELRESPONSE = _descriptor.Descriptor(
name='MarginCancelResponse',
full_name='dwjk.dtp.MarginCancelResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.MarginCancelResponse.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.MarginCancelResponse.exchange', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_exchange_id', full_name='dwjk.dtp.MarginCancelResponse.order_exchange_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cancel_exchange_id', full_name='dwjk.dtp.MarginCancelResponse.cancel_exchange_id', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2268,
serialized_end=2403,
)
_MARGINREPAYAMOUNTORDER = _descriptor.Descriptor(
name='MarginRepayAmountOrder',
full_name='dwjk.dtp.MarginRepayAmountOrder',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.MarginRepayAmountOrder.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_original_id', full_name='dwjk.dtp.MarginRepayAmountOrder.order_original_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='compact_no', full_name='dwjk.dtp.MarginRepayAmountOrder.compact_no', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='repay_amount', full_name='dwjk.dtp.MarginRepayAmountOrder.repay_amount', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='position_type', full_name='dwjk.dtp.MarginRepayAmountOrder.position_type', index=4,
number=5, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2406,
serialized_end=2573,
)
_MARGINREPAYSECURITYORDER = _descriptor.Descriptor(
name='MarginRepaySecurityOrder',
full_name='dwjk.dtp.MarginRepaySecurityOrder',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.MarginRepaySecurityOrder.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_original_id', full_name='dwjk.dtp.MarginRepaySecurityOrder.order_original_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.MarginRepaySecurityOrder.exchange', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.MarginRepaySecurityOrder.code', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='quantity', full_name='dwjk.dtp.MarginRepaySecurityOrder.quantity', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='compact_no', full_name='dwjk.dtp.MarginRepaySecurityOrder.compact_no', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='position_type', full_name='dwjk.dtp.MarginRepaySecurityOrder.position_type', index=6,
number=7, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2576,
serialized_end=2793,
)
_PLACEBATCHORDER_BATCHORDERITEM = _descriptor.Descriptor(
name='BatchOrderItem',
full_name='dwjk.dtp.PlaceBatchOrder.BatchOrderItem',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.PlaceBatchOrder.BatchOrderItem.exchange', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.PlaceBatchOrder.BatchOrderItem.code', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_side', full_name='dwjk.dtp.PlaceBatchOrder.BatchOrderItem.order_side', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='price', full_name='dwjk.dtp.PlaceBatchOrder.BatchOrderItem.price', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='quantity', full_name='dwjk.dtp.PlaceBatchOrder.BatchOrderItem.quantity', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_type', full_name='dwjk.dtp.PlaceBatchOrder.BatchOrderItem.order_type', index=5,
number=6, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_original_id', full_name='dwjk.dtp.PlaceBatchOrder.BatchOrderItem.order_original_id', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_limit', full_name='dwjk.dtp.PlaceBatchOrder.BatchOrderItem.order_limit', index=7,
number=12, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2918,
serialized_end=3171,
)
_PLACEBATCHORDER = _descriptor.Descriptor(
name='PlaceBatchOrder',
full_name='dwjk.dtp.PlaceBatchOrder',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.PlaceBatchOrder.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='all_failed', full_name='dwjk.dtp.PlaceBatchOrder.all_failed', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_list', full_name='dwjk.dtp.PlaceBatchOrder.order_list', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_PLACEBATCHORDER_BATCHORDERITEM, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2796,
serialized_end=3171,
)
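# Hedged sketch (comments only): PlaceBatchOrder nests BatchOrderItem and exposes
# it through the repeated message field order_list (label=3, type=11). Repeated
# message fields are filled with add(); the values below are illustrative:
#
#   batch = PlaceBatchOrder(account_no='A0001', all_failed=True)
#   item = batch.order_list.add()        # returns a new BatchOrderItem
#   item.code = '600000'
#   item.price = '10.50'
#   item.quantity = 100
#   batch.order_list.add(code='000001', price='9.80', quantity=200)  # kwargs also work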
_PLACEBATCHRESPONSE = _descriptor.Descriptor(
name='PlaceBatchResponse',
full_name='dwjk.dtp.PlaceBatchResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.PlaceBatchResponse.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_count', full_name='dwjk.dtp.PlaceBatchResponse.total_count', index=1,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='success_count', full_name='dwjk.dtp.PlaceBatchResponse.success_count', index=2,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='failed_count', full_name='dwjk.dtp.PlaceBatchResponse.failed_count', index=3,
number=5, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3173,
serialized_end=3279,
)
_CANCELBATCHORDER = _descriptor.Descriptor(
name='CancelBatchOrder',
full_name='dwjk.dtp.CancelBatchOrder',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.CancelBatchOrder.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchangeID', full_name='dwjk.dtp.CancelBatchOrder.exchangeID', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3281,
serialized_end=3339,
)
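# Hedged sketch (comments only): exchangeID is a repeated string field
# (label=3, type=9), so on the built CancelBatchOrder class it behaves like a
# Python list of exchange-assigned order ids (ids below are illustrative):
#
#   cancel_batch = CancelBatchOrder(account_no='A0001')
#   cancel_batch.exchangeID.extend(['EX0001', 'EX0002'])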
_CANCELBATCHRESPONSE = _descriptor.Descriptor(
name='CancelBatchResponse',
full_name='dwjk.dtp.CancelBatchResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.CancelBatchResponse.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_count', full_name='dwjk.dtp.CancelBatchResponse.total_count', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='success_count', full_name='dwjk.dtp.CancelBatchResponse.success_count', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='failed_count', full_name='dwjk.dtp.CancelBatchResponse.failed_count', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3341,
serialized_end=3448,
)
_PLACEDREPORT = _descriptor.Descriptor(
name='PlacedReport',
full_name='dwjk.dtp.PlacedReport',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='order_exchange_id', full_name='dwjk.dtp.PlacedReport.order_exchange_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='placed_time', full_name='dwjk.dtp.PlacedReport.placed_time', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='freeze_amount', full_name='dwjk.dtp.PlacedReport.freeze_amount', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status', full_name='dwjk.dtp.PlacedReport.status', index=3,
number=4, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_original_id', full_name='dwjk.dtp.PlacedReport.order_original_id', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.PlacedReport.account_no', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.PlacedReport.exchange', index=6,
number=7, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.PlacedReport.code', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='quantity', full_name='dwjk.dtp.PlacedReport.quantity', index=8,
number=9, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_side', full_name='dwjk.dtp.PlacedReport.order_side', index=9,
number=10, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='price', full_name='dwjk.dtp.PlacedReport.price', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3451,
serialized_end=3748,
)
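# Hedged sketch (comments only): the descriptor objects defined in this file are
# also reachable at runtime for reflection, e.g. to enumerate the fields a
# report carries without hard-coding them:
#
#   for field in PlacedReport.DESCRIPTOR.fields:
#       print(field.number, field.name)   # 1 order_exchange_id, 2 placed_time, ...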
_FILLREPORT = _descriptor.Descriptor(
name='FillReport',
full_name='dwjk.dtp.FillReport',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='fill_exchange_id', full_name='dwjk.dtp.FillReport.fill_exchange_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fill_time', full_name='dwjk.dtp.FillReport.fill_time', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fill_status', full_name='dwjk.dtp.FillReport.fill_status', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fill_price', full_name='dwjk.dtp.FillReport.fill_price', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fill_quantity', full_name='dwjk.dtp.FillReport.fill_quantity', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fill_amount', full_name='dwjk.dtp.FillReport.fill_amount', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='clear_amount', full_name='dwjk.dtp.FillReport.clear_amount', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_fill_quantity', full_name='dwjk.dtp.FillReport.total_fill_quantity', index=7,
number=8, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_fill_amount', full_name='dwjk.dtp.FillReport.total_fill_amount', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_cancelled_quantity', full_name='dwjk.dtp.FillReport.total_cancelled_quantity', index=9,
number=10, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_exchange_id', full_name='dwjk.dtp.FillReport.order_exchange_id', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_original_id', full_name='dwjk.dtp.FillReport.order_original_id', index=11,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.FillReport.account_no', index=12,
number=13, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.FillReport.exchange', index=13,
number=14, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.FillReport.code', index=14,
number=15, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='price', full_name='dwjk.dtp.FillReport.price', index=15,
number=16, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='quantity', full_name='dwjk.dtp.FillReport.quantity', index=16,
number=17, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_side', full_name='dwjk.dtp.FillReport.order_side', index=17,
number=18, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_clear_amount', full_name='dwjk.dtp.FillReport.total_clear_amount', index=18,
number=19, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3751,
serialized_end=4255,
)
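# Hedged sketch (comments only): report messages such as FillReport normally
# arrive as serialized bytes and are decoded with ParseFromString; the variable
# names are illustrative:
#
#   report = FillReport()
#   report.ParseFromString(payload)   # payload: bytes received from the counterparty
#   print(report.order_exchange_id, report.fill_price, report.fill_quantity)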
_CANCELLATIONREPORT = _descriptor.Descriptor(
name='CancellationReport',
full_name='dwjk.dtp.CancellationReport',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='order_exchange_id', full_name='dwjk.dtp.CancellationReport.order_exchange_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_original_id', full_name='dwjk.dtp.CancellationReport.order_original_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.CancellationReport.account_no', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.CancellationReport.exchange', index=3,
number=4, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.CancellationReport.code', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='quantity', full_name='dwjk.dtp.CancellationReport.quantity', index=5,
number=6, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_side', full_name='dwjk.dtp.CancellationReport.order_side', index=6,
number=7, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status', full_name='dwjk.dtp.CancellationReport.status', index=7,
number=8, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_fill_quantity', full_name='dwjk.dtp.CancellationReport.total_fill_quantity', index=8,
number=9, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cancelled_quantity', full_name='dwjk.dtp.CancellationReport.cancelled_quantity', index=9,
number=10, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='freeze_amount', full_name='dwjk.dtp.CancellationReport.freeze_amount', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4258,
serialized_end=4582,
)
_VOTEREPORT = _descriptor.Descriptor(
name='VoteReport',
full_name='dwjk.dtp.VoteReport',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='order_exchange_id', full_name='dwjk.dtp.VoteReport.order_exchange_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='placed_time', full_name='dwjk.dtp.VoteReport.placed_time', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status', full_name='dwjk.dtp.VoteReport.status', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_original_id', full_name='dwjk.dtp.VoteReport.order_original_id', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.VoteReport.account_no', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.VoteReport.exchange', index=5,
number=6, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.VoteReport.code', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='placard_id', full_name='dwjk.dtp.VoteReport.placard_id', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='motion_id', full_name='dwjk.dtp.VoteReport.motion_id', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='favour_count', full_name='dwjk.dtp.VoteReport.favour_count', index=9,
number=10, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='opposition_count', full_name='dwjk.dtp.VoteReport.opposition_count', index=10,
number=11, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='abstention_count', full_name='dwjk.dtp.VoteReport.abstention_count', index=11,
number=12, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4585,
serialized_end=4896,
)
_DECLAREREPORT = _descriptor.Descriptor(
name='DeclareReport',
full_name='dwjk.dtp.DeclareReport',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='order_exchange_id', full_name='dwjk.dtp.DeclareReport.order_exchange_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='placed_time', full_name='dwjk.dtp.DeclareReport.placed_time', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status', full_name='dwjk.dtp.DeclareReport.status', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_original_id', full_name='dwjk.dtp.DeclareReport.order_original_id', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.DeclareReport.account_no', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.DeclareReport.exchange', index=5,
number=6, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.DeclareReport.code', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='business_type', full_name='dwjk.dtp.DeclareReport.business_type', index=7,
number=8, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='declare_type', full_name='dwjk.dtp.DeclareReport.declare_type', index=8,
number=9, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='count', full_name='dwjk.dtp.DeclareReport.count', index=9,
number=10, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4899,
serialized_end=5207,
)
_MARGINPLACEDREPORT = _descriptor.Descriptor(
name='MarginPlacedReport',
full_name='dwjk.dtp.MarginPlacedReport',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='order_exchange_id', full_name='dwjk.dtp.MarginPlacedReport.order_exchange_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='placed_time', full_name='dwjk.dtp.MarginPlacedReport.placed_time', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='freeze_amount', full_name='dwjk.dtp.MarginPlacedReport.freeze_amount', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status', full_name='dwjk.dtp.MarginPlacedReport.status', index=3,
number=4, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_original_id', full_name='dwjk.dtp.MarginPlacedReport.order_original_id', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.MarginPlacedReport.account_no', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.MarginPlacedReport.exchange', index=6,
number=7, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.MarginPlacedReport.code', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='quantity', full_name='dwjk.dtp.MarginPlacedReport.quantity', index=8,
number=9, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_side', full_name='dwjk.dtp.MarginPlacedReport.order_side', index=9,
number=10, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='price', full_name='dwjk.dtp.MarginPlacedReport.price', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='compact_no', full_name='dwjk.dtp.MarginPlacedReport.compact_no', index=11,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='position_type', full_name='dwjk.dtp.MarginPlacedReport.position_type', index=12,
number=13, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5210,
serialized_end=5587,
)
_MARGINREPAYAMOUNTREPORT = _descriptor.Descriptor(
name='MarginRepayAmountReport',
full_name='dwjk.dtp.MarginRepayAmountReport',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='placed_time', full_name='dwjk.dtp.MarginRepayAmountReport.placed_time', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status', full_name='dwjk.dtp.MarginRepayAmountReport.status', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_original_id', full_name='dwjk.dtp.MarginRepayAmountReport.order_original_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.MarginRepayAmountReport.account_no', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='repay_amount', full_name='dwjk.dtp.MarginRepayAmountReport.repay_amount', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='compact_no', full_name='dwjk.dtp.MarginRepayAmountReport.compact_no', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='position_type', full_name='dwjk.dtp.MarginRepayAmountReport.position_type', index=6,
number=7, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5590,
serialized_end=5818,
)
_MARGINREPAYSECURITYREPORT = _descriptor.Descriptor(
name='MarginRepaySecurityReport',
full_name='dwjk.dtp.MarginRepaySecurityReport',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='order_exchange_id', full_name='dwjk.dtp.MarginRepaySecurityReport.order_exchange_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='placed_time', full_name='dwjk.dtp.MarginRepaySecurityReport.placed_time', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status', full_name='dwjk.dtp.MarginRepaySecurityReport.status', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_original_id', full_name='dwjk.dtp.MarginRepaySecurityReport.order_original_id', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.MarginRepaySecurityReport.account_no', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.MarginRepaySecurityReport.exchange', index=5,
number=6, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.MarginRepaySecurityReport.code', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='quantity', full_name='dwjk.dtp.MarginRepaySecurityReport.quantity', index=7,
number=8, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='compact_no', full_name='dwjk.dtp.MarginRepaySecurityReport.compact_no', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='position_type', full_name='dwjk.dtp.MarginRepaySecurityReport.position_type', index=9,
number=10, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5821,
serialized_end=6126,
)
_MARGINFILLREPORT = _descriptor.Descriptor(
name='MarginFillReport',
full_name='dwjk.dtp.MarginFillReport',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='fill_exchange_id', full_name='dwjk.dtp.MarginFillReport.fill_exchange_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fill_time', full_name='dwjk.dtp.MarginFillReport.fill_time', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fill_status', full_name='dwjk.dtp.MarginFillReport.fill_status', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fill_price', full_name='dwjk.dtp.MarginFillReport.fill_price', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fill_quantity', full_name='dwjk.dtp.MarginFillReport.fill_quantity', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fill_amount', full_name='dwjk.dtp.MarginFillReport.fill_amount', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='clear_amount', full_name='dwjk.dtp.MarginFillReport.clear_amount', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_fill_quantity', full_name='dwjk.dtp.MarginFillReport.total_fill_quantity', index=7,
number=8, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_fill_amount', full_name='dwjk.dtp.MarginFillReport.total_fill_amount', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_cancelled_quantity', full_name='dwjk.dtp.MarginFillReport.total_cancelled_quantity', index=9,
number=10, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_exchange_id', full_name='dwjk.dtp.MarginFillReport.order_exchange_id', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_original_id', full_name='dwjk.dtp.MarginFillReport.order_original_id', index=11,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.MarginFillReport.account_no', index=12,
number=13, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.MarginFillReport.exchange', index=13,
number=14, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.MarginFillReport.code', index=14,
number=15, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='price', full_name='dwjk.dtp.MarginFillReport.price', index=15,
number=16, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='quantity', full_name='dwjk.dtp.MarginFillReport.quantity', index=16,
number=17, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_side', full_name='dwjk.dtp.MarginFillReport.order_side', index=17,
number=18, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_clear_amount', full_name='dwjk.dtp.MarginFillReport.total_clear_amount', index=18,
number=19, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='position_type', full_name='dwjk.dtp.MarginFillReport.position_type', index=19,
number=20, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6129,
serialized_end=6693,
)
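# Illustrative note (not emitted by protoc; the callback wiring below is an
# assumption, only the field names come from the descriptor above): a handler
# receiving a MarginFillReport might read it roughly like this once the
# message classes are built further down in this module.
#
#   def on_margin_fill(report):            # report: MarginFillReport
#       print(report.order_exchange_id,
#             report.fill_price,           # decimal carried as a string (type=9)
#             report.fill_quantity,        # uint32 (type=13)
#             report.total_fill_amount)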
_MARGINCANCELLATIONREPORT = _descriptor.Descriptor(
name='MarginCancellationReport',
full_name='dwjk.dtp.MarginCancellationReport',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='order_exchange_id', full_name='dwjk.dtp.MarginCancellationReport.order_exchange_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_original_id', full_name='dwjk.dtp.MarginCancellationReport.order_original_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.MarginCancellationReport.account_no', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.MarginCancellationReport.exchange', index=3,
number=4, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.MarginCancellationReport.code', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='quantity', full_name='dwjk.dtp.MarginCancellationReport.quantity', index=5,
number=6, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_side', full_name='dwjk.dtp.MarginCancellationReport.order_side', index=6,
number=7, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status', full_name='dwjk.dtp.MarginCancellationReport.status', index=7,
number=8, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_fill_quantity', full_name='dwjk.dtp.MarginCancellationReport.total_fill_quantity', index=8,
number=9, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cancelled_quantity', full_name='dwjk.dtp.MarginCancellationReport.cancelled_quantity', index=9,
number=10, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='freeze_amount', full_name='dwjk.dtp.MarginCancellationReport.freeze_amount', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='position_type', full_name='dwjk.dtp.MarginCancellationReport.position_type', index=11,
number=12, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6696,
serialized_end=7080,
)
_QUERYORDERSREQUEST = _descriptor.Descriptor(
name='QueryOrdersRequest',
full_name='dwjk.dtp.QueryOrdersRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryOrdersRequest.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_exchange_id', full_name='dwjk.dtp.QueryOrdersRequest.order_exchange_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='query_criteria', full_name='dwjk.dtp.QueryOrdersRequest.query_criteria', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='dwjk.dtp.QueryOrdersRequest.pagination', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryOrdersRequest.exchange', index=4,
number=5, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.QueryOrdersRequest.code', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_original_id', full_name='dwjk.dtp.QueryOrdersRequest.order_original_id', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_side', full_name='dwjk.dtp.QueryOrdersRequest.order_side', index=7,
number=8, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7083,
serialized_end=7371,
)
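# Illustrative sketch (not part of the generated descriptors; account number
# and enum value are placeholders, field names and tags come from the
# descriptor above): once the message classes are created below, a
# QueryOrdersRequest could be populated and serialized like this.
#
#   req = QueryOrdersRequest()
#   req.account_no = '0200012345'       # tag 1, string
#   req.exchange = 1                    # tag 5, enum (numeric value assumed)
#   req.code = '600000'                 # tag 6, string
#   payload = req.SerializeToString()   # wire-format bytes for the gateway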
_QUERYORDERSRESPONSE_ORDER = _descriptor.Descriptor(
name='Order',
full_name='dwjk.dtp.QueryOrdersResponse.Order',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='order_exchange_id', full_name='dwjk.dtp.QueryOrdersResponse.Order.order_exchange_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_original_id', full_name='dwjk.dtp.QueryOrdersResponse.Order.order_original_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryOrdersResponse.Order.exchange', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.QueryOrdersResponse.Order.code', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='dwjk.dtp.QueryOrdersResponse.Order.name', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='price', full_name='dwjk.dtp.QueryOrdersResponse.Order.price', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='quantity', full_name='dwjk.dtp.QueryOrdersResponse.Order.quantity', index=6,
number=7, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_side', full_name='dwjk.dtp.QueryOrdersResponse.Order.order_side', index=7,
number=8, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_type', full_name='dwjk.dtp.QueryOrdersResponse.Order.order_type', index=8,
number=9, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status', full_name='dwjk.dtp.QueryOrdersResponse.Order.status', index=9,
number=10, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_time', full_name='dwjk.dtp.QueryOrdersResponse.Order.order_time', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryOrdersResponse.Order.account_no', index=11,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='average_fill_price', full_name='dwjk.dtp.QueryOrdersResponse.Order.average_fill_price', index=12,
number=13, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_fill_quantity', full_name='dwjk.dtp.QueryOrdersResponse.Order.total_fill_quantity', index=13,
number=14, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_fill_amount', full_name='dwjk.dtp.QueryOrdersResponse.Order.total_fill_amount', index=14,
number=15, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='freeze_amount', full_name='dwjk.dtp.QueryOrdersResponse.Order.freeze_amount', index=15,
number=16, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='clear_amount', full_name='dwjk.dtp.QueryOrdersResponse.Order.clear_amount', index=16,
number=17, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_cancelled_quantity', full_name='dwjk.dtp.QueryOrdersResponse.Order.total_cancelled_quantity', index=17,
number=18, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status_message', full_name='dwjk.dtp.QueryOrdersResponse.Order.status_message', index=18,
number=19, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7502,
serialized_end=8010,
)
_QUERYORDERSRESPONSE = _descriptor.Descriptor(
name='QueryOrdersResponse',
full_name='dwjk.dtp.QueryOrdersResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='order_list', full_name='dwjk.dtp.QueryOrdersResponse.order_list', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='dwjk.dtp.QueryOrdersResponse.pagination', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_QUERYORDERSRESPONSE_ORDER, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7374,
serialized_end=8010,
)
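# Illustrative sketch (assumption: `payload` holds wire-format bytes returned
# by the query side). order_list is a repeated nested Order message (label=3
# above), so it is iterated directly after parsing:
#
#   resp = QueryOrdersResponse()
#   resp.ParseFromString(payload)
#   for order in resp.order_list:
#       print(order.order_exchange_id, order.status, order.total_fill_quantity)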
_QUERYFILLSREQUEST = _descriptor.Descriptor(
name='QueryFillsRequest',
full_name='dwjk.dtp.QueryFillsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryFillsRequest.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_exchange_id', full_name='dwjk.dtp.QueryFillsRequest.order_exchange_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='dwjk.dtp.QueryFillsRequest.pagination', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryFillsRequest.exchange', index=3,
number=4, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.QueryFillsRequest.code', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_original_id', full_name='dwjk.dtp.QueryFillsRequest.order_original_id', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_side', full_name='dwjk.dtp.QueryFillsRequest.order_side', index=6,
number=7, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='include_cancel_fill', full_name='dwjk.dtp.QueryFillsRequest.include_cancel_fill', index=7,
number=8, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=8013,
serialized_end=8275,
)
_QUERYFILLSRESPONSE_FILL = _descriptor.Descriptor(
name='Fill',
full_name='dwjk.dtp.QueryFillsResponse.Fill',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='fill_exchange_id', full_name='dwjk.dtp.QueryFillsResponse.Fill.fill_exchange_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fill_time', full_name='dwjk.dtp.QueryFillsResponse.Fill.fill_time', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fill_status', full_name='dwjk.dtp.QueryFillsResponse.Fill.fill_status', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fill_price', full_name='dwjk.dtp.QueryFillsResponse.Fill.fill_price', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fill_quantity', full_name='dwjk.dtp.QueryFillsResponse.Fill.fill_quantity', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fill_amount', full_name='dwjk.dtp.QueryFillsResponse.Fill.fill_amount', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='clear_amount', full_name='dwjk.dtp.QueryFillsResponse.Fill.clear_amount', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_exchange_id', full_name='dwjk.dtp.QueryFillsResponse.Fill.order_exchange_id', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_original_id', full_name='dwjk.dtp.QueryFillsResponse.Fill.order_original_id', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryFillsResponse.Fill.exchange', index=9,
number=10, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.QueryFillsResponse.Fill.code', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='dwjk.dtp.QueryFillsResponse.Fill.name', index=11,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_side', full_name='dwjk.dtp.QueryFillsResponse.Fill.order_side', index=12,
number=13, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=8402,
serialized_end=8743,
)
_QUERYFILLSRESPONSE = _descriptor.Descriptor(
name='QueryFillsResponse',
full_name='dwjk.dtp.QueryFillsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='fill_list', full_name='dwjk.dtp.QueryFillsResponse.fill_list', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='dwjk.dtp.QueryFillsResponse.pagination', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_QUERYFILLSRESPONSE_FILL, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=8278,
serialized_end=8743,
)
_QUERYCAPITALREQUEST = _descriptor.Descriptor(
name='QueryCapitalRequest',
full_name='dwjk.dtp.QueryCapitalRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryCapitalRequest.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=8745,
serialized_end=8786,
)
_QUERYCAPITALRESPONSE = _descriptor.Descriptor(
name='QueryCapitalResponse',
full_name='dwjk.dtp.QueryCapitalResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryCapitalResponse.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='balance', full_name='dwjk.dtp.QueryCapitalResponse.balance', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='available', full_name='dwjk.dtp.QueryCapitalResponse.available', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='freeze', full_name='dwjk.dtp.QueryCapitalResponse.freeze', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='securities', full_name='dwjk.dtp.QueryCapitalResponse.securities', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total', full_name='dwjk.dtp.QueryCapitalResponse.total', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='clear_amount', full_name='dwjk.dtp.QueryCapitalResponse.clear_amount', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=8789,
serialized_end=8940,
)
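# Illustrative sketch: QueryCapitalResponse carries monetary amounts as
# strings (type=9) rather than floats, presumably to preserve decimal
# precision, so callers convert explicitly. A hypothetical consumer
# (`capital_response` assumed to be an already-parsed instance):
#
#   from decimal import Decimal
#   available = Decimal(capital_response.available)
#   frozen    = Decimal(capital_response.freeze)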
_QUERYPOSITIONREQUEST = _descriptor.Descriptor(
name='QueryPositionRequest',
full_name='dwjk.dtp.QueryPositionRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryPositionRequest.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryPositionRequest.exchange', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.QueryPositionRequest.code', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='dwjk.dtp.QueryPositionRequest.pagination', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=8943,
serialized_end=9084,
)
_QUERYPOSITIONRESPONSE_POSITIONDETAIL = _descriptor.Descriptor(
name='PositionDetail',
full_name='dwjk.dtp.QueryPositionResponse.PositionDetail',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryPositionResponse.PositionDetail.exchange', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.QueryPositionResponse.PositionDetail.code', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='dwjk.dtp.QueryPositionResponse.PositionDetail.name', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='balance', full_name='dwjk.dtp.QueryPositionResponse.PositionDetail.balance', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='available_quantity', full_name='dwjk.dtp.QueryPositionResponse.PositionDetail.available_quantity', index=4,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='freeze_quantity', full_name='dwjk.dtp.QueryPositionResponse.PositionDetail.freeze_quantity', index=5,
number=6, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='buy_quantity', full_name='dwjk.dtp.QueryPositionResponse.PositionDetail.buy_quantity', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sell_quantity', full_name='dwjk.dtp.QueryPositionResponse.PositionDetail.sell_quantity', index=7,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='market_value', full_name='dwjk.dtp.QueryPositionResponse.PositionDetail.market_value', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cost', full_name='dwjk.dtp.QueryPositionResponse.PositionDetail.cost', index=9,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='initial_quantity', full_name='dwjk.dtp.QueryPositionResponse.PositionDetail.initial_quantity', index=10,
number=11, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='purchase_quantity', full_name='dwjk.dtp.QueryPositionResponse.PositionDetail.purchase_quantity', index=11,
number=12, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='redemption_quantity', full_name='dwjk.dtp.QueryPositionResponse.PositionDetail.redemption_quantity', index=12,
number=13, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=9231,
serialized_end=9546,
)
_QUERYPOSITIONRESPONSE = _descriptor.Descriptor(
name='QueryPositionResponse',
full_name='dwjk.dtp.QueryPositionResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='position_list', full_name='dwjk.dtp.QueryPositionResponse.position_list', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='dwjk.dtp.QueryPositionResponse.pagination', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_QUERYPOSITIONRESPONSE_POSITIONDETAIL, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=9087,
serialized_end=9546,
)
_QUERYRATIONREQUEST = _descriptor.Descriptor(
name='QueryRationRequest',
full_name='dwjk.dtp.QueryRationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryRationRequest.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=9548,
serialized_end=9588,
)
_QUERYRATIONRESPONSE_RATION = _descriptor.Descriptor(
name='Ration',
full_name='dwjk.dtp.QueryRationResponse.Ration',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryRationResponse.Ration.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryRationResponse.Ration.exchange', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='quantity', full_name='dwjk.dtp.QueryRationResponse.Ration.quantity', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='science_quantity', full_name='dwjk.dtp.QueryRationResponse.Ration.science_quantity', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=9673,
serialized_end=9783,
)
_QUERYRATIONRESPONSE = _descriptor.Descriptor(
name='QueryRationResponse',
full_name='dwjk.dtp.QueryRationResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='ration_list', full_name='dwjk.dtp.QueryRationResponse.ration_list', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_QUERYRATIONRESPONSE_RATION, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=9591,
serialized_end=9783,
)
_QUERYBONDSTORAGEREQUEST = _descriptor.Descriptor(
name='QueryBondStorageRequest',
full_name='dwjk.dtp.QueryBondStorageRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryBondStorageRequest.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryBondStorageRequest.exchange', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='dwjk.dtp.QueryBondStorageRequest.pagination', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=9786,
serialized_end=9916,
)
_QUERYBONDSTORAGERESPONSE_BONDSTORAGEDETAIL = _descriptor.Descriptor(
name='BondStorageDetail',
full_name='dwjk.dtp.QueryBondStorageResponse.BondStorageDetail',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryBondStorageResponse.BondStorageDetail.exchange', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.QueryBondStorageResponse.BondStorageDetail.code', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='storage_quantity', full_name='dwjk.dtp.QueryBondStorageResponse.BondStorageDetail.storage_quantity', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=10069,
serialized_end=10166,
)
_QUERYBONDSTORAGERESPONSE = _descriptor.Descriptor(
name='QueryBondStorageResponse',
full_name='dwjk.dtp.QueryBondStorageResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='detail_list', full_name='dwjk.dtp.QueryBondStorageResponse.detail_list', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='dwjk.dtp.QueryBondStorageResponse.pagination', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_QUERYBONDSTORAGERESPONSE_BONDSTORAGEDETAIL, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=9919,
serialized_end=10166,
)
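# --- usage sketch (not part of the generated output) ----------------------
# A minimal, hedged example of how the bond-storage query messages declared
# above would typically be used. Assumption: as in standard protoc output,
# the concrete classes (QueryBondStorageRequest, QueryBondStorageResponse)
# are registered further down in this module via
# _reflection.GeneratedProtocolMessageType; the field names below are taken
# directly from the descriptors above. The helper names are illustrative only.
def _example_query_bond_storage(account_no, exchange, page=None):
# Build the request; `exchange` is the enum field with tag 2.
request = QueryBondStorageRequest()
request.account_no = account_no
request.exchange = exchange
if page is not None:
# `page` is assumed to be an instance of the Pagination message
# defined elsewhere in this file.
request.pagination.CopyFrom(page)
return request.SerializeToString()
def _example_read_bond_storage(payload):
# Parse a serialized QueryBondStorageResponse and yield (code, quantity).
response = QueryBondStorageResponse()
response.ParseFromString(payload)
for detail in response.detail_list:
yield detail.code, detail.storage_quantity
# ---------------------------------------------------------------------------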
_QUERYPENDINGREPOREQUEST = _descriptor.Descriptor(
name='QueryPendingRepoRequest',
full_name='dwjk.dtp.QueryPendingRepoRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryPendingRepoRequest.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryPendingRepoRequest.exchange', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='dwjk.dtp.QueryPendingRepoRequest.pagination', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=10169,
serialized_end=10299,
)
_QUERYPENDINGREPORESPONSE_PENDINGREPODETAIL = _descriptor.Descriptor(
name='PendingRepoDetail',
full_name='dwjk.dtp.QueryPendingRepoResponse.PendingRepoDetail',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='order_exchange_id', full_name='dwjk.dtp.QueryPendingRepoResponse.PendingRepoDetail.order_exchange_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryPendingRepoResponse.PendingRepoDetail.exchange', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='currency', full_name='dwjk.dtp.QueryPendingRepoResponse.PendingRepoDetail.currency', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.QueryPendingRepoResponse.PendingRepoDetail.code', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='dwjk.dtp.QueryPendingRepoResponse.PendingRepoDetail.name', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='transaction_type', full_name='dwjk.dtp.QueryPendingRepoResponse.PendingRepoDetail.transaction_type', index=5,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fill_date', full_name='dwjk.dtp.QueryPendingRepoResponse.PendingRepoDetail.fill_date', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fill_quantity', full_name='dwjk.dtp.QueryPendingRepoResponse.PendingRepoDetail.fill_quantity', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fill_price', full_name='dwjk.dtp.QueryPendingRepoResponse.PendingRepoDetail.fill_price', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fill_amount', full_name='dwjk.dtp.QueryPendingRepoResponse.PendingRepoDetail.fill_amount', index=9,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='interest_amount', full_name='dwjk.dtp.QueryPendingRepoResponse.PendingRepoDetail.interest_amount', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=10453,
serialized_end=10717,
)
_QUERYPENDINGREPORESPONSE = _descriptor.Descriptor(
name='QueryPendingRepoResponse',
full_name='dwjk.dtp.QueryPendingRepoResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='detail_list', full_name='dwjk.dtp.QueryPendingRepoResponse.detail_list', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='dwjk.dtp.QueryPendingRepoResponse.pagination', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_QUERYPENDINGREPORESPONSE_PENDINGREPODETAIL, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=10302,
serialized_end=10717,
)
_QUERYREPOACTUALOCCUPATIONDAYREQUEST = _descriptor.Descriptor(
name='QueryRepoActualOccupationDayRequest',
full_name='dwjk.dtp.QueryRepoActualOccupationDayRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryRepoActualOccupationDayRequest.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryRepoActualOccupationDayRequest.exchange', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.QueryRepoActualOccupationDayRequest.code', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=10719,
serialized_end=10828,
)
_QUERYREPOACTUALOCCUPATIONDAYRESPONSE_OCCUPATIONDAY = _descriptor.Descriptor(
name='OccupationDay',
full_name='dwjk.dtp.QueryRepoActualOccupationDayResponse.OccupationDay',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryRepoActualOccupationDayResponse.OccupationDay.exchange', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.QueryRepoActualOccupationDayResponse.OccupationDay.code', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='actual_day', full_name='dwjk.dtp.QueryRepoActualOccupationDayResponse.OccupationDay.actual_day', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='standard_day', full_name='dwjk.dtp.QueryRepoActualOccupationDayResponse.OccupationDay.standard_day', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=10953,
serialized_end=11062,
)
_QUERYREPOACTUALOCCUPATIONDAYRESPONSE = _descriptor.Descriptor(
name='QueryRepoActualOccupationDayResponse',
full_name='dwjk.dtp.QueryRepoActualOccupationDayResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='detal_list', full_name='dwjk.dtp.QueryRepoActualOccupationDayResponse.detal_list', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_QUERYREPOACTUALOCCUPATIONDAYRESPONSE_OCCUPATIONDAY, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=10831,
serialized_end=11062,
)
_QUERYQUOTAREQUEST = _descriptor.Descriptor(
name='QueryQuotaRequest',
full_name='dwjk.dtp.QueryQuotaRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryQuotaRequest.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryQuotaRequest.exchange', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=11064,
serialized_end=11141,
)
_QUERYQUOTARESPONSE_QUOTA = _descriptor.Descriptor(
name='Quota',
full_name='dwjk.dtp.QueryQuotaResponse.Quota',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryQuotaResponse.Quota.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryQuotaResponse.Quota.exchange', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_quota', full_name='dwjk.dtp.QueryQuotaResponse.Quota.total_quota', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='surplus_quota', full_name='dwjk.dtp.QueryQuotaResponse.Quota.surplus_quota', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='quota_status', full_name='dwjk.dtp.QueryQuotaResponse.Quota.quota_status', index=4,
number=5, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=11223,
serialized_end=11377,
)
_QUERYQUOTARESPONSE = _descriptor.Descriptor(
name='QueryQuotaResponse',
full_name='dwjk.dtp.QueryQuotaResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='quota_list', full_name='dwjk.dtp.QueryQuotaResponse.quota_list', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_QUERYQUOTARESPONSE_QUOTA, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=11144,
serialized_end=11377,
)
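# --- usage sketch (not part of the generated output) ----------------------
# Hedged illustration of the quota query pair above. Assumption: the
# QueryQuotaRequest / QueryQuotaResponse classes are generated later in this
# module, as is standard for protoc output; field names come from the
# _QUERYQUOTAREQUEST and _QUERYQUOTARESPONSE_QUOTA descriptors. The helper
# names are illustrative only.
def _example_build_query_quota_request(account_no, exchange):
request = QueryQuotaRequest()
request.account_no = account_no   # string, tag 1
request.exchange = exchange       # enum, tag 2
return request.SerializeToString()
def _example_read_quota_response(payload):
# total_quota / surplus_quota are declared as strings (type=9), so they
# come back as text rather than numbers.
response = QueryQuotaResponse()
response.ParseFromString(payload)
return [(q.account_no, q.total_quota, q.surplus_quota, q.quota_status)
for q in response.quota_list]
# ---------------------------------------------------------------------------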
_QUERYVOTEINFOREQUEST = _descriptor.Descriptor(
name='QueryVoteInfoRequest',
full_name='dwjk.dtp.QueryVoteInfoRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryVoteInfoRequest.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryVoteInfoRequest.exchange', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.QueryVoteInfoRequest.code', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='dwjk.dtp.QueryVoteInfoRequest.pagination', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=11380,
serialized_end=11521,
)
_QUERYVOTEINFORESPONSE_VOTEINFO = _descriptor.Descriptor(
name='VoteInfo',
full_name='dwjk.dtp.QueryVoteInfoResponse.VoteInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryVoteInfoResponse.VoteInfo.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryVoteInfoResponse.VoteInfo.exchange', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.QueryVoteInfoResponse.VoteInfo.code', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='placard_id', full_name='dwjk.dtp.QueryVoteInfoResponse.VoteInfo.placard_id', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='motion_id', full_name='dwjk.dtp.QueryVoteInfoResponse.VoteInfo.motion_id', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='motion_name', full_name='dwjk.dtp.QueryVoteInfoResponse.VoteInfo.motion_name', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='motion_type', full_name='dwjk.dtp.QueryVoteInfoResponse.VoteInfo.motion_type', index=6,
number=7, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='abstention_allow', full_name='dwjk.dtp.QueryVoteInfoResponse.VoteInfo.abstention_allow', index=7,
number=8, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='start_date', full_name='dwjk.dtp.QueryVoteInfoResponse.VoteInfo.start_date', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='end_date', full_name='dwjk.dtp.QueryVoteInfoResponse.VoteInfo.end_date', index=9,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=11663,
serialized_end=11912,
)
_QUERYVOTEINFORESPONSE = _descriptor.Descriptor(
name='QueryVoteInfoResponse',
full_name='dwjk.dtp.QueryVoteInfoResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='vote_info_list', full_name='dwjk.dtp.QueryVoteInfoResponse.vote_info_list', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='dwjk.dtp.QueryVoteInfoResponse.pagination', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_QUERYVOTEINFORESPONSE_VOTEINFO, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=11524,
serialized_end=11912,
)
_QUERYDECLAREINFOREQUEST = _descriptor.Descriptor(
name='QueryDeclareInfoRequest',
full_name='dwjk.dtp.QueryDeclareInfoRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryDeclareInfoRequest.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryDeclareInfoRequest.exchange', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.QueryDeclareInfoRequest.code', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='business_type', full_name='dwjk.dtp.QueryDeclareInfoRequest.business_type', index=3,
number=4, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='dwjk.dtp.QueryDeclareInfoRequest.pagination', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=11915,
serialized_end=12106,
)
_QUERYDECLAREINFORESPONSE_DECLAREINFO = _descriptor.Descriptor(
name='DeclareInfo',
full_name='dwjk.dtp.QueryDeclareInfoResponse.DeclareInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryDeclareInfoResponse.DeclareInfo.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryDeclareInfoResponse.DeclareInfo.exchange', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.QueryDeclareInfoResponse.DeclareInfo.code', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='business_type', full_name='dwjk.dtp.QueryDeclareInfoResponse.DeclareInfo.business_type', index=3,
number=4, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='authority_code', full_name='dwjk.dtp.QueryDeclareInfoResponse.DeclareInfo.authority_code', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='price', full_name='dwjk.dtp.QueryDeclareInfoResponse.DeclareInfo.price', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='start_date', full_name='dwjk.dtp.QueryDeclareInfoResponse.DeclareInfo.start_date', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='end_date', full_name='dwjk.dtp.QueryDeclareInfoResponse.DeclareInfo.end_date', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=12260,
serialized_end=12469,
)
_QUERYDECLAREINFORESPONSE = _descriptor.Descriptor(
name='QueryDeclareInfoResponse',
full_name='dwjk.dtp.QueryDeclareInfoResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='declare_info_list', full_name='dwjk.dtp.QueryDeclareInfoResponse.declare_info_list', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='dwjk.dtp.QueryDeclareInfoResponse.pagination', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_QUERYDECLAREINFORESPONSE_DECLAREINFO, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=12109,
serialized_end=12469,
)
_QUERYBANKSERVICEREQUEST = _descriptor.Descriptor(
name='QueryBankServiceRequest',
full_name='dwjk.dtp.QueryBankServiceRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryBankServiceRequest.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=12471,
serialized_end=12516,
)
_QUERYBANKSERVICERESPONSE_BANKSERVICEINFO = _descriptor.Descriptor(
name='BankServiceInfo',
full_name='dwjk.dtp.QueryBankServiceResponse.BankServiceInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='request_no', full_name='dwjk.dtp.QueryBankServiceResponse.BankServiceInfo.request_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryBankServiceResponse.BankServiceInfo.account_no', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='date', full_name='dwjk.dtp.QueryBankServiceResponse.BankServiceInfo.date', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='time', full_name='dwjk.dtp.QueryBankServiceResponse.BankServiceInfo.time', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='result', full_name='dwjk.dtp.QueryBankServiceResponse.BankServiceInfo.result', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='message', full_name='dwjk.dtp.QueryBankServiceResponse.BankServiceInfo.message', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='service_code', full_name='dwjk.dtp.QueryBankServiceResponse.BankServiceInfo.service_code', index=6,
number=7, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='amount', full_name='dwjk.dtp.QueryBankServiceResponse.BankServiceInfo.amount', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='currency', full_name='dwjk.dtp.QueryBankServiceResponse.BankServiceInfo.currency', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=12619,
serialized_end=12820,
)
_QUERYBANKSERVICERESPONSE = _descriptor.Descriptor(
name='QueryBankServiceResponse',
full_name='dwjk.dtp.QueryBankServiceResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='info_list', full_name='dwjk.dtp.QueryBankServiceResponse.info_list', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_QUERYBANKSERVICERESPONSE_BANKSERVICEINFO, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=12519,
serialized_end=12820,
)
_QUERYMARGINORDERSREQUEST = _descriptor.Descriptor(
name='QueryMarginOrdersRequest',
full_name='dwjk.dtp.QueryMarginOrdersRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryMarginOrdersRequest.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_exchange_id', full_name='dwjk.dtp.QueryMarginOrdersRequest.order_exchange_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='query_criteria', full_name='dwjk.dtp.QueryMarginOrdersRequest.query_criteria', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='dwjk.dtp.QueryMarginOrdersRequest.pagination', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryMarginOrdersRequest.exchange', index=4,
number=5, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.QueryMarginOrdersRequest.code', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_original_id', full_name='dwjk.dtp.QueryMarginOrdersRequest.order_original_id', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_side', full_name='dwjk.dtp.QueryMarginOrdersRequest.order_side', index=7,
number=8, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=12823,
serialized_end=13117,
)
_QUERYMARGINORDERSRESPONSE_ORDER = _descriptor.Descriptor(
name='Order',
full_name='dwjk.dtp.QueryMarginOrdersResponse.Order',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='order_exchange_id', full_name='dwjk.dtp.QueryMarginOrdersResponse.Order.order_exchange_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_original_id', full_name='dwjk.dtp.QueryMarginOrdersResponse.Order.order_original_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryMarginOrdersResponse.Order.exchange', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.QueryMarginOrdersResponse.Order.code', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='dwjk.dtp.QueryMarginOrdersResponse.Order.name', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='price', full_name='dwjk.dtp.QueryMarginOrdersResponse.Order.price', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='quantity', full_name='dwjk.dtp.QueryMarginOrdersResponse.Order.quantity', index=6,
number=7, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_side', full_name='dwjk.dtp.QueryMarginOrdersResponse.Order.order_side', index=7,
number=8, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_type', full_name='dwjk.dtp.QueryMarginOrdersResponse.Order.order_type', index=8,
number=9, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status', full_name='dwjk.dtp.QueryMarginOrdersResponse.Order.status', index=9,
number=10, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_time', full_name='dwjk.dtp.QueryMarginOrdersResponse.Order.order_time', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryMarginOrdersResponse.Order.account_no', index=11,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='average_fill_price', full_name='dwjk.dtp.QueryMarginOrdersResponse.Order.average_fill_price', index=12,
number=13, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_fill_quantity', full_name='dwjk.dtp.QueryMarginOrdersResponse.Order.total_fill_quantity', index=13,
number=14, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_fill_amount', full_name='dwjk.dtp.QueryMarginOrdersResponse.Order.total_fill_amount', index=14,
number=15, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='freeze_amount', full_name='dwjk.dtp.QueryMarginOrdersResponse.Order.freeze_amount', index=15,
number=16, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='clear_amount', full_name='dwjk.dtp.QueryMarginOrdersResponse.Order.clear_amount', index=16,
number=17, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_cancelled_quantity', full_name='dwjk.dtp.QueryMarginOrdersResponse.Order.total_cancelled_quantity', index=17,
number=18, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status_message', full_name='dwjk.dtp.QueryMarginOrdersResponse.Order.status_message', index=18,
number=19, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='position_type', full_name='dwjk.dtp.QueryMarginOrdersResponse.Order.position_type', index=19,
number=20, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=13260,
serialized_end=13822,
)
_QUERYMARGINORDERSRESPONSE = _descriptor.Descriptor(
name='QueryMarginOrdersResponse',
full_name='dwjk.dtp.QueryMarginOrdersResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='order_list', full_name='dwjk.dtp.QueryMarginOrdersResponse.order_list', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='dwjk.dtp.QueryMarginOrdersResponse.pagination', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_QUERYMARGINORDERSRESPONSE_ORDER, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=13120,
serialized_end=13822,
)
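# --- usage sketch (not part of the generated output) ----------------------
# Hedged example of a paginated margin-order query built from the descriptors
# above. Assumption: QueryMarginOrdersRequest is registered later in this
# module (standard protoc output); the optional filters mirror the request
# fields (code, order_side, pagination). The helper name is illustrative only.
def _example_build_margin_orders_request(account_no, exchange, code=None,
order_side=None, page=None):
request = QueryMarginOrdersRequest()
request.account_no = account_no
request.exchange = exchange
if code is not None:
request.code = code
if order_side is not None:
request.order_side = order_side
if page is not None:
# `page` is assumed to be a Pagination message defined elsewhere
# in this file.
request.pagination.CopyFrom(page)
return request.SerializeToString()
# ---------------------------------------------------------------------------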
_QUERYMARGINFILLSREQUEST = _descriptor.Descriptor(
name='QueryMarginFillsRequest',
full_name='dwjk.dtp.QueryMarginFillsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryMarginFillsRequest.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_exchange_id', full_name='dwjk.dtp.QueryMarginFillsRequest.order_exchange_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='dwjk.dtp.QueryMarginFillsRequest.pagination', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryMarginFillsRequest.exchange', index=3,
number=4, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.QueryMarginFillsRequest.code', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_original_id', full_name='dwjk.dtp.QueryMarginFillsRequest.order_original_id', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_side', full_name='dwjk.dtp.QueryMarginFillsRequest.order_side', index=6,
number=7, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='include_cancel_fill', full_name='dwjk.dtp.QueryMarginFillsRequest.include_cancel_fill', index=7,
number=8, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=13825,
serialized_end=14093,
)
_QUERYMARGINFILLSRESPONSE_FILL = _descriptor.Descriptor(
name='Fill',
full_name='dwjk.dtp.QueryMarginFillsResponse.Fill',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='fill_exchange_id', full_name='dwjk.dtp.QueryMarginFillsResponse.Fill.fill_exchange_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fill_time', full_name='dwjk.dtp.QueryMarginFillsResponse.Fill.fill_time', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fill_status', full_name='dwjk.dtp.QueryMarginFillsResponse.Fill.fill_status', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fill_price', full_name='dwjk.dtp.QueryMarginFillsResponse.Fill.fill_price', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fill_quantity', full_name='dwjk.dtp.QueryMarginFillsResponse.Fill.fill_quantity', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fill_amount', full_name='dwjk.dtp.QueryMarginFillsResponse.Fill.fill_amount', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='clear_amount', full_name='dwjk.dtp.QueryMarginFillsResponse.Fill.clear_amount', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_exchange_id', full_name='dwjk.dtp.QueryMarginFillsResponse.Fill.order_exchange_id', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_original_id', full_name='dwjk.dtp.QueryMarginFillsResponse.Fill.order_original_id', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryMarginFillsResponse.Fill.exchange', index=9,
number=10, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.QueryMarginFillsResponse.Fill.code', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='dwjk.dtp.QueryMarginFillsResponse.Fill.name', index=11,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_side', full_name='dwjk.dtp.QueryMarginFillsResponse.Fill.order_side', index=12,
number=13, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='position_type', full_name='dwjk.dtp.QueryMarginFillsResponse.Fill.position_type', index=13,
number=14, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=14232,
serialized_end=14627,
)
_QUERYMARGINFILLSRESPONSE = _descriptor.Descriptor(
name='QueryMarginFillsResponse',
full_name='dwjk.dtp.QueryMarginFillsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='fill_list', full_name='dwjk.dtp.QueryMarginFillsResponse.fill_list', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='dwjk.dtp.QueryMarginFillsResponse.pagination', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_QUERYMARGINFILLSRESPONSE_FILL, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=14096,
serialized_end=14627,
)
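# --- usage sketch (not part of the generated output) ----------------------
# Hedged example for the margin-fill query pair above. Assumption: the
# QueryMarginFillsResponse class is generated later in this module; field
# names are taken from the _QUERYMARGINFILLSRESPONSE_FILL descriptor. The
# helper name is illustrative only.
def _example_read_margin_fills(payload):
# Parse a serialized QueryMarginFillsResponse and summarize each fill.
# fill_price / fill_amount are string fields, fill_quantity is int32.
response = QueryMarginFillsResponse()
response.ParseFromString(payload)
return [(f.fill_exchange_id, f.code, f.fill_quantity, f.fill_price)
for f in response.fill_list]
# ---------------------------------------------------------------------------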
_QUERYMARGINCAPITALREQUEST = _descriptor.Descriptor(
name='QueryMarginCapitalRequest',
full_name='dwjk.dtp.QueryMarginCapitalRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryMarginCapitalRequest.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=14629,
serialized_end=14676,
)
_QUERYMARGINCAPITALRESPONSE = _descriptor.Descriptor(
name='QueryMarginCapitalResponse',
full_name='dwjk.dtp.QueryMarginCapitalResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryMarginCapitalResponse.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='assure_asset', full_name='dwjk.dtp.QueryMarginCapitalResponse.assure_asset', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_liability', full_name='dwjk.dtp.QueryMarginCapitalResponse.total_liability', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='net_asset', full_name='dwjk.dtp.QueryMarginCapitalResponse.net_asset', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='maintain_value', full_name='dwjk.dtp.QueryMarginCapitalResponse.maintain_value', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='securities', full_name='dwjk.dtp.QueryMarginCapitalResponse.securities', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='available_margin', full_name='dwjk.dtp.QueryMarginCapitalResponse.available_margin', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pay_margin', full_name='dwjk.dtp.QueryMarginCapitalResponse.pay_margin', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='available_collateral', full_name='dwjk.dtp.QueryMarginCapitalResponse.available_collateral', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='available_finance', full_name='dwjk.dtp.QueryMarginCapitalResponse.available_finance', index=9,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='available_security', full_name='dwjk.dtp.QueryMarginCapitalResponse.available_security', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='available_cash', full_name='dwjk.dtp.QueryMarginCapitalResponse.available_cash', index=11,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cash_asset', full_name='dwjk.dtp.QueryMarginCapitalResponse.cash_asset', index=12,
number=13, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='transfer_asset', full_name='dwjk.dtp.QueryMarginCapitalResponse.transfer_asset', index=13,
number=14, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='finance_compact_amount', full_name='dwjk.dtp.QueryMarginCapitalResponse.finance_compact_amount', index=14,
number=15, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='finance_compact_commission', full_name='dwjk.dtp.QueryMarginCapitalResponse.finance_compact_commission', index=15,
number=16, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='finance_compact_interest', full_name='dwjk.dtp.QueryMarginCapitalResponse.finance_compact_interest', index=16,
number=17, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='finance_compact_revenue', full_name='dwjk.dtp.QueryMarginCapitalResponse.finance_compact_revenue', index=17,
number=18, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='finance_available_amount', full_name='dwjk.dtp.QueryMarginCapitalResponse.finance_available_amount', index=18,
number=19, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='finance_pay_amount', full_name='dwjk.dtp.QueryMarginCapitalResponse.finance_pay_amount', index=19,
number=20, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='finance_max_amount', full_name='dwjk.dtp.QueryMarginCapitalResponse.finance_max_amount', index=20,
number=21, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='finance_pay_margin', full_name='dwjk.dtp.QueryMarginCapitalResponse.finance_pay_margin', index=21,
number=22, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='finance_securities', full_name='dwjk.dtp.QueryMarginCapitalResponse.finance_securities', index=22,
number=23, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='security_loan_compact_amount', full_name='dwjk.dtp.QueryMarginCapitalResponse.security_loan_compact_amount', index=23,
number=24, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='security_loan_compact_commission', full_name='dwjk.dtp.QueryMarginCapitalResponse.security_loan_compact_commission', index=24,
number=25, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='security_loan_compact_interest', full_name='dwjk.dtp.QueryMarginCapitalResponse.security_loan_compact_interest', index=25,
number=26, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='security_loan_compact_revenue', full_name='dwjk.dtp.QueryMarginCapitalResponse.security_loan_compact_revenue', index=26,
number=27, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='security_loan_available', full_name='dwjk.dtp.QueryMarginCapitalResponse.security_loan_available', index=27,
number=28, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='security_loan_pay_amount', full_name='dwjk.dtp.QueryMarginCapitalResponse.security_loan_pay_amount', index=28,
number=29, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='security_loan_max_amount', full_name='dwjk.dtp.QueryMarginCapitalResponse.security_loan_max_amount', index=29,
number=30, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='security_loan_pay_margin', full_name='dwjk.dtp.QueryMarginCapitalResponse.security_loan_pay_margin', index=30,
number=31, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='security_loan_securities', full_name='dwjk.dtp.QueryMarginCapitalResponse.security_loan_securities', index=31,
number=32, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='security_loan_total', full_name='dwjk.dtp.QueryMarginCapitalResponse.security_loan_total', index=32,
number=33, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='security_loan_balance', full_name='dwjk.dtp.QueryMarginCapitalResponse.security_loan_balance', index=33,
number=34, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=14679,
serialized_end=15705,
)
_QUERYMARGINPOSITIONREQUEST = _descriptor.Descriptor(
name='QueryMarginPositionRequest',
full_name='dwjk.dtp.QueryMarginPositionRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryMarginPositionRequest.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryMarginPositionRequest.exchange', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.QueryMarginPositionRequest.code', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='dwjk.dtp.QueryMarginPositionRequest.pagination', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=15708,
serialized_end=15855,
)
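# Nested message pattern: the PositionDetail descriptor below is declared
# standalone, listed in its parent's nested_types, and bound to the parent via
# containing_type in the cross-reference section further below.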
_QUERYMARGINPOSITIONRESPONSE_POSITIONDETAIL = _descriptor.Descriptor(
name='PositionDetail',
full_name='dwjk.dtp.QueryMarginPositionResponse.PositionDetail',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryMarginPositionResponse.PositionDetail.exchange', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.QueryMarginPositionResponse.PositionDetail.code', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='dwjk.dtp.QueryMarginPositionResponse.PositionDetail.name', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='balance', full_name='dwjk.dtp.QueryMarginPositionResponse.PositionDetail.balance', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='available_quantity', full_name='dwjk.dtp.QueryMarginPositionResponse.PositionDetail.available_quantity', index=4,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='freeze_quantity', full_name='dwjk.dtp.QueryMarginPositionResponse.PositionDetail.freeze_quantity', index=5,
number=6, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='buy_quantity', full_name='dwjk.dtp.QueryMarginPositionResponse.PositionDetail.buy_quantity', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sell_quantity', full_name='dwjk.dtp.QueryMarginPositionResponse.PositionDetail.sell_quantity', index=7,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='market_value', full_name='dwjk.dtp.QueryMarginPositionResponse.PositionDetail.market_value', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cost', full_name='dwjk.dtp.QueryMarginPositionResponse.PositionDetail.cost', index=9,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='initial_quantity', full_name='dwjk.dtp.QueryMarginPositionResponse.PositionDetail.initial_quantity', index=10,
number=11, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=9231,
serialized_end=9490,
)
_QUERYMARGINPOSITIONRESPONSE = _descriptor.Descriptor(
name='QueryMarginPositionResponse',
full_name='dwjk.dtp.QueryMarginPositionResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='position_list', full_name='dwjk.dtp.QueryMarginPositionResponse.position_list', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='dwjk.dtp.QueryMarginPositionResponse.pagination', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_QUERYMARGINPOSITIONRESPONSE_POSITIONDETAIL, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=15858,
serialized_end=16273,
)
_QUERYMARGINSECURITIESREQUEST = _descriptor.Descriptor(
name='QueryMarginSecuritiesRequest',
full_name='dwjk.dtp.QueryMarginSecuritiesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryMarginSecuritiesRequest.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryMarginSecuritiesRequest.exchange', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.QueryMarginSecuritiesRequest.code', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='include_zero', full_name='dwjk.dtp.QueryMarginSecuritiesRequest.include_zero', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='position_type', full_name='dwjk.dtp.QueryMarginSecuritiesRequest.position_type', index=4,
number=5, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='dwjk.dtp.QueryMarginSecuritiesRequest.pagination', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=16276,
serialized_end=16501,
)
_QUERYMARGINSECURITIESRESPONSE_SECURITYDETAIL = _descriptor.Descriptor(
name='SecurityDetail',
full_name='dwjk.dtp.QueryMarginSecuritiesResponse.SecurityDetail',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryMarginSecuritiesResponse.SecurityDetail.exchange', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.QueryMarginSecuritiesResponse.SecurityDetail.code', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='dwjk.dtp.QueryMarginSecuritiesResponse.SecurityDetail.name', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='security_ratio', full_name='dwjk.dtp.QueryMarginSecuritiesResponse.SecurityDetail.security_ratio', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status', full_name='dwjk.dtp.QueryMarginSecuritiesResponse.SecurityDetail.status', index=4,
number=5, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='quantity', full_name='dwjk.dtp.QueryMarginSecuritiesResponse.SecurityDetail.quantity', index=5,
number=6, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='position_type', full_name='dwjk.dtp.QueryMarginSecuritiesResponse.SecurityDetail.position_type', index=6,
number=7, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=16664,
serialized_end=16888,
)
_QUERYMARGINSECURITIESRESPONSE = _descriptor.Descriptor(
name='QueryMarginSecuritiesResponse',
full_name='dwjk.dtp.QueryMarginSecuritiesResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='security_list', full_name='dwjk.dtp.QueryMarginSecuritiesResponse.security_list', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='dwjk.dtp.QueryMarginSecuritiesResponse.pagination', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_QUERYMARGINSECURITIESRESPONSE_SECURITYDETAIL, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=16504,
serialized_end=16888,
)
_QUERYMARGINCOMPACTREQUEST = _descriptor.Descriptor(
name='QueryMarginCompactRequest',
full_name='dwjk.dtp.QueryMarginCompactRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryMarginCompactRequest.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='compact_no', full_name='dwjk.dtp.QueryMarginCompactRequest.compact_no', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='compact_type', full_name='dwjk.dtp.QueryMarginCompactRequest.compact_type', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='position_type', full_name='dwjk.dtp.QueryMarginCompactRequest.position_type', index=3,
number=4, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='dwjk.dtp.QueryMarginCompactRequest.pagination', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=16891,
serialized_end=17104,
)
_QUERYMARGINCOMPACTRESPONSE_COMPACTDETAIL = _descriptor.Descriptor(
name='CompactDetail',
full_name='dwjk.dtp.QueryMarginCompactResponse.CompactDetail',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='compact_date', full_name='dwjk.dtp.QueryMarginCompactResponse.CompactDetail.compact_date', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='compact_no', full_name='dwjk.dtp.QueryMarginCompactResponse.CompactDetail.compact_no', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryMarginCompactResponse.CompactDetail.exchange', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.QueryMarginCompactResponse.CompactDetail.code', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='compact_type', full_name='dwjk.dtp.QueryMarginCompactResponse.CompactDetail.compact_type', index=4,
number=5, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status', full_name='dwjk.dtp.QueryMarginCompactResponse.CompactDetail.status', index=5,
number=6, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='open_amount', full_name='dwjk.dtp.QueryMarginCompactResponse.CompactDetail.open_amount', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='open_quantity', full_name='dwjk.dtp.QueryMarginCompactResponse.CompactDetail.open_quantity', index=7,
number=8, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='open_commission', full_name='dwjk.dtp.QueryMarginCompactResponse.CompactDetail.open_commission', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='no_pay_amount', full_name='dwjk.dtp.QueryMarginCompactResponse.CompactDetail.no_pay_amount', index=9,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='no_pay_quantity', full_name='dwjk.dtp.QueryMarginCompactResponse.CompactDetail.no_pay_quantity', index=10,
number=11, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='no_pay_commission', full_name='dwjk.dtp.QueryMarginCompactResponse.CompactDetail.no_pay_commission', index=11,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='no_pay_interest', full_name='dwjk.dtp.QueryMarginCompactResponse.CompactDetail.no_pay_interest', index=12,
number=13, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='return_total_interest', full_name='dwjk.dtp.QueryMarginCompactResponse.CompactDetail.return_total_interest', index=13,
number=14, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='year_interest_rate', full_name='dwjk.dtp.QueryMarginCompactResponse.CompactDetail.year_interest_rate', index=14,
number=15, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='return_close_date', full_name='dwjk.dtp.QueryMarginCompactResponse.CompactDetail.return_close_date', index=15,
number=16, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='position_type', full_name='dwjk.dtp.QueryMarginCompactResponse.CompactDetail.position_type', index=16,
number=17, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='finance_revenue', full_name='dwjk.dtp.QueryMarginCompactResponse.CompactDetail.finance_revenue', index=17,
number=18, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='security_loan_revenue', full_name='dwjk.dtp.QueryMarginCompactResponse.CompactDetail.security_loan_revenue', index=18,
number=19, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=17259,
serialized_end=17819,
)
_QUERYMARGINCOMPACTRESPONSE = _descriptor.Descriptor(
name='QueryMarginCompactResponse',
full_name='dwjk.dtp.QueryMarginCompactResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='compact_list', full_name='dwjk.dtp.QueryMarginCompactResponse.compact_list', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='dwjk.dtp.QueryMarginCompactResponse.pagination', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_QUERYMARGINCOMPACTRESPONSE_COMPACTDETAIL, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=17107,
serialized_end=17819,
)
_QUERYMARGINCOMPACTWATERREQUEST = _descriptor.Descriptor(
name='QueryMarginCompactWaterRequest',
full_name='dwjk.dtp.QueryMarginCompactWaterRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='account_no', full_name='dwjk.dtp.QueryMarginCompactWaterRequest.account_no', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryMarginCompactWaterRequest.exchange', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.QueryMarginCompactWaterRequest.code', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='compact_no', full_name='dwjk.dtp.QueryMarginCompactWaterRequest.compact_no', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='compact_type', full_name='dwjk.dtp.QueryMarginCompactWaterRequest.compact_type', index=4,
number=5, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='dwjk.dtp.QueryMarginCompactWaterRequest.pagination', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=17822,
serialized_end=18038,
)
_QUERYMARGINCOMPACTWATERRESPONSE_WATERDETAIL = _descriptor.Descriptor(
name='WaterDetail',
full_name='dwjk.dtp.QueryMarginCompactWaterResponse.WaterDetail',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='compact_date', full_name='dwjk.dtp.QueryMarginCompactWaterResponse.WaterDetail.compact_date', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='compact_no', full_name='dwjk.dtp.QueryMarginCompactWaterResponse.WaterDetail.compact_no', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='dwjk.dtp.QueryMarginCompactWaterResponse.WaterDetail.exchange', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='dwjk.dtp.QueryMarginCompactWaterResponse.WaterDetail.code', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_exchange_id', full_name='dwjk.dtp.QueryMarginCompactWaterResponse.WaterDetail.order_exchange_id', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='compact_type', full_name='dwjk.dtp.QueryMarginCompactWaterResponse.WaterDetail.compact_type', index=5,
number=6, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pay_amount', full_name='dwjk.dtp.QueryMarginCompactWaterResponse.WaterDetail.pay_amount', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pay_quantity', full_name='dwjk.dtp.QueryMarginCompactWaterResponse.WaterDetail.pay_quantity', index=7,
number=8, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pay_commission', full_name='dwjk.dtp.QueryMarginCompactWaterResponse.WaterDetail.pay_commission', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pay_interest', full_name='dwjk.dtp.QueryMarginCompactWaterResponse.WaterDetail.pay_interest', index=9,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='no_pay_amount', full_name='dwjk.dtp.QueryMarginCompactWaterResponse.WaterDetail.no_pay_amount', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='no_pay_quantity', full_name='dwjk.dtp.QueryMarginCompactWaterResponse.WaterDetail.no_pay_quantity', index=11,
number=12, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='no_pay_commission', full_name='dwjk.dtp.QueryMarginCompactWaterResponse.WaterDetail.no_pay_commission', index=12,
number=13, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='no_pay_interest', full_name='dwjk.dtp.QueryMarginCompactWaterResponse.WaterDetail.no_pay_interest', index=13,
number=14, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=18199,
serialized_end=18566,
)
_QUERYMARGINCOMPACTWATERRESPONSE = _descriptor.Descriptor(
name='QueryMarginCompactWaterResponse',
full_name='dwjk.dtp.QueryMarginCompactWaterResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='water_list', full_name='dwjk.dtp.QueryMarginCompactWaterResponse.water_list', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='dwjk.dtp.QueryMarginCompactWaterResponse.pagination', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_QUERYMARGINCOMPACTWATERRESPONSE_WATERDETAIL, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=18041,
serialized_end=18566,
)
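# QueryPagination is the shared paging cursor (string offset + uint32 size)
# that the request/response `pagination` fields in this module resolve to.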
_QUERYPAGINATION = _descriptor.Descriptor(
name='QueryPagination',
full_name='dwjk.dtp.QueryPagination',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='offset', full_name='dwjk.dtp.QueryPagination.offset', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='size', full_name='dwjk.dtp.QueryPagination.size', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=18568,
serialized_end=18615,
)
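# Illustrative sketch only (assumption: this generated module is importable as
# `dtp_api_pb2`; the real name depends on the source .proto file). Once the
# message classes are built from these descriptors, paging through margin
# positions would look like:
#
#   import dtp_api_pb2 as pb
#   req = pb.QueryMarginPositionRequest(
#       account_no='<account_no>',
#       pagination=pb.QueryPagination(offset='', size=100))
#   payload = req.SerializeToString()          # request bytes for the wire
#   resp = pb.QueryMarginPositionResponse()
#   resp.ParseFromString(reply_bytes)          # reply_bytes: hypothetical reply
#   for detail in resp.position_list:
#       print(detail.code, detail.balance, detail.available_quantity)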
_ALGORITHMICSEND = _descriptor.Descriptor(
name='AlgorithmicSend',
full_name='dwjk.dtp.AlgorithmicSend',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='client_id', full_name='dwjk.dtp.AlgorithmicSend.client_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sub_original_id', full_name='dwjk.dtp.AlgorithmicSend.sub_original_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=18617,
serialized_end=18678,
)
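# Cross-reference resolution: the assignments below fill in the enum_type,
# message_type and containing_type references that were declared as None in the
# FieldDescriptor definitions above, linking fields to the enums in the imported
# dtp_dot_type__pb2 module and to the nested message descriptors in this file.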
_RESPONSEHEADER.fields_by_name['code'].enum_type = dtp_dot_type__pb2._RESPONSESTATUSCODE
_REPORTHEADER.fields_by_name['code'].enum_type = dtp_dot_type__pb2._RESPONSESTATUSCODE
_PLACEORDER.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_PLACEORDER.fields_by_name['order_side'].enum_type = dtp_dot_type__pb2._ORDERSIDE
_PLACEORDER.fields_by_name['order_type'].enum_type = dtp_dot_type__pb2._ORDERTYPE
_PLACEORDER.fields_by_name['order_limit'].enum_type = dtp_dot_type__pb2._ORDERLIMIT
_CANCELORDER.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_CANCELRESPONSE.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_PLACEVOTE.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_PLACEDECLARE.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_PLACEDECLARE.fields_by_name['business_type'].enum_type = dtp_dot_type__pb2._BUSINESSTYPE
_PLACEDECLARE.fields_by_name['declare_type'].enum_type = dtp_dot_type__pb2._DECLARETYPE
_MARGINPLACEORDER.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_MARGINPLACEORDER.fields_by_name['order_side'].enum_type = dtp_dot_type__pb2._ORDERSIDE
_MARGINPLACEORDER.fields_by_name['order_type'].enum_type = dtp_dot_type__pb2._ORDERTYPE
_MARGINPLACEORDER.fields_by_name['order_limit'].enum_type = dtp_dot_type__pb2._ORDERLIMIT
_MARGINPLACEORDER.fields_by_name['position_type'].enum_type = dtp_dot_type__pb2._COMPACTPOSITIONTYPE
_MARGINCANCELORDER.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_MARGINCANCELRESPONSE.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_MARGINREPAYAMOUNTORDER.fields_by_name['position_type'].enum_type = dtp_dot_type__pb2._COMPACTPOSITIONTYPE
_MARGINREPAYSECURITYORDER.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_MARGINREPAYSECURITYORDER.fields_by_name['position_type'].enum_type = dtp_dot_type__pb2._COMPACTPOSITIONTYPE
_PLACEBATCHORDER_BATCHORDERITEM.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_PLACEBATCHORDER_BATCHORDERITEM.fields_by_name['order_side'].enum_type = dtp_dot_type__pb2._ORDERSIDE
_PLACEBATCHORDER_BATCHORDERITEM.fields_by_name['order_type'].enum_type = dtp_dot_type__pb2._ORDERTYPE
_PLACEBATCHORDER_BATCHORDERITEM.fields_by_name['order_limit'].enum_type = dtp_dot_type__pb2._ORDERLIMIT
_PLACEBATCHORDER_BATCHORDERITEM.containing_type = _PLACEBATCHORDER
_PLACEBATCHORDER.fields_by_name['order_list'].message_type = _PLACEBATCHORDER_BATCHORDERITEM
_PLACEDREPORT.fields_by_name['status'].enum_type = dtp_dot_type__pb2._ORDERSTATUS
_PLACEDREPORT.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_PLACEDREPORT.fields_by_name['order_side'].enum_type = dtp_dot_type__pb2._ORDERSIDE
_FILLREPORT.fields_by_name['fill_status'].enum_type = dtp_dot_type__pb2._FILLSTATUS
_FILLREPORT.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_FILLREPORT.fields_by_name['order_side'].enum_type = dtp_dot_type__pb2._ORDERSIDE
_CANCELLATIONREPORT.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_CANCELLATIONREPORT.fields_by_name['order_side'].enum_type = dtp_dot_type__pb2._ORDERSIDE
_CANCELLATIONREPORT.fields_by_name['status'].enum_type = dtp_dot_type__pb2._ORDERSTATUS
_VOTEREPORT.fields_by_name['status'].enum_type = dtp_dot_type__pb2._ORDERSTATUS
_VOTEREPORT.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_DECLAREREPORT.fields_by_name['status'].enum_type = dtp_dot_type__pb2._ORDERSTATUS
_DECLAREREPORT.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_DECLAREREPORT.fields_by_name['business_type'].enum_type = dtp_dot_type__pb2._BUSINESSTYPE
_DECLAREREPORT.fields_by_name['declare_type'].enum_type = dtp_dot_type__pb2._DECLARETYPE
_MARGINPLACEDREPORT.fields_by_name['status'].enum_type = dtp_dot_type__pb2._ORDERSTATUS
_MARGINPLACEDREPORT.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_MARGINPLACEDREPORT.fields_by_name['order_side'].enum_type = dtp_dot_type__pb2._ORDERSIDE
_MARGINPLACEDREPORT.fields_by_name['position_type'].enum_type = dtp_dot_type__pb2._COMPACTPOSITIONTYPE
_MARGINREPAYAMOUNTREPORT.fields_by_name['status'].enum_type = dtp_dot_type__pb2._ORDERSTATUS
_MARGINREPAYAMOUNTREPORT.fields_by_name['position_type'].enum_type = dtp_dot_type__pb2._COMPACTPOSITIONTYPE
_MARGINREPAYSECURITYREPORT.fields_by_name['status'].enum_type = dtp_dot_type__pb2._ORDERSTATUS
_MARGINREPAYSECURITYREPORT.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_MARGINREPAYSECURITYREPORT.fields_by_name['position_type'].enum_type = dtp_dot_type__pb2._COMPACTPOSITIONTYPE
_MARGINFILLREPORT.fields_by_name['fill_status'].enum_type = dtp_dot_type__pb2._FILLSTATUS
_MARGINFILLREPORT.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_MARGINFILLREPORT.fields_by_name['order_side'].enum_type = dtp_dot_type__pb2._ORDERSIDE
_MARGINFILLREPORT.fields_by_name['position_type'].enum_type = dtp_dot_type__pb2._COMPACTPOSITIONTYPE
_MARGINCANCELLATIONREPORT.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_MARGINCANCELLATIONREPORT.fields_by_name['order_side'].enum_type = dtp_dot_type__pb2._ORDERSIDE
_MARGINCANCELLATIONREPORT.fields_by_name['status'].enum_type = dtp_dot_type__pb2._ORDERSTATUS
_MARGINCANCELLATIONREPORT.fields_by_name['position_type'].enum_type = dtp_dot_type__pb2._COMPACTPOSITIONTYPE
_QUERYORDERSREQUEST.fields_by_name['query_criteria'].enum_type = dtp_dot_type__pb2._QUERYORDERCRITERIA
_QUERYORDERSREQUEST.fields_by_name['pagination'].message_type = _QUERYPAGINATION
_QUERYORDERSREQUEST.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYORDERSREQUEST.fields_by_name['order_side'].enum_type = dtp_dot_type__pb2._ORDERSIDE
_QUERYORDERSRESPONSE_ORDER.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYORDERSRESPONSE_ORDER.fields_by_name['order_side'].enum_type = dtp_dot_type__pb2._ORDERSIDE
_QUERYORDERSRESPONSE_ORDER.fields_by_name['order_type'].enum_type = dtp_dot_type__pb2._ORDERTYPE
_QUERYORDERSRESPONSE_ORDER.fields_by_name['status'].enum_type = dtp_dot_type__pb2._ORDERSTATUS
_QUERYORDERSRESPONSE_ORDER.containing_type = _QUERYORDERSRESPONSE
_QUERYORDERSRESPONSE.fields_by_name['order_list'].message_type = _QUERYORDERSRESPONSE_ORDER
_QUERYORDERSRESPONSE.fields_by_name['pagination'].message_type = _QUERYPAGINATION
_QUERYFILLSREQUEST.fields_by_name['pagination'].message_type = _QUERYPAGINATION
_QUERYFILLSREQUEST.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYFILLSREQUEST.fields_by_name['order_side'].enum_type = dtp_dot_type__pb2._ORDERSIDE
_QUERYFILLSRESPONSE_FILL.fields_by_name['fill_status'].enum_type = dtp_dot_type__pb2._FILLSTATUS
_QUERYFILLSRESPONSE_FILL.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYFILLSRESPONSE_FILL.fields_by_name['order_side'].enum_type = dtp_dot_type__pb2._ORDERSIDE
_QUERYFILLSRESPONSE_FILL.containing_type = _QUERYFILLSRESPONSE
_QUERYFILLSRESPONSE.fields_by_name['fill_list'].message_type = _QUERYFILLSRESPONSE_FILL
_QUERYFILLSRESPONSE.fields_by_name['pagination'].message_type = _QUERYPAGINATION
_QUERYPOSITIONREQUEST.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYPOSITIONREQUEST.fields_by_name['pagination'].message_type = _QUERYPAGINATION
_QUERYPOSITIONRESPONSE_POSITIONDETAIL.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYPOSITIONRESPONSE_POSITIONDETAIL.containing_type = _QUERYPOSITIONRESPONSE
_QUERYPOSITIONRESPONSE.fields_by_name['position_list'].message_type = _QUERYPOSITIONRESPONSE_POSITIONDETAIL
_QUERYPOSITIONRESPONSE.fields_by_name['pagination'].message_type = _QUERYPAGINATION
_QUERYRATIONRESPONSE_RATION.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYRATIONRESPONSE_RATION.containing_type = _QUERYRATIONRESPONSE
_QUERYRATIONRESPONSE.fields_by_name['ration_list'].message_type = _QUERYRATIONRESPONSE_RATION
_QUERYBONDSTORAGEREQUEST.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYBONDSTORAGEREQUEST.fields_by_name['pagination'].message_type = _QUERYPAGINATION
_QUERYBONDSTORAGERESPONSE_BONDSTORAGEDETAIL.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYBONDSTORAGERESPONSE_BONDSTORAGEDETAIL.containing_type = _QUERYBONDSTORAGERESPONSE
_QUERYBONDSTORAGERESPONSE.fields_by_name['detail_list'].message_type = _QUERYBONDSTORAGERESPONSE_BONDSTORAGEDETAIL
_QUERYBONDSTORAGERESPONSE.fields_by_name['pagination'].message_type = _QUERYPAGINATION
_QUERYPENDINGREPOREQUEST.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYPENDINGREPOREQUEST.fields_by_name['pagination'].message_type = _QUERYPAGINATION
_QUERYPENDINGREPORESPONSE_PENDINGREPODETAIL.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYPENDINGREPORESPONSE_PENDINGREPODETAIL.containing_type = _QUERYPENDINGREPORESPONSE
_QUERYPENDINGREPORESPONSE.fields_by_name['detail_list'].message_type = _QUERYPENDINGREPORESPONSE_PENDINGREPODETAIL
_QUERYPENDINGREPORESPONSE.fields_by_name['pagination'].message_type = _QUERYPAGINATION
_QUERYREPOACTUALOCCUPATIONDAYREQUEST.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYREPOACTUALOCCUPATIONDAYRESPONSE_OCCUPATIONDAY.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYREPOACTUALOCCUPATIONDAYRESPONSE_OCCUPATIONDAY.containing_type = _QUERYREPOACTUALOCCUPATIONDAYRESPONSE
_QUERYREPOACTUALOCCUPATIONDAYRESPONSE.fields_by_name['detal_list'].message_type = _QUERYREPOACTUALOCCUPATIONDAYRESPONSE_OCCUPATIONDAY
_QUERYQUOTAREQUEST.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYQUOTARESPONSE_QUOTA.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYQUOTARESPONSE_QUOTA.fields_by_name['quota_status'].enum_type = dtp_dot_type__pb2._QUOTASTATUS
_QUERYQUOTARESPONSE_QUOTA.containing_type = _QUERYQUOTARESPONSE
_QUERYQUOTARESPONSE.fields_by_name['quota_list'].message_type = _QUERYQUOTARESPONSE_QUOTA
_QUERYVOTEINFOREQUEST.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYVOTEINFOREQUEST.fields_by_name['pagination'].message_type = _QUERYPAGINATION
_QUERYVOTEINFORESPONSE_VOTEINFO.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYVOTEINFORESPONSE_VOTEINFO.fields_by_name['motion_type'].enum_type = dtp_dot_type__pb2._MOTIONTYPE
_QUERYVOTEINFORESPONSE_VOTEINFO.containing_type = _QUERYVOTEINFORESPONSE
_QUERYVOTEINFORESPONSE.fields_by_name['vote_info_list'].message_type = _QUERYVOTEINFORESPONSE_VOTEINFO
_QUERYVOTEINFORESPONSE.fields_by_name['pagination'].message_type = _QUERYPAGINATION
_QUERYDECLAREINFOREQUEST.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYDECLAREINFOREQUEST.fields_by_name['business_type'].enum_type = dtp_dot_type__pb2._BUSINESSTYPE
_QUERYDECLAREINFOREQUEST.fields_by_name['pagination'].message_type = _QUERYPAGINATION
_QUERYDECLAREINFORESPONSE_DECLAREINFO.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYDECLAREINFORESPONSE_DECLAREINFO.fields_by_name['business_type'].enum_type = dtp_dot_type__pb2._BUSINESSTYPE
_QUERYDECLAREINFORESPONSE_DECLAREINFO.containing_type = _QUERYDECLAREINFORESPONSE
_QUERYDECLAREINFORESPONSE.fields_by_name['declare_info_list'].message_type = _QUERYDECLAREINFORESPONSE_DECLAREINFO
_QUERYDECLAREINFORESPONSE.fields_by_name['pagination'].message_type = _QUERYPAGINATION
_QUERYBANKSERVICERESPONSE_BANKSERVICEINFO.fields_by_name['service_code'].enum_type = dtp_dot_type__pb2._BANKSERVICECODE
_QUERYBANKSERVICERESPONSE_BANKSERVICEINFO.containing_type = _QUERYBANKSERVICERESPONSE
_QUERYBANKSERVICERESPONSE.fields_by_name['info_list'].message_type = _QUERYBANKSERVICERESPONSE_BANKSERVICEINFO
_QUERYMARGINORDERSREQUEST.fields_by_name['query_criteria'].enum_type = dtp_dot_type__pb2._QUERYORDERCRITERIA
_QUERYMARGINORDERSREQUEST.fields_by_name['pagination'].message_type = _QUERYPAGINATION
_QUERYMARGINORDERSREQUEST.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYMARGINORDERSREQUEST.fields_by_name['order_side'].enum_type = dtp_dot_type__pb2._ORDERSIDE
_QUERYMARGINORDERSRESPONSE_ORDER.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYMARGINORDERSRESPONSE_ORDER.fields_by_name['order_side'].enum_type = dtp_dot_type__pb2._ORDERSIDE
_QUERYMARGINORDERSRESPONSE_ORDER.fields_by_name['order_type'].enum_type = dtp_dot_type__pb2._ORDERTYPE
_QUERYMARGINORDERSRESPONSE_ORDER.fields_by_name['status'].enum_type = dtp_dot_type__pb2._ORDERSTATUS
_QUERYMARGINORDERSRESPONSE_ORDER.fields_by_name['position_type'].enum_type = dtp_dot_type__pb2._COMPACTPOSITIONTYPE
_QUERYMARGINORDERSRESPONSE_ORDER.containing_type = _QUERYMARGINORDERSRESPONSE
_QUERYMARGINORDERSRESPONSE.fields_by_name['order_list'].message_type = _QUERYMARGINORDERSRESPONSE_ORDER
_QUERYMARGINORDERSRESPONSE.fields_by_name['pagination'].message_type = _QUERYPAGINATION
_QUERYMARGINFILLSREQUEST.fields_by_name['pagination'].message_type = _QUERYPAGINATION
_QUERYMARGINFILLSREQUEST.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYMARGINFILLSREQUEST.fields_by_name['order_side'].enum_type = dtp_dot_type__pb2._ORDERSIDE
_QUERYMARGINFILLSRESPONSE_FILL.fields_by_name['fill_status'].enum_type = dtp_dot_type__pb2._FILLSTATUS
_QUERYMARGINFILLSRESPONSE_FILL.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYMARGINFILLSRESPONSE_FILL.fields_by_name['order_side'].enum_type = dtp_dot_type__pb2._ORDERSIDE
_QUERYMARGINFILLSRESPONSE_FILL.fields_by_name['position_type'].enum_type = dtp_dot_type__pb2._COMPACTPOSITIONTYPE
_QUERYMARGINFILLSRESPONSE_FILL.containing_type = _QUERYMARGINFILLSRESPONSE
_QUERYMARGINFILLSRESPONSE.fields_by_name['fill_list'].message_type = _QUERYMARGINFILLSRESPONSE_FILL
_QUERYMARGINFILLSRESPONSE.fields_by_name['pagination'].message_type = _QUERYPAGINATION
_QUERYMARGINPOSITIONREQUEST.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYMARGINPOSITIONREQUEST.fields_by_name['pagination'].message_type = _QUERYPAGINATION
_QUERYMARGINPOSITIONRESPONSE_POSITIONDETAIL.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYMARGINPOSITIONRESPONSE_POSITIONDETAIL.containing_type = _QUERYMARGINPOSITIONRESPONSE
_QUERYMARGINPOSITIONRESPONSE.fields_by_name['position_list'].message_type = _QUERYMARGINPOSITIONRESPONSE_POSITIONDETAIL
_QUERYMARGINPOSITIONRESPONSE.fields_by_name['pagination'].message_type = _QUERYPAGINATION
_QUERYMARGINSECURITIESREQUEST.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYMARGINSECURITIESREQUEST.fields_by_name['position_type'].enum_type = dtp_dot_type__pb2._COMPACTPOSITIONTYPE
_QUERYMARGINSECURITIESREQUEST.fields_by_name['pagination'].message_type = _QUERYPAGINATION
_QUERYMARGINSECURITIESRESPONSE_SECURITYDETAIL.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYMARGINSECURITIESRESPONSE_SECURITYDETAIL.fields_by_name['status'].enum_type = dtp_dot_type__pb2._SECURITYLOANSTATUS
_QUERYMARGINSECURITIESRESPONSE_SECURITYDETAIL.fields_by_name['position_type'].enum_type = dtp_dot_type__pb2._COMPACTPOSITIONTYPE
_QUERYMARGINSECURITIESRESPONSE_SECURITYDETAIL.containing_type = _QUERYMARGINSECURITIESRESPONSE
_QUERYMARGINSECURITIESRESPONSE.fields_by_name['security_list'].message_type = _QUERYMARGINSECURITIESRESPONSE_SECURITYDETAIL
_QUERYMARGINSECURITIESRESPONSE.fields_by_name['pagination'].message_type = _QUERYPAGINATION
_QUERYMARGINCOMPACTREQUEST.fields_by_name['compact_type'].enum_type = dtp_dot_type__pb2._COMPACTTYPE
_QUERYMARGINCOMPACTREQUEST.fields_by_name['position_type'].enum_type = dtp_dot_type__pb2._COMPACTPOSITIONTYPE
_QUERYMARGINCOMPACTREQUEST.fields_by_name['pagination'].message_type = _QUERYPAGINATION
_QUERYMARGINCOMPACTRESPONSE_COMPACTDETAIL.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYMARGINCOMPACTRESPONSE_COMPACTDETAIL.fields_by_name['compact_type'].enum_type = dtp_dot_type__pb2._COMPACTTYPE
_QUERYMARGINCOMPACTRESPONSE_COMPACTDETAIL.fields_by_name['status'].enum_type = dtp_dot_type__pb2._COMPACTSTATUS
_QUERYMARGINCOMPACTRESPONSE_COMPACTDETAIL.fields_by_name['position_type'].enum_type = dtp_dot_type__pb2._COMPACTPOSITIONTYPE
_QUERYMARGINCOMPACTRESPONSE_COMPACTDETAIL.containing_type = _QUERYMARGINCOMPACTRESPONSE
_QUERYMARGINCOMPACTRESPONSE.fields_by_name['compact_list'].message_type = _QUERYMARGINCOMPACTRESPONSE_COMPACTDETAIL
_QUERYMARGINCOMPACTRESPONSE.fields_by_name['pagination'].message_type = _QUERYPAGINATION
_QUERYMARGINCOMPACTWATERREQUEST.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYMARGINCOMPACTWATERREQUEST.fields_by_name['compact_type'].enum_type = dtp_dot_type__pb2._COMPACTTYPE
_QUERYMARGINCOMPACTWATERREQUEST.fields_by_name['pagination'].message_type = _QUERYPAGINATION
_QUERYMARGINCOMPACTWATERRESPONSE_WATERDETAIL.fields_by_name['exchange'].enum_type = dtp_dot_type__pb2._EXCHANGE
_QUERYMARGINCOMPACTWATERRESPONSE_WATERDETAIL.fields_by_name['compact_type'].enum_type = dtp_dot_type__pb2._COMPACTTYPE
_QUERYMARGINCOMPACTWATERRESPONSE_WATERDETAIL.containing_type = _QUERYMARGINCOMPACTWATERRESPONSE
_QUERYMARGINCOMPACTWATERRESPONSE.fields_by_name['water_list'].message_type = _QUERYMARGINCOMPACTWATERRESPONSE_WATERDETAIL
_QUERYMARGINCOMPACTWATERRESPONSE.fields_by_name['pagination'].message_type = _QUERYPAGINATION
DESCRIPTOR.message_types_by_name['RequestHeader'] = _REQUESTHEADER
DESCRIPTOR.message_types_by_name['ResponseHeader'] = _RESPONSEHEADER
DESCRIPTOR.message_types_by_name['ReportHeader'] = _REPORTHEADER
DESCRIPTOR.message_types_by_name['LoginAccountRequest'] = _LOGINACCOUNTREQUEST
DESCRIPTOR.message_types_by_name['LoginAccountResponse'] = _LOGINACCOUNTRESPONSE
DESCRIPTOR.message_types_by_name['LogoutAccountRequest'] = _LOGOUTACCOUNTREQUEST
DESCRIPTOR.message_types_by_name['LogoutAccountResponse'] = _LOGOUTACCOUNTRESPONSE
DESCRIPTOR.message_types_by_name['PlaceOrder'] = _PLACEORDER
DESCRIPTOR.message_types_by_name['CancelOrder'] = _CANCELORDER
DESCRIPTOR.message_types_by_name['CancelResponse'] = _CANCELRESPONSE
DESCRIPTOR.message_types_by_name['PlaceVote'] = _PLACEVOTE
DESCRIPTOR.message_types_by_name['PlaceDeclare'] = _PLACEDECLARE
DESCRIPTOR.message_types_by_name['MarginPlaceOrder'] = _MARGINPLACEORDER
DESCRIPTOR.message_types_by_name['MarginCancelOrder'] = _MARGINCANCELORDER
DESCRIPTOR.message_types_by_name['MarginCancelResponse'] = _MARGINCANCELRESPONSE
DESCRIPTOR.message_types_by_name['MarginRepayAmountOrder'] = _MARGINREPAYAMOUNTORDER
DESCRIPTOR.message_types_by_name['MarginRepaySecurityOrder'] = _MARGINREPAYSECURITYORDER
DESCRIPTOR.message_types_by_name['PlaceBatchOrder'] = _PLACEBATCHORDER
DESCRIPTOR.message_types_by_name['PlaceBatchResponse'] = _PLACEBATCHRESPONSE
DESCRIPTOR.message_types_by_name['CancelBatchOrder'] = _CANCELBATCHORDER
DESCRIPTOR.message_types_by_name['CancelBatchResponse'] = _CANCELBATCHRESPONSE
DESCRIPTOR.message_types_by_name['PlacedReport'] = _PLACEDREPORT
DESCRIPTOR.message_types_by_name['FillReport'] = _FILLREPORT
DESCRIPTOR.message_types_by_name['CancellationReport'] = _CANCELLATIONREPORT
DESCRIPTOR.message_types_by_name['VoteReport'] = _VOTEREPORT
DESCRIPTOR.message_types_by_name['DeclareReport'] = _DECLAREREPORT
DESCRIPTOR.message_types_by_name['MarginPlacedReport'] = _MARGINPLACEDREPORT
DESCRIPTOR.message_types_by_name['MarginRepayAmountReport'] = _MARGINREPAYAMOUNTREPORT
DESCRIPTOR.message_types_by_name['MarginRepaySecurityReport'] = _MARGINREPAYSECURITYREPORT
DESCRIPTOR.message_types_by_name['MarginFillReport'] = _MARGINFILLREPORT
DESCRIPTOR.message_types_by_name['MarginCancellationReport'] = _MARGINCANCELLATIONREPORT
DESCRIPTOR.message_types_by_name['QueryOrdersRequest'] = _QUERYORDERSREQUEST
DESCRIPTOR.message_types_by_name['QueryOrdersResponse'] = _QUERYORDERSRESPONSE
DESCRIPTOR.message_types_by_name['QueryFillsRequest'] = _QUERYFILLSREQUEST
DESCRIPTOR.message_types_by_name['QueryFillsResponse'] = _QUERYFILLSRESPONSE
DESCRIPTOR.message_types_by_name['QueryCapitalRequest'] = _QUERYCAPITALREQUEST
DESCRIPTOR.message_types_by_name['QueryCapitalResponse'] = _QUERYCAPITALRESPONSE
DESCRIPTOR.message_types_by_name['QueryPositionRequest'] = _QUERYPOSITIONREQUEST
DESCRIPTOR.message_types_by_name['QueryPositionResponse'] = _QUERYPOSITIONRESPONSE
DESCRIPTOR.message_types_by_name['QueryRationRequest'] = _QUERYRATIONREQUEST
DESCRIPTOR.message_types_by_name['QueryRationResponse'] = _QUERYRATIONRESPONSE
DESCRIPTOR.message_types_by_name['QueryBondStorageRequest'] = _QUERYBONDSTORAGEREQUEST
DESCRIPTOR.message_types_by_name['QueryBondStorageResponse'] = _QUERYBONDSTORAGERESPONSE
DESCRIPTOR.message_types_by_name['QueryPendingRepoRequest'] = _QUERYPENDINGREPOREQUEST
DESCRIPTOR.message_types_by_name['QueryPendingRepoResponse'] = _QUERYPENDINGREPORESPONSE
DESCRIPTOR.message_types_by_name['QueryRepoActualOccupationDayRequest'] = _QUERYREPOACTUALOCCUPATIONDAYREQUEST
DESCRIPTOR.message_types_by_name['QueryRepoActualOccupationDayResponse'] = _QUERYREPOACTUALOCCUPATIONDAYRESPONSE
DESCRIPTOR.message_types_by_name['QueryQuotaRequest'] = _QUERYQUOTAREQUEST
DESCRIPTOR.message_types_by_name['QueryQuotaResponse'] = _QUERYQUOTARESPONSE
DESCRIPTOR.message_types_by_name['QueryVoteInfoRequest'] = _QUERYVOTEINFOREQUEST
DESCRIPTOR.message_types_by_name['QueryVoteInfoResponse'] = _QUERYVOTEINFORESPONSE
DESCRIPTOR.message_types_by_name['QueryDeclareInfoRequest'] = _QUERYDECLAREINFOREQUEST
DESCRIPTOR.message_types_by_name['QueryDeclareInfoResponse'] = _QUERYDECLAREINFORESPONSE
DESCRIPTOR.message_types_by_name['QueryBankServiceRequest'] = _QUERYBANKSERVICEREQUEST
DESCRIPTOR.message_types_by_name['QueryBankServiceResponse'] = _QUERYBANKSERVICERESPONSE
DESCRIPTOR.message_types_by_name['QueryMarginOrdersRequest'] = _QUERYMARGINORDERSREQUEST
DESCRIPTOR.message_types_by_name['QueryMarginOrdersResponse'] = _QUERYMARGINORDERSRESPONSE
DESCRIPTOR.message_types_by_name['QueryMarginFillsRequest'] = _QUERYMARGINFILLSREQUEST
DESCRIPTOR.message_types_by_name['QueryMarginFillsResponse'] = _QUERYMARGINFILLSRESPONSE
DESCRIPTOR.message_types_by_name['QueryMarginCapitalRequest'] = _QUERYMARGINCAPITALREQUEST
DESCRIPTOR.message_types_by_name['QueryMarginCapitalResponse'] = _QUERYMARGINCAPITALRESPONSE
DESCRIPTOR.message_types_by_name['QueryMarginPositionRequest'] = _QUERYMARGINPOSITIONREQUEST
DESCRIPTOR.message_types_by_name['QueryMarginPositionResponse'] = _QUERYMARGINPOSITIONRESPONSE
DESCRIPTOR.message_types_by_name['QueryMarginSecuritiesRequest'] = _QUERYMARGINSECURITIESREQUEST
DESCRIPTOR.message_types_by_name['QueryMarginSecuritiesResponse'] = _QUERYMARGINSECURITIESRESPONSE
DESCRIPTOR.message_types_by_name['QueryMarginCompactRequest'] = _QUERYMARGINCOMPACTREQUEST
DESCRIPTOR.message_types_by_name['QueryMarginCompactResponse'] = _QUERYMARGINCOMPACTRESPONSE
DESCRIPTOR.message_types_by_name['QueryMarginCompactWaterRequest'] = _QUERYMARGINCOMPACTWATERREQUEST
DESCRIPTOR.message_types_by_name['QueryMarginCompactWaterResponse'] = _QUERYMARGINCOMPACTWATERRESPONSE
DESCRIPTOR.message_types_by_name['QueryPagination'] = _QUERYPAGINATION
DESCRIPTOR.message_types_by_name['AlgorithmicSend'] = _ALGORITHMICSEND
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
RequestHeader = _reflection.GeneratedProtocolMessageType('RequestHeader', (_message.Message,), dict(
DESCRIPTOR = _REQUESTHEADER,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.RequestHeader)
))
_sym_db.RegisterMessage(RequestHeader)
ResponseHeader = _reflection.GeneratedProtocolMessageType('ResponseHeader', (_message.Message,), dict(
DESCRIPTOR = _RESPONSEHEADER,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.ResponseHeader)
))
_sym_db.RegisterMessage(ResponseHeader)
ReportHeader = _reflection.GeneratedProtocolMessageType('ReportHeader', (_message.Message,), dict(
DESCRIPTOR = _REPORTHEADER,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.ReportHeader)
))
_sym_db.RegisterMessage(ReportHeader)
LoginAccountRequest = _reflection.GeneratedProtocolMessageType('LoginAccountRequest', (_message.Message,), dict(
DESCRIPTOR = _LOGINACCOUNTREQUEST,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.LoginAccountRequest)
))
_sym_db.RegisterMessage(LoginAccountRequest)
LoginAccountResponse = _reflection.GeneratedProtocolMessageType('LoginAccountResponse', (_message.Message,), dict(
DESCRIPTOR = _LOGINACCOUNTRESPONSE,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.LoginAccountResponse)
))
_sym_db.RegisterMessage(LoginAccountResponse)
LogoutAccountRequest = _reflection.GeneratedProtocolMessageType('LogoutAccountRequest', (_message.Message,), dict(
DESCRIPTOR = _LOGOUTACCOUNTREQUEST,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.LogoutAccountRequest)
))
_sym_db.RegisterMessage(LogoutAccountRequest)
LogoutAccountResponse = _reflection.GeneratedProtocolMessageType('LogoutAccountResponse', (_message.Message,), dict(
DESCRIPTOR = _LOGOUTACCOUNTRESPONSE,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.LogoutAccountResponse)
))
_sym_db.RegisterMessage(LogoutAccountResponse)
PlaceOrder = _reflection.GeneratedProtocolMessageType('PlaceOrder', (_message.Message,), dict(
DESCRIPTOR = _PLACEORDER,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.PlaceOrder)
))
_sym_db.RegisterMessage(PlaceOrder)
CancelOrder = _reflection.GeneratedProtocolMessageType('CancelOrder', (_message.Message,), dict(
DESCRIPTOR = _CANCELORDER,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.CancelOrder)
))
_sym_db.RegisterMessage(CancelOrder)
CancelResponse = _reflection.GeneratedProtocolMessageType('CancelResponse', (_message.Message,), dict(
DESCRIPTOR = _CANCELRESPONSE,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.CancelResponse)
))
_sym_db.RegisterMessage(CancelResponse)
PlaceVote = _reflection.GeneratedProtocolMessageType('PlaceVote', (_message.Message,), dict(
DESCRIPTOR = _PLACEVOTE,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.PlaceVote)
))
_sym_db.RegisterMessage(PlaceVote)
PlaceDeclare = _reflection.GeneratedProtocolMessageType('PlaceDeclare', (_message.Message,), dict(
DESCRIPTOR = _PLACEDECLARE,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.PlaceDeclare)
))
_sym_db.RegisterMessage(PlaceDeclare)
MarginPlaceOrder = _reflection.GeneratedProtocolMessageType('MarginPlaceOrder', (_message.Message,), dict(
DESCRIPTOR = _MARGINPLACEORDER,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.MarginPlaceOrder)
))
_sym_db.RegisterMessage(MarginPlaceOrder)
MarginCancelOrder = _reflection.GeneratedProtocolMessageType('MarginCancelOrder', (_message.Message,), dict(
DESCRIPTOR = _MARGINCANCELORDER,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.MarginCancelOrder)
))
_sym_db.RegisterMessage(MarginCancelOrder)
MarginCancelResponse = _reflection.GeneratedProtocolMessageType('MarginCancelResponse', (_message.Message,), dict(
DESCRIPTOR = _MARGINCANCELRESPONSE,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.MarginCancelResponse)
))
_sym_db.RegisterMessage(MarginCancelResponse)
MarginRepayAmountOrder = _reflection.GeneratedProtocolMessageType('MarginRepayAmountOrder', (_message.Message,), dict(
DESCRIPTOR = _MARGINREPAYAMOUNTORDER,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.MarginRepayAmountOrder)
))
_sym_db.RegisterMessage(MarginRepayAmountOrder)
MarginRepaySecurityOrder = _reflection.GeneratedProtocolMessageType('MarginRepaySecurityOrder', (_message.Message,), dict(
DESCRIPTOR = _MARGINREPAYSECURITYORDER,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.MarginRepaySecurityOrder)
))
_sym_db.RegisterMessage(MarginRepaySecurityOrder)
PlaceBatchOrder = _reflection.GeneratedProtocolMessageType('PlaceBatchOrder', (_message.Message,), dict(
BatchOrderItem = _reflection.GeneratedProtocolMessageType('BatchOrderItem', (_message.Message,), dict(
DESCRIPTOR = _PLACEBATCHORDER_BATCHORDERITEM,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.PlaceBatchOrder.BatchOrderItem)
))
,
DESCRIPTOR = _PLACEBATCHORDER,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.PlaceBatchOrder)
))
_sym_db.RegisterMessage(PlaceBatchOrder)
_sym_db.RegisterMessage(PlaceBatchOrder.BatchOrderItem)
PlaceBatchResponse = _reflection.GeneratedProtocolMessageType('PlaceBatchResponse', (_message.Message,), dict(
DESCRIPTOR = _PLACEBATCHRESPONSE,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.PlaceBatchResponse)
))
_sym_db.RegisterMessage(PlaceBatchResponse)
CancelBatchOrder = _reflection.GeneratedProtocolMessageType('CancelBatchOrder', (_message.Message,), dict(
DESCRIPTOR = _CANCELBATCHORDER,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.CancelBatchOrder)
))
_sym_db.RegisterMessage(CancelBatchOrder)
CancelBatchResponse = _reflection.GeneratedProtocolMessageType('CancelBatchResponse', (_message.Message,), dict(
DESCRIPTOR = _CANCELBATCHRESPONSE,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.CancelBatchResponse)
))
_sym_db.RegisterMessage(CancelBatchResponse)
PlacedReport = _reflection.GeneratedProtocolMessageType('PlacedReport', (_message.Message,), dict(
DESCRIPTOR = _PLACEDREPORT,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.PlacedReport)
))
_sym_db.RegisterMessage(PlacedReport)
FillReport = _reflection.GeneratedProtocolMessageType('FillReport', (_message.Message,), dict(
DESCRIPTOR = _FILLREPORT,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.FillReport)
))
_sym_db.RegisterMessage(FillReport)
CancellationReport = _reflection.GeneratedProtocolMessageType('CancellationReport', (_message.Message,), dict(
DESCRIPTOR = _CANCELLATIONREPORT,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.CancellationReport)
))
_sym_db.RegisterMessage(CancellationReport)
VoteReport = _reflection.GeneratedProtocolMessageType('VoteReport', (_message.Message,), dict(
DESCRIPTOR = _VOTEREPORT,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.VoteReport)
))
_sym_db.RegisterMessage(VoteReport)
DeclareReport = _reflection.GeneratedProtocolMessageType('DeclareReport', (_message.Message,), dict(
DESCRIPTOR = _DECLAREREPORT,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.DeclareReport)
))
_sym_db.RegisterMessage(DeclareReport)
MarginPlacedReport = _reflection.GeneratedProtocolMessageType('MarginPlacedReport', (_message.Message,), dict(
DESCRIPTOR = _MARGINPLACEDREPORT,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.MarginPlacedReport)
))
_sym_db.RegisterMessage(MarginPlacedReport)
MarginRepayAmountReport = _reflection.GeneratedProtocolMessageType('MarginRepayAmountReport', (_message.Message,), dict(
DESCRIPTOR = _MARGINREPAYAMOUNTREPORT,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.MarginRepayAmountReport)
))
_sym_db.RegisterMessage(MarginRepayAmountReport)
MarginRepaySecurityReport = _reflection.GeneratedProtocolMessageType('MarginRepaySecurityReport', (_message.Message,), dict(
DESCRIPTOR = _MARGINREPAYSECURITYREPORT,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.MarginRepaySecurityReport)
))
_sym_db.RegisterMessage(MarginRepaySecurityReport)
MarginFillReport = _reflection.GeneratedProtocolMessageType('MarginFillReport', (_message.Message,), dict(
DESCRIPTOR = _MARGINFILLREPORT,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.MarginFillReport)
))
_sym_db.RegisterMessage(MarginFillReport)
MarginCancellationReport = _reflection.GeneratedProtocolMessageType('MarginCancellationReport', (_message.Message,), dict(
DESCRIPTOR = _MARGINCANCELLATIONREPORT,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.MarginCancellationReport)
))
_sym_db.RegisterMessage(MarginCancellationReport)
QueryOrdersRequest = _reflection.GeneratedProtocolMessageType('QueryOrdersRequest', (_message.Message,), dict(
DESCRIPTOR = _QUERYORDERSREQUEST,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryOrdersRequest)
))
_sym_db.RegisterMessage(QueryOrdersRequest)
QueryOrdersResponse = _reflection.GeneratedProtocolMessageType('QueryOrdersResponse', (_message.Message,), dict(
Order = _reflection.GeneratedProtocolMessageType('Order', (_message.Message,), dict(
DESCRIPTOR = _QUERYORDERSRESPONSE_ORDER,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryOrdersResponse.Order)
))
,
DESCRIPTOR = _QUERYORDERSRESPONSE,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryOrdersResponse)
))
_sym_db.RegisterMessage(QueryOrdersResponse)
_sym_db.RegisterMessage(QueryOrdersResponse.Order)
QueryFillsRequest = _reflection.GeneratedProtocolMessageType('QueryFillsRequest', (_message.Message,), dict(
DESCRIPTOR = _QUERYFILLSREQUEST,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryFillsRequest)
))
_sym_db.RegisterMessage(QueryFillsRequest)
QueryFillsResponse = _reflection.GeneratedProtocolMessageType('QueryFillsResponse', (_message.Message,), dict(
Fill = _reflection.GeneratedProtocolMessageType('Fill', (_message.Message,), dict(
DESCRIPTOR = _QUERYFILLSRESPONSE_FILL,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryFillsResponse.Fill)
))
,
DESCRIPTOR = _QUERYFILLSRESPONSE,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryFillsResponse)
))
_sym_db.RegisterMessage(QueryFillsResponse)
_sym_db.RegisterMessage(QueryFillsResponse.Fill)
QueryCapitalRequest = _reflection.GeneratedProtocolMessageType('QueryCapitalRequest', (_message.Message,), dict(
DESCRIPTOR = _QUERYCAPITALREQUEST,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryCapitalRequest)
))
_sym_db.RegisterMessage(QueryCapitalRequest)
QueryCapitalResponse = _reflection.GeneratedProtocolMessageType('QueryCapitalResponse', (_message.Message,), dict(
DESCRIPTOR = _QUERYCAPITALRESPONSE,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryCapitalResponse)
))
_sym_db.RegisterMessage(QueryCapitalResponse)
QueryPositionRequest = _reflection.GeneratedProtocolMessageType('QueryPositionRequest', (_message.Message,), dict(
DESCRIPTOR = _QUERYPOSITIONREQUEST,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryPositionRequest)
))
_sym_db.RegisterMessage(QueryPositionRequest)
QueryPositionResponse = _reflection.GeneratedProtocolMessageType('QueryPositionResponse', (_message.Message,), dict(
PositionDetail = _reflection.GeneratedProtocolMessageType('PositionDetail', (_message.Message,), dict(
DESCRIPTOR = _QUERYPOSITIONRESPONSE_POSITIONDETAIL,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryPositionResponse.PositionDetail)
))
,
DESCRIPTOR = _QUERYPOSITIONRESPONSE,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryPositionResponse)
))
_sym_db.RegisterMessage(QueryPositionResponse)
_sym_db.RegisterMessage(QueryPositionResponse.PositionDetail)
QueryRationRequest = _reflection.GeneratedProtocolMessageType('QueryRationRequest', (_message.Message,), dict(
DESCRIPTOR = _QUERYRATIONREQUEST,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryRationRequest)
))
_sym_db.RegisterMessage(QueryRationRequest)
QueryRationResponse = _reflection.GeneratedProtocolMessageType('QueryRationResponse', (_message.Message,), dict(
Ration = _reflection.GeneratedProtocolMessageType('Ration', (_message.Message,), dict(
DESCRIPTOR = _QUERYRATIONRESPONSE_RATION,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryRationResponse.Ration)
))
,
DESCRIPTOR = _QUERYRATIONRESPONSE,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryRationResponse)
))
_sym_db.RegisterMessage(QueryRationResponse)
_sym_db.RegisterMessage(QueryRationResponse.Ration)
QueryBondStorageRequest = _reflection.GeneratedProtocolMessageType('QueryBondStorageRequest', (_message.Message,), dict(
DESCRIPTOR = _QUERYBONDSTORAGEREQUEST,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryBondStorageRequest)
))
_sym_db.RegisterMessage(QueryBondStorageRequest)
QueryBondStorageResponse = _reflection.GeneratedProtocolMessageType('QueryBondStorageResponse', (_message.Message,), dict(
BondStorageDetail = _reflection.GeneratedProtocolMessageType('BondStorageDetail', (_message.Message,), dict(
DESCRIPTOR = _QUERYBONDSTORAGERESPONSE_BONDSTORAGEDETAIL,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryBondStorageResponse.BondStorageDetail)
))
,
DESCRIPTOR = _QUERYBONDSTORAGERESPONSE,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryBondStorageResponse)
))
_sym_db.RegisterMessage(QueryBondStorageResponse)
_sym_db.RegisterMessage(QueryBondStorageResponse.BondStorageDetail)
QueryPendingRepoRequest = _reflection.GeneratedProtocolMessageType('QueryPendingRepoRequest', (_message.Message,), dict(
DESCRIPTOR = _QUERYPENDINGREPOREQUEST,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryPendingRepoRequest)
))
_sym_db.RegisterMessage(QueryPendingRepoRequest)
QueryPendingRepoResponse = _reflection.GeneratedProtocolMessageType('QueryPendingRepoResponse', (_message.Message,), dict(
PendingRepoDetail = _reflection.GeneratedProtocolMessageType('PendingRepoDetail', (_message.Message,), dict(
DESCRIPTOR = _QUERYPENDINGREPORESPONSE_PENDINGREPODETAIL,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryPendingRepoResponse.PendingRepoDetail)
))
,
DESCRIPTOR = _QUERYPENDINGREPORESPONSE,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryPendingRepoResponse)
))
_sym_db.RegisterMessage(QueryPendingRepoResponse)
_sym_db.RegisterMessage(QueryPendingRepoResponse.PendingRepoDetail)
QueryRepoActualOccupationDayRequest = _reflection.GeneratedProtocolMessageType('QueryRepoActualOccupationDayRequest', (_message.Message,), dict(
DESCRIPTOR = _QUERYREPOACTUALOCCUPATIONDAYREQUEST,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryRepoActualOccupationDayRequest)
))
_sym_db.RegisterMessage(QueryRepoActualOccupationDayRequest)
QueryRepoActualOccupationDayResponse = _reflection.GeneratedProtocolMessageType('QueryRepoActualOccupationDayResponse', (_message.Message,), dict(
OccupationDay = _reflection.GeneratedProtocolMessageType('OccupationDay', (_message.Message,), dict(
DESCRIPTOR = _QUERYREPOACTUALOCCUPATIONDAYRESPONSE_OCCUPATIONDAY,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryRepoActualOccupationDayResponse.OccupationDay)
))
,
DESCRIPTOR = _QUERYREPOACTUALOCCUPATIONDAYRESPONSE,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryRepoActualOccupationDayResponse)
))
_sym_db.RegisterMessage(QueryRepoActualOccupationDayResponse)
_sym_db.RegisterMessage(QueryRepoActualOccupationDayResponse.OccupationDay)
QueryQuotaRequest = _reflection.GeneratedProtocolMessageType('QueryQuotaRequest', (_message.Message,), dict(
DESCRIPTOR = _QUERYQUOTAREQUEST,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryQuotaRequest)
))
_sym_db.RegisterMessage(QueryQuotaRequest)
QueryQuotaResponse = _reflection.GeneratedProtocolMessageType('QueryQuotaResponse', (_message.Message,), dict(
Quota = _reflection.GeneratedProtocolMessageType('Quota', (_message.Message,), dict(
DESCRIPTOR = _QUERYQUOTARESPONSE_QUOTA,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryQuotaResponse.Quota)
))
,
DESCRIPTOR = _QUERYQUOTARESPONSE,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryQuotaResponse)
))
_sym_db.RegisterMessage(QueryQuotaResponse)
_sym_db.RegisterMessage(QueryQuotaResponse.Quota)
QueryVoteInfoRequest = _reflection.GeneratedProtocolMessageType('QueryVoteInfoRequest', (_message.Message,), dict(
DESCRIPTOR = _QUERYVOTEINFOREQUEST,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryVoteInfoRequest)
))
_sym_db.RegisterMessage(QueryVoteInfoRequest)
QueryVoteInfoResponse = _reflection.GeneratedProtocolMessageType('QueryVoteInfoResponse', (_message.Message,), dict(
VoteInfo = _reflection.GeneratedProtocolMessageType('VoteInfo', (_message.Message,), dict(
DESCRIPTOR = _QUERYVOTEINFORESPONSE_VOTEINFO,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryVoteInfoResponse.VoteInfo)
))
,
DESCRIPTOR = _QUERYVOTEINFORESPONSE,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryVoteInfoResponse)
))
_sym_db.RegisterMessage(QueryVoteInfoResponse)
_sym_db.RegisterMessage(QueryVoteInfoResponse.VoteInfo)
QueryDeclareInfoRequest = _reflection.GeneratedProtocolMessageType('QueryDeclareInfoRequest', (_message.Message,), dict(
DESCRIPTOR = _QUERYDECLAREINFOREQUEST,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryDeclareInfoRequest)
))
_sym_db.RegisterMessage(QueryDeclareInfoRequest)
QueryDeclareInfoResponse = _reflection.GeneratedProtocolMessageType('QueryDeclareInfoResponse', (_message.Message,), dict(
DeclareInfo = _reflection.GeneratedProtocolMessageType('DeclareInfo', (_message.Message,), dict(
DESCRIPTOR = _QUERYDECLAREINFORESPONSE_DECLAREINFO,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryDeclareInfoResponse.DeclareInfo)
))
,
DESCRIPTOR = _QUERYDECLAREINFORESPONSE,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryDeclareInfoResponse)
))
_sym_db.RegisterMessage(QueryDeclareInfoResponse)
_sym_db.RegisterMessage(QueryDeclareInfoResponse.DeclareInfo)
QueryBankServiceRequest = _reflection.GeneratedProtocolMessageType('QueryBankServiceRequest', (_message.Message,), dict(
DESCRIPTOR = _QUERYBANKSERVICEREQUEST,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryBankServiceRequest)
))
_sym_db.RegisterMessage(QueryBankServiceRequest)
QueryBankServiceResponse = _reflection.GeneratedProtocolMessageType('QueryBankServiceResponse', (_message.Message,), dict(
BankServiceInfo = _reflection.GeneratedProtocolMessageType('BankServiceInfo', (_message.Message,), dict(
DESCRIPTOR = _QUERYBANKSERVICERESPONSE_BANKSERVICEINFO,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryBankServiceResponse.BankServiceInfo)
))
,
DESCRIPTOR = _QUERYBANKSERVICERESPONSE,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryBankServiceResponse)
))
_sym_db.RegisterMessage(QueryBankServiceResponse)
_sym_db.RegisterMessage(QueryBankServiceResponse.BankServiceInfo)
QueryMarginOrdersRequest = _reflection.GeneratedProtocolMessageType('QueryMarginOrdersRequest', (_message.Message,), dict(
DESCRIPTOR = _QUERYMARGINORDERSREQUEST,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryMarginOrdersRequest)
))
_sym_db.RegisterMessage(QueryMarginOrdersRequest)
QueryMarginOrdersResponse = _reflection.GeneratedProtocolMessageType('QueryMarginOrdersResponse', (_message.Message,), dict(
Order = _reflection.GeneratedProtocolMessageType('Order', (_message.Message,), dict(
DESCRIPTOR = _QUERYMARGINORDERSRESPONSE_ORDER,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryMarginOrdersResponse.Order)
))
,
DESCRIPTOR = _QUERYMARGINORDERSRESPONSE,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryMarginOrdersResponse)
))
_sym_db.RegisterMessage(QueryMarginOrdersResponse)
_sym_db.RegisterMessage(QueryMarginOrdersResponse.Order)
QueryMarginFillsRequest = _reflection.GeneratedProtocolMessageType('QueryMarginFillsRequest', (_message.Message,), dict(
DESCRIPTOR = _QUERYMARGINFILLSREQUEST,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryMarginFillsRequest)
))
_sym_db.RegisterMessage(QueryMarginFillsRequest)
QueryMarginFillsResponse = _reflection.GeneratedProtocolMessageType('QueryMarginFillsResponse', (_message.Message,), dict(
Fill = _reflection.GeneratedProtocolMessageType('Fill', (_message.Message,), dict(
DESCRIPTOR = _QUERYMARGINFILLSRESPONSE_FILL,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryMarginFillsResponse.Fill)
))
,
DESCRIPTOR = _QUERYMARGINFILLSRESPONSE,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryMarginFillsResponse)
))
_sym_db.RegisterMessage(QueryMarginFillsResponse)
_sym_db.RegisterMessage(QueryMarginFillsResponse.Fill)
QueryMarginCapitalRequest = _reflection.GeneratedProtocolMessageType('QueryMarginCapitalRequest', (_message.Message,), dict(
DESCRIPTOR = _QUERYMARGINCAPITALREQUEST,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryMarginCapitalRequest)
))
_sym_db.RegisterMessage(QueryMarginCapitalRequest)
QueryMarginCapitalResponse = _reflection.GeneratedProtocolMessageType('QueryMarginCapitalResponse', (_message.Message,), dict(
DESCRIPTOR = _QUERYMARGINCAPITALRESPONSE,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryMarginCapitalResponse)
))
_sym_db.RegisterMessage(QueryMarginCapitalResponse)
QueryMarginPositionRequest = _reflection.GeneratedProtocolMessageType('QueryMarginPositionRequest', (_message.Message,), dict(
DESCRIPTOR = _QUERYMARGINPOSITIONREQUEST,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryMarginPositionRequest)
))
_sym_db.RegisterMessage(QueryMarginPositionRequest)
QueryMarginPositionResponse = _reflection.GeneratedProtocolMessageType('QueryMarginPositionResponse', (_message.Message,), dict(
PositionDetail = _reflection.GeneratedProtocolMessageType('PositionDetail', (_message.Message,), dict(
DESCRIPTOR = _QUERYMARGINPOSITIONRESPONSE_POSITIONDETAIL,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryMarginPositionResponse.PositionDetail)
))
,
DESCRIPTOR = _QUERYMARGINPOSITIONRESPONSE,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryMarginPositionResponse)
))
_sym_db.RegisterMessage(QueryMarginPositionResponse)
_sym_db.RegisterMessage(QueryMarginPositionResponse.PositionDetail)
QueryMarginSecuritiesRequest = _reflection.GeneratedProtocolMessageType('QueryMarginSecuritiesRequest', (_message.Message,), dict(
DESCRIPTOR = _QUERYMARGINSECURITIESREQUEST,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryMarginSecuritiesRequest)
))
_sym_db.RegisterMessage(QueryMarginSecuritiesRequest)
QueryMarginSecuritiesResponse = _reflection.GeneratedProtocolMessageType('QueryMarginSecuritiesResponse', (_message.Message,), dict(
SecurityDetail = _reflection.GeneratedProtocolMessageType('SecurityDetail', (_message.Message,), dict(
DESCRIPTOR = _QUERYMARGINSECURITIESRESPONSE_SECURITYDETAIL,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryMarginSecuritiesResponse.SecurityDetail)
))
,
DESCRIPTOR = _QUERYMARGINSECURITIESRESPONSE,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryMarginSecuritiesResponse)
))
_sym_db.RegisterMessage(QueryMarginSecuritiesResponse)
_sym_db.RegisterMessage(QueryMarginSecuritiesResponse.SecurityDetail)
QueryMarginCompactRequest = _reflection.GeneratedProtocolMessageType('QueryMarginCompactRequest', (_message.Message,), dict(
DESCRIPTOR = _QUERYMARGINCOMPACTREQUEST,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryMarginCompactRequest)
))
_sym_db.RegisterMessage(QueryMarginCompactRequest)
QueryMarginCompactResponse = _reflection.GeneratedProtocolMessageType('QueryMarginCompactResponse', (_message.Message,), dict(
CompactDetail = _reflection.GeneratedProtocolMessageType('CompactDetail', (_message.Message,), dict(
DESCRIPTOR = _QUERYMARGINCOMPACTRESPONSE_COMPACTDETAIL,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryMarginCompactResponse.CompactDetail)
))
,
DESCRIPTOR = _QUERYMARGINCOMPACTRESPONSE,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryMarginCompactResponse)
))
_sym_db.RegisterMessage(QueryMarginCompactResponse)
_sym_db.RegisterMessage(QueryMarginCompactResponse.CompactDetail)
QueryMarginCompactWaterRequest = _reflection.GeneratedProtocolMessageType('QueryMarginCompactWaterRequest', (_message.Message,), dict(
DESCRIPTOR = _QUERYMARGINCOMPACTWATERREQUEST,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryMarginCompactWaterRequest)
))
_sym_db.RegisterMessage(QueryMarginCompactWaterRequest)
QueryMarginCompactWaterResponse = _reflection.GeneratedProtocolMessageType('QueryMarginCompactWaterResponse', (_message.Message,), dict(
WaterDetail = _reflection.GeneratedProtocolMessageType('WaterDetail', (_message.Message,), dict(
DESCRIPTOR = _QUERYMARGINCOMPACTWATERRESPONSE_WATERDETAIL,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryMarginCompactWaterResponse.WaterDetail)
))
,
DESCRIPTOR = _QUERYMARGINCOMPACTWATERRESPONSE,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryMarginCompactWaterResponse)
))
_sym_db.RegisterMessage(QueryMarginCompactWaterResponse)
_sym_db.RegisterMessage(QueryMarginCompactWaterResponse.WaterDetail)
QueryPagination = _reflection.GeneratedProtocolMessageType('QueryPagination', (_message.Message,), dict(
DESCRIPTOR = _QUERYPAGINATION,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.QueryPagination)
))
_sym_db.RegisterMessage(QueryPagination)
AlgorithmicSend = _reflection.GeneratedProtocolMessageType('AlgorithmicSend', (_message.Message,), dict(
DESCRIPTOR = _ALGORITHMICSEND,
__module__ = 'dtp.api_pb2'
# @@protoc_insertion_point(class_scope:dwjk.dtp.AlgorithmicSend)
))
_sym_db.RegisterMessage(AlgorithmicSend)
# @@protoc_insertion_point(module_scope)
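# A minimal usage sketch of the classes registered above. It assumes only the
# standard google.protobuf Message API; the actual field names live in the
# dtp .proto definition and are deliberately not guessed here.
if __name__ == '__main__':
for name in sorted(DESCRIPTOR.message_types_by_name):
print(name) # e.g. CancelOrder, PlaceOrder, QueryOrdersRequest, ...
header = RequestHeader() # empty message, no fields set
data = header.SerializePartialToString() # wire-format bytes, no required-field check
decoded = RequestHeader()
decoded.MergeFromString(data) # parse the bytes back into a message
print(decoded == header) # True: both messages are empty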
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='Quotation.proto',
package='quotation.stock',
syntax='proto3',
serialized_pb=_b('\n\x0fQuotation.proto\x12\x0fquotation.stock\"\xa9\n\n\x05Stock\x12\x12\n\nszWindCode\x18\x01 \x01(\t\x12\x0e\n\x06szCode\x18\x02 \x01(\t\x12\x12\n\nnActionDay\x18\x03 \x01(\x05\x12\x13\n\x0bnTradingDay\x18\x04 \x01(\x05\x12\r\n\x05nTime\x18\x05 \x01(\x05\x12\x0f\n\x07nStatus\x18\x06 \x01(\x05\x12\x11\n\tnPreClose\x18\x07 \x01(\x03\x12\r\n\x05nOpen\x18\x08 \x01(\x03\x12\r\n\x05nHigh\x18\t \x01(\x03\x12\x0c\n\x04nLow\x18\n \x01(\x03\x12\x0e\n\x06nMatch\x18\x0b \x01(\x03\x12\x13\n\x0bnAskPrice_0\x18\x0c \x01(\x03\x12\x13\n\x0bnAskPrice_1\x18\r \x01(\x03\x12\x13\n\x0bnAskPrice_2\x18\x0e \x01(\x03\x12\x13\n\x0bnAskPrice_3\x18\x0f \x01(\x03\x12\x13\n\x0bnAskPrice_4\x18\x10 \x01(\x03\x12\x13\n\x0bnAskPrice_5\x18\x11 \x01(\x03\x12\x13\n\x0bnAskPrice_6\x18\x12 \x01(\x03\x12\x13\n\x0bnAskPrice_7\x18\x13 \x01(\x03\x12\x13\n\x0bnAskPrice_8\x18\x14 \x01(\x03\x12\x13\n\x0bnAskPrice_9\x18\x15 \x01(\x03\x12\x11\n\tnAskVol_0\x18\x16 \x01(\x03\x12\x11\n\tnAskVol_1\x18\x17 \x01(\x03\x12\x11\n\tnAskVol_2\x18\x18 \x01(\x03\x12\x11\n\tnAskVol_3\x18\x19 \x01(\x03\x12\x11\n\tnAskVol_4\x18\x1a \x01(\x03\x12\x11\n\tnAskVol_5\x18\x1b \x01(\x03\x12\x11\n\tnAskVol_6\x18\x1c \x01(\x03\x12\x11\n\tnAskVol_7\x18\x1d \x01(\x03\x12\x11\n\tnAskVol_8\x18\x1e \x01(\x03\x12\x11\n\tnAskVol_9\x18\x1f \x01(\x03\x12\x13\n\x0bnBidPrice_0\x18 \x01(\x03\x12\x13\n\x0bnBidPrice_1\x18! \x01(\x03\x12\x13\n\x0bnBidPrice_2\x18\" \x01(\x03\x12\x13\n\x0bnBidPrice_3\x18# \x01(\x03\x12\x13\n\x0bnBidPrice_4\x18$ \x01(\x03\x12\x13\n\x0bnBidPrice_5\x18% \x01(\x03\x12\x13\n\x0bnBidPrice_6\x18& \x01(\x03\x12\x13\n\x0bnBidPrice_7\x18\' \x01(\x03\x12\x13\n\x0bnBidPrice_8\x18( \x01(\x03\x12\x13\n\x0bnBidPrice_9\x18) \x01(\x03\x12\x11\n\tnBidVol_0\x18* \x01(\x03\x12\x11\n\tnBidVol_1\x18+ \x01(\x03\x12\x11\n\tnBidVol_2\x18, \x01(\x03\x12\x11\n\tnBidVol_3\x18- \x01(\x03\x12\x11\n\tnBidVol_4\x18. \x01(\x03\x12\x11\n\tnBidVol_5\x18/ \x01(\x03\x12\x11\n\tnBidVol_6\x18\x30 \x01(\x03\x12\x11\n\tnBidVol_7\x18\x31 \x01(\x03\x12\x11\n\tnBidVol_8\x18\x32 \x01(\x03\x12\x11\n\tnBidVol_9\x18\x33 \x01(\x03\x12\x12\n\nnNumTrades\x18\x34 \x01(\x05\x12\x0f\n\x07iVolume\x18\x35 \x01(\x03\x12\x11\n\tiTurnover\x18\x36 \x01(\x03\x12\x14\n\x0cnTotalBidVol\x18\x37 \x01(\x03\x12\x14\n\x0cnTotalAskVol\x18\x38 \x01(\x03\x12\x1c\n\x14nWeightedAvgBidPrice\x18\x39 \x01(\x03\x12\x1c\n\x14nWeightedAvgAskPrice\x18: \x01(\x03\x12\r\n\x05nIOPV\x18; \x01(\x05\x12\x18\n\x10nYieldToMaturity\x18< \x01(\x05\x12\x14\n\x0cnHighLimited\x18= \x01(\x03\x12\x13\n\x0bnLowLimited\x18> \x01(\x03\x12\x10\n\x08\x63hPrefix\x18? 
\x01(\t\x12\r\n\x05nSyl1\x18@ \x01(\x05\x12\r\n\x05nSyl2\x18\x41 \x01(\x05\x12\x0c\n\x04nSD2\x18\x42 \x01(\x05\x12\x12\n\nnLocalTime\x18\x43 \x01(\x05\"\xb2\x06\n\x06\x46uture\x12\x0c\n\x04\x63ode\x18\x01 \x01(\t\x12\x10\n\x08\x65xchange\x18\x02 \x01(\t\x12\x10\n\x08tradeDay\x18\x03 \x01(\t\x12\x12\n\nupdateTime\x18\x04 \x01(\t\x12\x19\n\x11updateMilliSecond\x18\x05 \x01(\x05\x12\x11\n\tlastPrice\x18\x06 \x01(\x01\x12\x0e\n\x06volume\x18\x07 \x01(\x05\x12\x11\n\topenPrice\x18\x08 \x01(\x01\x12\x14\n\x0chighestPrice\x18\t \x01(\x01\x12\x13\n\x0blowestPrice\x18\n \x01(\x01\x12\x12\n\nclosePrice\x18\x0b \x01(\x01\x12\x17\n\x0fsuttlementPrice\x18\x0c \x01(\x01\x12\x14\n\x0climitUpPrice\x18\r \x01(\x01\x12\x16\n\x0elimitDownPrice\x18\x0e \x01(\x01\x12\x10\n\x08turnover\x18\x0f \x01(\x01\x12\x14\n\x0copenInterest\x18\x10 \x01(\x03\x12\x15\n\rpreClosePrice\x18\x11 \x01(\x01\x12\x1a\n\x12preSettlementPrice\x18\x12 \x01(\x01\x12\x17\n\x0fpreOpenInterest\x18\x13 \x01(\x03\x12\x11\n\tactionDay\x18\x14 \x01(\t\x12\x11\n\taskPrice1\x18\x15 \x01(\x01\x12\x11\n\taskPrice2\x18\x16 \x01(\x01\x12\x11\n\taskPrice3\x18\x17 \x01(\x01\x12\x11\n\taskPrice4\x18\x18 \x01(\x01\x12\x11\n\taskPrice5\x18\x19 \x01(\x01\x12\x12\n\naskVolume1\x18\x1a \x01(\x05\x12\x12\n\naskVolume2\x18\x1b \x01(\x05\x12\x12\n\naskVolume3\x18\x1c \x01(\x05\x12\x12\n\naskVolume4\x18\x1d \x01(\x05\x12\x12\n\naskVolume5\x18\x1e \x01(\x05\x12\x11\n\tbidPrice1\x18\x1f \x01(\x01\x12\x11\n\tbidPrice2\x18 \x01(\x01\x12\x11\n\tbidPrice3\x18! \x01(\x01\x12\x11\n\tbidPrice4\x18\" \x01(\x01\x12\x11\n\tbidPrice5\x18# \x01(\x01\x12\x12\n\nbidVolume1\x18$ \x01(\x05\x12\x12\n\nbidVolume2\x18% \x01(\x05\x12\x12\n\nbidVolume3\x18& \x01(\x05\x12\x12\n\nbidVolume4\x18\' \x01(\x05\x12\x12\n\nbidVolume5\x18( \x01(\x05\"\xb2\x06\n\x06Option\x12\x0c\n\x04\x63ode\x18\x01 \x01(\t\x12\x10\n\x08\x65xchange\x18\x02 \x01(\t\x12\x10\n\x08tradeDay\x18\x03 \x01(\t\x12\x12\n\nupdateTime\x18\x04 \x01(\t\x12\x19\n\x11updateMilliSecond\x18\x05 \x01(\x05\x12\x11\n\tlastPrice\x18\x06 \x01(\x01\x12\x0e\n\x06volume\x18\x07 \x01(\x05\x12\x11\n\topenPrice\x18\x08 \x01(\x01\x12\x14\n\x0chighestPrice\x18\t \x01(\x01\x12\x13\n\x0blowestPrice\x18\n \x01(\x01\x12\x12\n\nclosePrice\x18\x0b \x01(\x01\x12\x17\n\x0fsuttlementPrice\x18\x0c \x01(\x01\x12\x14\n\x0climitUpPrice\x18\r \x01(\x01\x12\x16\n\x0elimitDownPrice\x18\x0e \x01(\x01\x12\x10\n\x08turnover\x18\x0f \x01(\x01\x12\x14\n\x0copenInterest\x18\x10 \x01(\x03\x12\x15\n\rpreClosePrice\x18\x11 \x01(\x01\x12\x1a\n\x12preSettlementPrice\x18\x12 \x01(\x01\x12\x17\n\x0fpreOpenInterest\x18\x13 \x01(\x03\x12\x11\n\tactionDay\x18\x14 \x01(\t\x12\x11\n\taskPrice1\x18\x15 \x01(\x01\x12\x11\n\taskPrice2\x18\x16 \x01(\x01\x12\x11\n\taskPrice3\x18\x17 \x01(\x01\x12\x11\n\taskPrice4\x18\x18 \x01(\x01\x12\x11\n\taskPrice5\x18\x19 \x01(\x01\x12\x12\n\naskVolume1\x18\x1a \x01(\x05\x12\x12\n\naskVolume2\x18\x1b \x01(\x05\x12\x12\n\naskVolume3\x18\x1c \x01(\x05\x12\x12\n\naskVolume4\x18\x1d \x01(\x05\x12\x12\n\naskVolume5\x18\x1e \x01(\x05\x12\x11\n\tbidPrice1\x18\x1f \x01(\x01\x12\x11\n\tbidPrice2\x18 \x01(\x01\x12\x11\n\tbidPrice3\x18! 
\x01(\x01\x12\x11\n\tbidPrice4\x18\" \x01(\x01\x12\x11\n\tbidPrice5\x18# \x01(\x01\x12\x12\n\nbidVolume1\x18$ \x01(\x05\x12\x12\n\nbidVolume2\x18% \x01(\x05\x12\x12\n\nbidVolume3\x18& \x01(\x05\x12\x12\n\nbidVolume4\x18\' \x01(\x05\x12\x12\n\nbidVolume5\x18( \x01(\x05\"\x87\x02\n\x05Index\x12\x12\n\nszWindCode\x18\x01 \x01(\t\x12\x0e\n\x06szCode\x18\x02 \x01(\t\x12\x12\n\nnActionDay\x18\x03 \x01(\x05\x12\x13\n\x0bnTradingDay\x18\x04 \x01(\x05\x12\r\n\x05nTime\x18\x05 \x01(\x05\x12\x12\n\nnOpenIndex\x18\x06 \x01(\x03\x12\x12\n\nnHighIndex\x18\x07 \x01(\x03\x12\x11\n\tnLowIndex\x18\x08 \x01(\x03\x12\x12\n\nnLastIndex\x18\t \x01(\x03\x12\x14\n\x0ciTotalVolume\x18\n \x01(\x03\x12\x11\n\tiTurnover\x18\x0b \x01(\x03\x12\x16\n\x0enPreCloseIndex\x18\x0c \x01(\x03\x12\x12\n\nnLocalTime\x18\r \x01(\x05\"\x90\x02\n\x0bTransaction\x12\x12\n\nszWindCode\x18\x01 \x01(\t\x12\x0e\n\x06szCode\x18\x02 \x01(\t\x12\x12\n\nnActionDay\x18\x03 \x01(\x05\x12\r\n\x05nTime\x18\x04 \x01(\x05\x12\x0e\n\x06nIndex\x18\x05 \x01(\x05\x12\x0e\n\x06nPrice\x18\x06 \x01(\x03\x12\x0f\n\x07nVolume\x18\x07 \x01(\x05\x12\x11\n\tnTurnover\x18\x08 \x01(\x03\x12\x0f\n\x07nBSFlag\x18\t \x01(\x05\x12\x13\n\x0b\x63hOrderKind\x18\n \x01(\x0c\x12\x16\n\x0e\x63hFunctionCode\x18\x0b \x01(\t\x12\x11\n\tnAskOrder\x18\x0c \x01(\x05\x12\x11\n\tnBidOrder\x18\r \x01(\x05\x12\x12\n\nnLocalTime\x18\x0e \x01(\x05\"\xf3\x01\n\x05Order\x12\x12\n\nszWindCode\x18\x01 \x01(\t\x12\x0e\n\x06szCode\x18\x02 \x01(\t\x12\x12\n\nnActionDay\x18\x03 \x01(\x05\x12\r\n\x05nTime\x18\x04 \x01(\x05\x12\x0e\n\x06nOrder\x18\x05 \x01(\x05\x12\x0e\n\x06nPrice\x18\x06 \x01(\x03\x12\x0f\n\x07nVolume\x18\x07 \x01(\x05\x12\x13\n\x0b\x63hOrderKind\x18\x08 \x01(\x0c\x12\x16\n\x0e\x63hFunctionCode\x18\t \x01(\t\x12\x0f\n\x07nBroker\x18\n \x01(\x05\x12\x10\n\x08\x63hStatus\x18\x0b \x01(\t\x12\x0e\n\x06\x63hFlag\x18\x0c \x01(\t\x12\x12\n\nnLocalTime\x18\r \x01(\x05\"\xbc\x01\n\nOrderQueue\x12\x12\n\nszWindCode\x18\x01 \x01(\t\x12\x0e\n\x06szCode\x18\x02 \x01(\t\x12\x12\n\nnActionDay\x18\x03 \x01(\x05\x12\r\n\x05nTime\x18\x04 \x01(\x05\x12\r\n\x05nSide\x18\x05 \x01(\x05\x12\x0e\n\x06nPrice\x18\x06 \x01(\x03\x12\x0f\n\x07nOrders\x18\x07 \x01(\x05\x12\x10\n\x08nABItems\x18\x08 \x01(\x05\x12\x11\n\tnABVolume\x18\t \x03(\x05\x12\x12\n\nnLocalTime\x18\n \x01(\x05\"3\n\tStockList\x12&\n\x06stocks\x18\x01 \x03(\x0b\x32\x16.quotation.stock.Stock\"\xd6\x01\n\x05Kline\x12\x0c\n\x04\x63ode\x18\x01 \x01(\t\x12\x0e\n\x06symbol\x18\x02 \x01(\t\x12\x0c\n\x04open\x18\x03 \x01(\x01\x12\x0c\n\x04high\x18\x04 \x01(\x01\x12\x0b\n\x03low\x18\x05 \x01(\x01\x12\r\n\x05\x63lose\x18\x06 \x01(\x01\x12\x0e\n\x06\x61mount\x18\x07 \x01(\x01\x12\x0b\n\x03vol\x18\x08 \x01(\x01\x12\x0e\n\x06\x63hange\x18\t \x01(\x01\x12\x11\n\tpre_close\x18\n \x01(\x01\x12\x12\n\ntrade_date\x18\x0b \x01(\x05\x12\x12\n\ntrade_time\x18\x0c \x01(\t\x12\x0f\n\x07pct_chg\x18\r \x01(\x01\"3\n\tKlineList\x12&\n\x06klines\x18\x01 \x03(\x0b\x32\x16.quotation.stock.Kline\"\xd5\x03\n\nMarketData\x12\'\n\x05stock\x18\x01 \x01(\x0b\x32\x16.quotation.stock.StockH\x00\x12)\n\x06\x66uture\x18\x02 \x01(\x0b\x32\x17.quotation.stock.FutureH\x00\x12\'\n\x05index\x18\x03 \x01(\x0b\x32\x16.quotation.stock.IndexH\x00\x12\x33\n\x0btransaction\x18\x04 \x01(\x0b\x32\x1c.quotation.stock.TransactionH\x00\x12\'\n\x05order\x18\x05 \x01(\x0b\x32\x16.quotation.stock.OrderH\x00\x12,\n\x05queue\x18\x06 \x01(\x0b\x32\x1b.quotation.stock.OrderQueueH\x00\x12)\n\x06option\x18\x07 \x01(\x0b\x32\x17.quotation.stock.OptionH\x00\x12.\n\x04type\x18\x08 \x01(\x0e\x32 
.quotation.stock.MarketData.Type\"[\n\x04Type\x12\t\n\x05STOCK\x10\x00\x12\t\n\x05INDEX\x10\x01\x12\n\n\x06\x46UTURE\x10\x02\x12\x0f\n\x0bTRANSACTION\x10\x03\x12\t\n\x05ORDER\x10\x04\x12\t\n\x05QUEUE\x10\x05\x12\n\n\x06OPTION\x10\x06\x42\x06\n\x04\x64\x61tab\x06proto3')
)
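# serialized_pb above is the compiled FileDescriptorProto for Quotation.proto.
# The Descriptor/EnumDescriptor objects declared below reference byte ranges
# inside that blob (serialized_start/serialized_end), so neither side should be
# edited by hand; regenerate the whole module with protoc if the .proto changes.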
_MARKETDATA_TYPE = _descriptor.EnumDescriptor(
name='Type',
full_name='quotation.stock.MarketData.Type',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='STOCK', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INDEX', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FUTURE', index=2, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='TRANSACTION', index=3, number=3,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ORDER', index=4, number=4,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='QUEUE', index=5, number=5,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='OPTION', index=6, number=6,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=4674,
serialized_end=4765,
)
_sym_db.RegisterEnumDescriptor(_MARKETDATA_TYPE)
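# Type enumerates the payload kinds carried by MarketData (declared further
# down in this module): STOCK, INDEX, FUTURE, TRANSACTION, ORDER, QUEUE and
# OPTION. MarketData keeps the actual payload in its 'data' oneof; the 'type'
# field presumably labels which oneof member is set, though nothing in the
# descriptor itself enforces that pairing.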
_STOCK = _descriptor.Descriptor(
name='Stock',
full_name='quotation.stock.Stock',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='szWindCode', full_name='quotation.stock.Stock.szWindCode', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='szCode', full_name='quotation.stock.Stock.szCode', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nActionDay', full_name='quotation.stock.Stock.nActionDay', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nTradingDay', full_name='quotation.stock.Stock.nTradingDay', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nTime', full_name='quotation.stock.Stock.nTime', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nStatus', full_name='quotation.stock.Stock.nStatus', index=5,
number=6, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nPreClose', full_name='quotation.stock.Stock.nPreClose', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nOpen', full_name='quotation.stock.Stock.nOpen', index=7,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nHigh', full_name='quotation.stock.Stock.nHigh', index=8,
number=9, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nLow', full_name='quotation.stock.Stock.nLow', index=9,
number=10, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nMatch', full_name='quotation.stock.Stock.nMatch', index=10,
number=11, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nAskPrice_0', full_name='quotation.stock.Stock.nAskPrice_0', index=11,
number=12, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nAskPrice_1', full_name='quotation.stock.Stock.nAskPrice_1', index=12,
number=13, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nAskPrice_2', full_name='quotation.stock.Stock.nAskPrice_2', index=13,
number=14, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nAskPrice_3', full_name='quotation.stock.Stock.nAskPrice_3', index=14,
number=15, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nAskPrice_4', full_name='quotation.stock.Stock.nAskPrice_4', index=15,
number=16, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nAskPrice_5', full_name='quotation.stock.Stock.nAskPrice_5', index=16,
number=17, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nAskPrice_6', full_name='quotation.stock.Stock.nAskPrice_6', index=17,
number=18, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nAskPrice_7', full_name='quotation.stock.Stock.nAskPrice_7', index=18,
number=19, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nAskPrice_8', full_name='quotation.stock.Stock.nAskPrice_8', index=19,
number=20, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nAskPrice_9', full_name='quotation.stock.Stock.nAskPrice_9', index=20,
number=21, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nAskVol_0', full_name='quotation.stock.Stock.nAskVol_0', index=21,
number=22, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nAskVol_1', full_name='quotation.stock.Stock.nAskVol_1', index=22,
number=23, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nAskVol_2', full_name='quotation.stock.Stock.nAskVol_2', index=23,
number=24, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nAskVol_3', full_name='quotation.stock.Stock.nAskVol_3', index=24,
number=25, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nAskVol_4', full_name='quotation.stock.Stock.nAskVol_4', index=25,
number=26, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nAskVol_5', full_name='quotation.stock.Stock.nAskVol_5', index=26,
number=27, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nAskVol_6', full_name='quotation.stock.Stock.nAskVol_6', index=27,
number=28, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nAskVol_7', full_name='quotation.stock.Stock.nAskVol_7', index=28,
number=29, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nAskVol_8', full_name='quotation.stock.Stock.nAskVol_8', index=29,
number=30, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nAskVol_9', full_name='quotation.stock.Stock.nAskVol_9', index=30,
number=31, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nBidPrice_0', full_name='quotation.stock.Stock.nBidPrice_0', index=31,
number=32, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nBidPrice_1', full_name='quotation.stock.Stock.nBidPrice_1', index=32,
number=33, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nBidPrice_2', full_name='quotation.stock.Stock.nBidPrice_2', index=33,
number=34, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nBidPrice_3', full_name='quotation.stock.Stock.nBidPrice_3', index=34,
number=35, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nBidPrice_4', full_name='quotation.stock.Stock.nBidPrice_4', index=35,
number=36, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nBidPrice_5', full_name='quotation.stock.Stock.nBidPrice_5', index=36,
number=37, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nBidPrice_6', full_name='quotation.stock.Stock.nBidPrice_6', index=37,
number=38, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nBidPrice_7', full_name='quotation.stock.Stock.nBidPrice_7', index=38,
number=39, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nBidPrice_8', full_name='quotation.stock.Stock.nBidPrice_8', index=39,
number=40, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nBidPrice_9', full_name='quotation.stock.Stock.nBidPrice_9', index=40,
number=41, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nBidVol_0', full_name='quotation.stock.Stock.nBidVol_0', index=41,
number=42, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nBidVol_1', full_name='quotation.stock.Stock.nBidVol_1', index=42,
number=43, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nBidVol_2', full_name='quotation.stock.Stock.nBidVol_2', index=43,
number=44, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nBidVol_3', full_name='quotation.stock.Stock.nBidVol_3', index=44,
number=45, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nBidVol_4', full_name='quotation.stock.Stock.nBidVol_4', index=45,
number=46, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nBidVol_5', full_name='quotation.stock.Stock.nBidVol_5', index=46,
number=47, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nBidVol_6', full_name='quotation.stock.Stock.nBidVol_6', index=47,
number=48, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nBidVol_7', full_name='quotation.stock.Stock.nBidVol_7', index=48,
number=49, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nBidVol_8', full_name='quotation.stock.Stock.nBidVol_8', index=49,
number=50, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nBidVol_9', full_name='quotation.stock.Stock.nBidVol_9', index=50,
number=51, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nNumTrades', full_name='quotation.stock.Stock.nNumTrades', index=51,
number=52, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='iVolume', full_name='quotation.stock.Stock.iVolume', index=52,
number=53, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='iTurnover', full_name='quotation.stock.Stock.iTurnover', index=53,
number=54, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nTotalBidVol', full_name='quotation.stock.Stock.nTotalBidVol', index=54,
number=55, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nTotalAskVol', full_name='quotation.stock.Stock.nTotalAskVol', index=55,
number=56, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nWeightedAvgBidPrice', full_name='quotation.stock.Stock.nWeightedAvgBidPrice', index=56,
number=57, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nWeightedAvgAskPrice', full_name='quotation.stock.Stock.nWeightedAvgAskPrice', index=57,
number=58, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nIOPV', full_name='quotation.stock.Stock.nIOPV', index=58,
number=59, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nYieldToMaturity', full_name='quotation.stock.Stock.nYieldToMaturity', index=59,
number=60, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nHighLimited', full_name='quotation.stock.Stock.nHighLimited', index=60,
number=61, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nLowLimited', full_name='quotation.stock.Stock.nLowLimited', index=61,
number=62, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='chPrefix', full_name='quotation.stock.Stock.chPrefix', index=62,
number=63, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nSyl1', full_name='quotation.stock.Stock.nSyl1', index=63,
number=64, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nSyl2', full_name='quotation.stock.Stock.nSyl2', index=64,
number=65, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nSD2', full_name='quotation.stock.Stock.nSD2', index=65,
number=66, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nLocalTime', full_name='quotation.stock.Stock.nLocalTime', index=66,
number=67, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=37,
serialized_end=1358,
)
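# ---------------------------------------------------------------------------
# _STOCK above is the descriptor for the Stock level-2 snapshot: action and
# trading day, intraday time and status, pre-close/open/high/low and the
# current match price (nMatch), ten levels of ask/bid price and volume
# (nAskPrice_0..9, nAskVol_0..9, nBidPrice_0..9, nBidVol_0..9), trade count,
# cumulative volume and turnover, total bid/ask volume, weighted-average
# quote prices, nIOPV, nYieldToMaturity, price limits, prefix and local
# receive time.  Prices and volumes are int64/int32 scalars, so they are
# presumably exchange-scaled integers rather than floats (an assumption; the
# descriptor itself does not say).
#
# Usage sketch (assumption: this generated module is importable as stock_pb2
# and the concrete Stock class is built further below by the protobuf
# runtime, as in standard generated _pb2 files):
#
#     import stock_pb2
#     snap = stock_pb2.Stock()
#     snap.nTradingDay = 20240102
#     snap.nTime = 93000000
#     snap.nMatch = 101500            # e.g. 10.15 if prices are scaled by 10**4
#     payload = snap.SerializeToString()
#     again = stock_pb2.Stock.FromString(payload)
#
# These are proto3 scalar fields (has_default_value=False throughout), so an
# unset field simply reads back as 0 or ''.
# ---------------------------------------------------------------------------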
_FUTURE = _descriptor.Descriptor(
name='Future',
full_name='quotation.stock.Future',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='quotation.stock.Future.code', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='quotation.stock.Future.exchange', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tradeDay', full_name='quotation.stock.Future.tradeDay', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='updateTime', full_name='quotation.stock.Future.updateTime', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='updateMilliSecond', full_name='quotation.stock.Future.updateMilliSecond', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='lastPrice', full_name='quotation.stock.Future.lastPrice', index=5,
number=6, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='volume', full_name='quotation.stock.Future.volume', index=6,
number=7, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='openPrice', full_name='quotation.stock.Future.openPrice', index=7,
number=8, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='highestPrice', full_name='quotation.stock.Future.highestPrice', index=8,
number=9, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='lowestPrice', full_name='quotation.stock.Future.lowestPrice', index=9,
number=10, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='closePrice', full_name='quotation.stock.Future.closePrice', index=10,
number=11, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='suttlementPrice', full_name='quotation.stock.Future.suttlementPrice', index=11,
number=12, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='limitUpPrice', full_name='quotation.stock.Future.limitUpPrice', index=12,
number=13, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='limitDownPrice', full_name='quotation.stock.Future.limitDownPrice', index=13,
number=14, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='turnover', full_name='quotation.stock.Future.turnover', index=14,
number=15, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='openInterest', full_name='quotation.stock.Future.openInterest', index=15,
number=16, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='preClosePrice', full_name='quotation.stock.Future.preClosePrice', index=16,
number=17, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='preSettlementPrice', full_name='quotation.stock.Future.preSettlementPrice', index=17,
number=18, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='preOpenInterest', full_name='quotation.stock.Future.preOpenInterest', index=18,
number=19, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='actionDay', full_name='quotation.stock.Future.actionDay', index=19,
number=20, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='askPrice1', full_name='quotation.stock.Future.askPrice1', index=20,
number=21, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='askPrice2', full_name='quotation.stock.Future.askPrice2', index=21,
number=22, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='askPrice3', full_name='quotation.stock.Future.askPrice3', index=22,
number=23, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='askPrice4', full_name='quotation.stock.Future.askPrice4', index=23,
number=24, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='askPrice5', full_name='quotation.stock.Future.askPrice5', index=24,
number=25, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='askVolume1', full_name='quotation.stock.Future.askVolume1', index=25,
number=26, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='askVolume2', full_name='quotation.stock.Future.askVolume2', index=26,
number=27, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='askVolume3', full_name='quotation.stock.Future.askVolume3', index=27,
number=28, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='askVolume4', full_name='quotation.stock.Future.askVolume4', index=28,
number=29, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='askVolume5', full_name='quotation.stock.Future.askVolume5', index=29,
number=30, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bidPrice1', full_name='quotation.stock.Future.bidPrice1', index=30,
number=31, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bidPrice2', full_name='quotation.stock.Future.bidPrice2', index=31,
number=32, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bidPrice3', full_name='quotation.stock.Future.bidPrice3', index=32,
number=33, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bidPrice4', full_name='quotation.stock.Future.bidPrice4', index=33,
number=34, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bidPrice5', full_name='quotation.stock.Future.bidPrice5', index=34,
number=35, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bidVolume1', full_name='quotation.stock.Future.bidVolume1', index=35,
number=36, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bidVolume2', full_name='quotation.stock.Future.bidVolume2', index=36,
number=37, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bidVolume3', full_name='quotation.stock.Future.bidVolume3', index=37,
number=38, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bidVolume4', full_name='quotation.stock.Future.bidVolume4', index=38,
number=39, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bidVolume5', full_name='quotation.stock.Future.bidVolume5', index=39,
number=40, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1361,
serialized_end=2179,
)
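# _FUTURE above is the descriptor for a five-level futures quote: contract
# code, exchange, trade/action day and update time strings, millisecond
# stamp, last/open/high/low/close, settlement and limit prices plus turnover
# as doubles, volume as int32, open interest and previous open interest as
# int64, previous close/settlement, and askPrice1-5 / askVolume1-5 /
# bidPrice1-5 / bidVolume1-5 depth fields.  The field name 'suttlementPrice'
# is spelled that way in the source .proto, so it is kept as-is here.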
_OPTION = _descriptor.Descriptor(
name='Option',
full_name='quotation.stock.Option',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='quotation.stock.Option.code', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exchange', full_name='quotation.stock.Option.exchange', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tradeDay', full_name='quotation.stock.Option.tradeDay', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='updateTime', full_name='quotation.stock.Option.updateTime', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='updateMilliSecond', full_name='quotation.stock.Option.updateMilliSecond', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='lastPrice', full_name='quotation.stock.Option.lastPrice', index=5,
number=6, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='volume', full_name='quotation.stock.Option.volume', index=6,
number=7, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='openPrice', full_name='quotation.stock.Option.openPrice', index=7,
number=8, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='highestPrice', full_name='quotation.stock.Option.highestPrice', index=8,
number=9, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='lowestPrice', full_name='quotation.stock.Option.lowestPrice', index=9,
number=10, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='closePrice', full_name='quotation.stock.Option.closePrice', index=10,
number=11, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='suttlementPrice', full_name='quotation.stock.Option.suttlementPrice', index=11,
number=12, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='limitUpPrice', full_name='quotation.stock.Option.limitUpPrice', index=12,
number=13, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='limitDownPrice', full_name='quotation.stock.Option.limitDownPrice', index=13,
number=14, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='turnover', full_name='quotation.stock.Option.turnover', index=14,
number=15, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='openInterest', full_name='quotation.stock.Option.openInterest', index=15,
number=16, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='preClosePrice', full_name='quotation.stock.Option.preClosePrice', index=16,
number=17, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='preSettlementPrice', full_name='quotation.stock.Option.preSettlementPrice', index=17,
number=18, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='preOpenInterest', full_name='quotation.stock.Option.preOpenInterest', index=18,
number=19, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='actionDay', full_name='quotation.stock.Option.actionDay', index=19,
number=20, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='askPrice1', full_name='quotation.stock.Option.askPrice1', index=20,
number=21, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='askPrice2', full_name='quotation.stock.Option.askPrice2', index=21,
number=22, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='askPrice3', full_name='quotation.stock.Option.askPrice3', index=22,
number=23, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='askPrice4', full_name='quotation.stock.Option.askPrice4', index=23,
number=24, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='askPrice5', full_name='quotation.stock.Option.askPrice5', index=24,
number=25, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='askVolume1', full_name='quotation.stock.Option.askVolume1', index=25,
number=26, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='askVolume2', full_name='quotation.stock.Option.askVolume2', index=26,
number=27, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='askVolume3', full_name='quotation.stock.Option.askVolume3', index=27,
number=28, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='askVolume4', full_name='quotation.stock.Option.askVolume4', index=28,
number=29, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='askVolume5', full_name='quotation.stock.Option.askVolume5', index=29,
number=30, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bidPrice1', full_name='quotation.stock.Option.bidPrice1', index=30,
number=31, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bidPrice2', full_name='quotation.stock.Option.bidPrice2', index=31,
number=32, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bidPrice3', full_name='quotation.stock.Option.bidPrice3', index=32,
number=33, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bidPrice4', full_name='quotation.stock.Option.bidPrice4', index=33,
number=34, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bidPrice5', full_name='quotation.stock.Option.bidPrice5', index=34,
number=35, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bidVolume1', full_name='quotation.stock.Option.bidVolume1', index=35,
number=36, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bidVolume2', full_name='quotation.stock.Option.bidVolume2', index=36,
number=37, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bidVolume3', full_name='quotation.stock.Option.bidVolume3', index=37,
number=38, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bidVolume4', full_name='quotation.stock.Option.bidVolume4', index=38,
number=39, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bidVolume5', full_name='quotation.stock.Option.bidVolume5', index=39,
number=40, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2182,
serialized_end=3000,
)
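# _OPTION above mirrors _FUTURE field-for-field: identical field names,
# numbers (1-40) and types, differing only in the message full name, so the
# two quote types can be handled by shared processing code.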
_INDEX = _descriptor.Descriptor(
name='Index',
full_name='quotation.stock.Index',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='szWindCode', full_name='quotation.stock.Index.szWindCode', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='szCode', full_name='quotation.stock.Index.szCode', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nActionDay', full_name='quotation.stock.Index.nActionDay', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nTradingDay', full_name='quotation.stock.Index.nTradingDay', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nTime', full_name='quotation.stock.Index.nTime', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nOpenIndex', full_name='quotation.stock.Index.nOpenIndex', index=5,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nHighIndex', full_name='quotation.stock.Index.nHighIndex', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nLowIndex', full_name='quotation.stock.Index.nLowIndex', index=7,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nLastIndex', full_name='quotation.stock.Index.nLastIndex', index=8,
number=9, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='iTotalVolume', full_name='quotation.stock.Index.iTotalVolume', index=9,
number=10, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='iTurnover', full_name='quotation.stock.Index.iTurnover', index=10,
number=11, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nPreCloseIndex', full_name='quotation.stock.Index.nPreCloseIndex', index=11,
number=12, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nLocalTime', full_name='quotation.stock.Index.nLocalTime', index=12,
number=13, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3003,
serialized_end=3266,
)
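# _INDEX above is the descriptor for an index snapshot: wind code, code,
# action/trading day and time as int32, open/high/low/last index levels,
# total volume, turnover and previous close as int64, and a local receive
# time.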
_TRANSACTION = _descriptor.Descriptor(
name='Transaction',
full_name='quotation.stock.Transaction',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='szWindCode', full_name='quotation.stock.Transaction.szWindCode', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='szCode', full_name='quotation.stock.Transaction.szCode', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nActionDay', full_name='quotation.stock.Transaction.nActionDay', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nTime', full_name='quotation.stock.Transaction.nTime', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nIndex', full_name='quotation.stock.Transaction.nIndex', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nPrice', full_name='quotation.stock.Transaction.nPrice', index=5,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nVolume', full_name='quotation.stock.Transaction.nVolume', index=6,
number=7, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nTurnover', full_name='quotation.stock.Transaction.nTurnover', index=7,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nBSFlag', full_name='quotation.stock.Transaction.nBSFlag', index=8,
number=9, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='chOrderKind', full_name='quotation.stock.Transaction.chOrderKind', index=9,
number=10, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='chFunctionCode', full_name='quotation.stock.Transaction.chFunctionCode', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nAskOrder', full_name='quotation.stock.Transaction.nAskOrder', index=11,
number=12, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nBidOrder', full_name='quotation.stock.Transaction.nBidOrder', index=12,
number=13, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nLocalTime', full_name='quotation.stock.Transaction.nLocalTime', index=13,
number=14, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3269,
serialized_end=3541,
)
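# _TRANSACTION above is the descriptor for a tick-by-tick trade record: code
# fields, action day, exchange time and record index, price and turnover as
# int64, volume, a buy/sell flag, order kind (bytes) and function code
# (string), the matched ask/bid order indices, and local receive time.
#
# Parsing sketch (assumption: 'raw' holds one serialized Transaction payload
# received from a feed, and the module is imported as stock_pb2 as above):
#
#     tick = stock_pb2.Transaction()
#     tick.ParseFromString(raw)
#     notional = tick.nPrice * tick.nVolume   # still in scaled integer units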
_ORDER = _descriptor.Descriptor(
name='Order',
full_name='quotation.stock.Order',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='szWindCode', full_name='quotation.stock.Order.szWindCode', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='szCode', full_name='quotation.stock.Order.szCode', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nActionDay', full_name='quotation.stock.Order.nActionDay', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nTime', full_name='quotation.stock.Order.nTime', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nOrder', full_name='quotation.stock.Order.nOrder', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nPrice', full_name='quotation.stock.Order.nPrice', index=5,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nVolume', full_name='quotation.stock.Order.nVolume', index=6,
number=7, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='chOrderKind', full_name='quotation.stock.Order.chOrderKind', index=7,
number=8, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='chFunctionCode', full_name='quotation.stock.Order.chFunctionCode', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nBroker', full_name='quotation.stock.Order.nBroker', index=9,
number=10, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='chStatus', full_name='quotation.stock.Order.chStatus', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='chFlag', full_name='quotation.stock.Order.chFlag', index=11,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nLocalTime', full_name='quotation.stock.Order.nLocalTime', index=12,
number=13, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3544,
serialized_end=3787,
)
_ORDERQUEUE = _descriptor.Descriptor(
name='OrderQueue',
full_name='quotation.stock.OrderQueue',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='szWindCode', full_name='quotation.stock.OrderQueue.szWindCode', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='szCode', full_name='quotation.stock.OrderQueue.szCode', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nActionDay', full_name='quotation.stock.OrderQueue.nActionDay', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nTime', full_name='quotation.stock.OrderQueue.nTime', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nSide', full_name='quotation.stock.OrderQueue.nSide', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nPrice', full_name='quotation.stock.OrderQueue.nPrice', index=5,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nOrders', full_name='quotation.stock.OrderQueue.nOrders', index=6,
number=7, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nABItems', full_name='quotation.stock.OrderQueue.nABItems', index=7,
number=8, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nABVolume', full_name='quotation.stock.OrderQueue.nABVolume', index=8,
number=9, type=5, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nLocalTime', full_name='quotation.stock.OrderQueue.nLocalTime', index=9,
number=10, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3790,
serialized_end=3978,
)
_STOCKLIST = _descriptor.Descriptor(
name='StockList',
full_name='quotation.stock.StockList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='stocks', full_name='quotation.stock.StockList.stocks', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3980,
serialized_end=4031,
)
_KLINE = _descriptor.Descriptor(
name='Kline',
full_name='quotation.stock.Kline',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='quotation.stock.Kline.code', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='symbol', full_name='quotation.stock.Kline.symbol', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='open', full_name='quotation.stock.Kline.open', index=2,
number=3, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='high', full_name='quotation.stock.Kline.high', index=3,
number=4, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='low', full_name='quotation.stock.Kline.low', index=4,
number=5, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='close', full_name='quotation.stock.Kline.close', index=5,
number=6, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='amount', full_name='quotation.stock.Kline.amount', index=6,
number=7, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='vol', full_name='quotation.stock.Kline.vol', index=7,
number=8, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='change', full_name='quotation.stock.Kline.change', index=8,
number=9, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pre_close', full_name='quotation.stock.Kline.pre_close', index=9,
number=10, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='trade_date', full_name='quotation.stock.Kline.trade_date', index=10,
number=11, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='trade_time', full_name='quotation.stock.Kline.trade_time', index=11,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pct_chg', full_name='quotation.stock.Kline.pct_chg', index=12,
number=13, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4034,
serialized_end=4248,
)
_KLINELIST = _descriptor.Descriptor(
name='KlineList',
full_name='quotation.stock.KlineList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='klines', full_name='quotation.stock.KlineList.klines', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4250,
serialized_end=4301,
)
_MARKETDATA = _descriptor.Descriptor(
name='MarketData',
full_name='quotation.stock.MarketData',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='stock', full_name='quotation.stock.MarketData.stock', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='future', full_name='quotation.stock.MarketData.future', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='index', full_name='quotation.stock.MarketData.index', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='transaction', full_name='quotation.stock.MarketData.transaction', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order', full_name='quotation.stock.MarketData.order', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='queue', full_name='quotation.stock.MarketData.queue', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='option', full_name='quotation.stock.MarketData.option', index=6,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='type', full_name='quotation.stock.MarketData.type', index=7,
number=8, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
_MARKETDATA_TYPE,
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='data', full_name='quotation.stock.MarketData.data',
index=0, containing_type=None, fields=[]),
],
serialized_start=4304,
serialized_end=4773,
)
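# Resolve cross-references that the descriptors above only declare lazily:
# message-typed fields are bound to their concrete message descriptors, the
# MarketData 'type' field is bound to its enum, and the members of the
# 'data' oneof are linked to their containing oneof descriptor.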
_STOCKLIST.fields_by_name['stocks'].message_type = _STOCK
_KLINELIST.fields_by_name['klines'].message_type = _KLINE
_MARKETDATA.fields_by_name['stock'].message_type = _STOCK
_MARKETDATA.fields_by_name['future'].message_type = _FUTURE
_MARKETDATA.fields_by_name['index'].message_type = _INDEX
_MARKETDATA.fields_by_name['transaction'].message_type = _TRANSACTION
_MARKETDATA.fields_by_name['order'].message_type = _ORDER
_MARKETDATA.fields_by_name['queue'].message_type = _ORDERQUEUE
_MARKETDATA.fields_by_name['option'].message_type = _OPTION
_MARKETDATA.fields_by_name['type'].enum_type = _MARKETDATA_TYPE
_MARKETDATA_TYPE.containing_type = _MARKETDATA
_MARKETDATA.oneofs_by_name['data'].fields.append(
_MARKETDATA.fields_by_name['stock'])
_MARKETDATA.fields_by_name['stock'].containing_oneof = _MARKETDATA.oneofs_by_name['data']
_MARKETDATA.oneofs_by_name['data'].fields.append(
_MARKETDATA.fields_by_name['future'])
_MARKETDATA.fields_by_name['future'].containing_oneof = _MARKETDATA.oneofs_by_name['data']
_MARKETDATA.oneofs_by_name['data'].fields.append(
_MARKETDATA.fields_by_name['index'])
_MARKETDATA.fields_by_name['index'].containing_oneof = _MARKETDATA.oneofs_by_name['data']
_MARKETDATA.oneofs_by_name['data'].fields.append(
_MARKETDATA.fields_by_name['transaction'])
_MARKETDATA.fields_by_name['transaction'].containing_oneof = _MARKETDATA.oneofs_by_name['data']
_MARKETDATA.oneofs_by_name['data'].fields.append(
_MARKETDATA.fields_by_name['order'])
_MARKETDATA.fields_by_name['order'].containing_oneof = _MARKETDATA.oneofs_by_name['data']
_MARKETDATA.oneofs_by_name['data'].fields.append(
_MARKETDATA.fields_by_name['queue'])
_MARKETDATA.fields_by_name['queue'].containing_oneof = _MARKETDATA.oneofs_by_name['data']
_MARKETDATA.oneofs_by_name['data'].fields.append(
_MARKETDATA.fields_by_name['option'])
_MARKETDATA.fields_by_name['option'].containing_oneof = _MARKETDATA.oneofs_by_name['data']
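# Expose every top-level message descriptor on the file descriptor under its
# simple name, then register the file descriptor with the symbol database.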
DESCRIPTOR.message_types_by_name['Stock'] = _STOCK
DESCRIPTOR.message_types_by_name['Future'] = _FUTURE
DESCRIPTOR.message_types_by_name['Option'] = _OPTION
DESCRIPTOR.message_types_by_name['Index'] = _INDEX
DESCRIPTOR.message_types_by_name['Transaction'] = _TRANSACTION
DESCRIPTOR.message_types_by_name['Order'] = _ORDER
DESCRIPTOR.message_types_by_name['OrderQueue'] = _ORDERQUEUE
DESCRIPTOR.message_types_by_name['StockList'] = _STOCKLIST
DESCRIPTOR.message_types_by_name['Kline'] = _KLINE
DESCRIPTOR.message_types_by_name['KlineList'] = _KLINELIST
DESCRIPTOR.message_types_by_name['MarketData'] = _MARKETDATA
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
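# Build the concrete Python message classes from the descriptors via the
# reflection machinery and register each class with the symbol database so it
# can be looked up by its full name (e.g. 'quotation.stock.MarketData').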
Stock = _reflection.GeneratedProtocolMessageType('Stock', (_message.Message,), dict(
DESCRIPTOR = _STOCK,
__module__ = 'Quotation_pb2'
# @@protoc_insertion_point(class_scope:quotation.stock.Stock)
))
_sym_db.RegisterMessage(Stock)
Future = _reflection.GeneratedProtocolMessageType('Future', (_message.Message,), dict(
DESCRIPTOR = _FUTURE,
__module__ = 'Quotation_pb2'
# @@protoc_insertion_point(class_scope:quotation.stock.Future)
))
_sym_db.RegisterMessage(Future)
Option = _reflection.GeneratedProtocolMessageType('Option', (_message.Message,), dict(
DESCRIPTOR = _OPTION,
__module__ = 'Quotation_pb2'
# @@protoc_insertion_point(class_scope:quotation.stock.Option)
))
_sym_db.RegisterMessage(Option)
Index = _reflection.GeneratedProtocolMessageType('Index', (_message.Message,), dict(
DESCRIPTOR = _INDEX,
__module__ = 'Quotation_pb2'
# @@protoc_insertion_point(class_scope:quotation.stock.Index)
))
_sym_db.RegisterMessage(Index)
Transaction = _reflection.GeneratedProtocolMessageType('Transaction', (_message.Message,), dict(
DESCRIPTOR = _TRANSACTION,
__module__ = 'Quotation_pb2'
# @@protoc_insertion_point(class_scope:quotation.stock.Transaction)
))
_sym_db.RegisterMessage(Transaction)
Order = _reflection.GeneratedProtocolMessageType('Order', (_message.Message,), dict(
DESCRIPTOR = _ORDER,
__module__ = 'Quotation_pb2'
# @@protoc_insertion_point(class_scope:quotation.stock.Order)
))
_sym_db.RegisterMessage(Order)
OrderQueue = _reflection.GeneratedProtocolMessageType('OrderQueue', (_message.Message,), dict(
DESCRIPTOR = _ORDERQUEUE,
__module__ = 'Quotation_pb2'
# @@protoc_insertion_point(class_scope:quotation.stock.OrderQueue)
))
_sym_db.RegisterMessage(OrderQueue)
StockList = _reflection.GeneratedProtocolMessageType('StockList', (_message.Message,), dict(
DESCRIPTOR = _STOCKLIST,
__module__ = 'Quotation_pb2'
# @@protoc_insertion_point(class_scope:quotation.stock.StockList)
))
_sym_db.RegisterMessage(StockList)
Kline = _reflection.GeneratedProtocolMessageType('Kline', (_message.Message,), dict(
DESCRIPTOR = _KLINE,
__module__ = 'Quotation_pb2'
# @@protoc_insertion_point(class_scope:quotation.stock.Kline)
))
_sym_db.RegisterMessage(Kline)
KlineList = _reflection.GeneratedProtocolMessageType('KlineList', (_message.Message,), dict(
DESCRIPTOR = _KLINELIST,
__module__ = 'Quotation_pb2'
# @@protoc_insertion_point(class_scope:quotation.stock.KlineList)
))
_sym_db.RegisterMessage(KlineList)
MarketData = _reflection.GeneratedProtocolMessageType('MarketData', (_message.Message,), dict(
DESCRIPTOR = _MARKETDATA,
__module__ = 'Quotation_pb2'
# @@protoc_insertion_point(class_scope:quotation.stock.MarketData)
))
_sym_db.RegisterMessage(MarketData)
# @@protoc_insertion_point(module_scope)
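# Illustrative usage sketch (not emitted by protoc): shows how the generated
# classes above might be constructed, serialized and parsed. Field names are
# taken from the descriptors in this module; the concrete values are made up.
if __name__ == '__main__':
# Build a MarketData message carrying an Order in its 'data' oneof.
md = MarketData()
md.order.szWindCode = '600000.SH'
md.order.szCode = '600000'
md.order.nActionDay = 20240102
md.order.nTime = 93000000
md.order.nPrice = 1012
md.order.nVolume = 500
# Assigning into 'order' selects that branch of the oneof.
assert md.WhichOneof('data') == 'order'
# Round-trip through the wire format.
payload = md.SerializeToString()
parsed = MarketData()
parsed.ParseFromString(payload)
assert parsed.order.szWindCode == '600000.SH'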