Dataset columns: id (string, 1 to 265 chars), text (string, 6 to 5.19M chars), dataset_id (string, 7 classes).
/MXFusion-0.3.1.tar.gz/MXFusion-0.3.1/mxfusion/components/variables/runtime_variable.py
from mxnet.ndarray.ndarray import NDArray
from mxnet.symbol.symbol import Symbol


def add_sample_dimension(F, array):
    """
    Add an extra dimension with shape one in the front (axis 0) of an array representing samples.

    :param F: the execution mode of MXNet.
    :type F: mxnet.ndarray or mxnet.symbol
    :param array: the array that the extra dimension is added to.
    :type array: MXNet NDArray or MXNet Symbol
    :returns: the array with the extra dimension.
    :rtype: the same type as the input array
    """
    return F.expand_dims(array, axis=0)


def add_sample_dimension_to_arrays(F, arrays, out=None):
    """
    Add the sample dimension to a dict of arrays.

    :param F: the execution mode of MXNet.
    :type F: mxnet.ndarray or mxnet.symbol
    :param arrays: a dictionary of MXNet arrays.
    :type arrays: {UUID: array}
    :param out: (optional) if not None, add processed arrays into out.
    :type out: dict
    """
    processed_arrays = {
        uuid: add_sample_dimension(F, v) if isinstance(v, (NDArray, Symbol))
        else v for uuid, v in arrays.items()}
    if out is not None:
        out.update(processed_arrays)
    return processed_arrays


def expectation(F, array):
    """
    Return the expectation across the samples if the variable is a set of samples, otherwise return the variable.

    :param F: the MXNet execution mode.
    :type F: mxnet.ndarray or mxnet.symbol
    :param array: the array of samples to average over (axis 0).
    :type array: MXNet NDArray or MXNet Symbol
    """
    return F.mean(array, axis=0)


def array_has_samples(F, array):
    """
    Check if the array is a set of samples.

    :returns: True if the array is a set of samples.
    :rtype: boolean
    """
    # TODO: replace array.shape with F.shape_array
    return array.shape[0] > 1


def get_num_samples(F, array):
    """
    Get the number of samples in the provided array. If the array is not a set of samples, the return value will be one.

    :returns: the number of samples.
    :rtype: int
    """
    # TODO: replace array.shape with F.shape_array
    return array.shape[0]


def as_samples(F, array, num_samples):
    """
    Broadcast the variable as if it is a sampled variable. If the variable is already a sampled variable, it directly returns the data reference.

    :param F: the execution mode of MXNet.
    :type F: mxnet.ndarray or mxnet.symbol
    :param array: the array to operate on.
    :type array: MXNet NDArray or MXNet Symbol
    :param num_samples: the number of samples.
    :type num_samples: int
    """
    if array_has_samples(F, array):
        return array
    else:
        return F.broadcast_axis(array, axis=0, size=num_samples)


def arrays_as_samples(F, arrays):
    """
    Broadcast the dimension of samples for a list of variables. If the number of samples of at least one of the variables is larger than one, all the variables in the list are broadcast to have the same number of samples.

    :param F: the execution mode of MXNet.
    :type F: mxnet.ndarray or mxnet.symbol
    :param arrays: a list of arrays with samples to be broadcast.
    :type arrays: [MXNet NDArray or MXNet Symbol or {str: MXNet NDArray or MXNet Symbol}]
    :returns: the list of variables after broadcasting.
    :rtype: [MXNet NDArray or MXNet Symbol or {str: MXNet NDArray or MXNet Symbol}]
    """
    num_samples = [max([get_num_samples(F, v) for v in a.values()])
                   if isinstance(a, dict) else get_num_samples(F, a)
                   for a in arrays]
    max_num_samples = max(num_samples)
    if max_num_samples > 1:
        return [{k: as_samples(F, v, max_num_samples) for k, v in a.items()}
                if isinstance(a, dict) else as_samples(F, a, max_num_samples)
                for a in arrays]
    else:
        return arrays
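A minimal usage sketch of the helpers above, assuming an installed mxnet and the functions imported from this module; the shapes and values are illustrative only:

import mxnet as mx
from mxnet import ndarray as F

x = F.array([1.0, 2.0, 3.0])            # a plain (3,)-shaped variable
y = mx.nd.random.normal(shape=(5, 3))   # already carries 5 samples on axis 0

x = add_sample_dimension(F, x)          # shape (1, 3): one "sample" in front
x, y = arrays_as_samples(F, [x, y])     # x is broadcast to shape (5, 3)
print(get_num_samples(F, x))            # 5
print(expectation(F, y).shape)          # (3,): mean over the sample axis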
PypiClean
/FreePyBX-1.0-RC1.tar.gz/FreePyBX-1.0-RC1/freepybx/public/js/dojox/charting/widget/Legend.js.uncompressed.js
define("dojox/charting/widget/Legend", ["dojo/_base/lang", "dojo/_base/html", "dojo/_base/declare", "dijit/_Widget", "dojox/gfx","dojo/_base/array", "dojox/lang/functional", "dojox/lang/functional/array", "dojox/lang/functional/fold", "dojo/dom", "dojo/dom-construct", "dojo/dom-class","dijit/_base/manager"], function(lang, html, declare, Widget, gfx, arrayUtil, df, dfa, dff, dom, domFactory, domClass, widgetManager){ /*===== var Widget = dijit._Widget; =====*/ var REVERSED_SERIES = /\.(StackedColumns|StackedAreas|ClusteredBars)$/; return declare("dojox.charting.widget.Legend", Widget, { // summary: A legend for a chart. A legend contains summary labels for // each series of data contained in the chart. // // Set the horizontal attribute to boolean false to layout legend labels vertically. // Set the horizontal attribute to a number to layout legend labels in horizontal // rows each containing that number of labels (except possibly the last row). // // (Line or Scatter charts (colored lines with shape symbols) ) // -o- Series1 -X- Series2 -v- Series3 // // (Area/Bar/Pie charts (letters represent colors)) // [a] Series1 [b] Series2 [c] Series3 chartRef: "", horizontal: true, swatchSize: 18, legendBody: null, postCreate: function(){ if(!this.chart){ if(!this.chartRef){ return; } this.chart = widgetManager.byId(this.chartRef); if(!this.chart){ var node = dom.byId(this.chartRef); if(node){ this.chart = widgetManager.byNode(node); }else{ console.log("Could not find chart instance with id: " + this.chartRef); return; } } this.series = this.chart.chart.series; }else{ this.series = this.chart.series; } this.refresh(); }, buildRendering: function(){ this.domNode = domFactory.create("table", {role: "group", "aria-label": "chart legend", "class": "dojoxLegendNode"}); this.legendBody = domFactory.create("tbody", null, this.domNode); this.inherited(arguments); }, refresh: function(){ // summary: regenerates the legend to reflect changes to the chart // cleanup if(this._surfaces){ arrayUtil.forEach(this._surfaces, function(surface){ surface.destroy(); }); } this._surfaces = []; while(this.legendBody.lastChild){ domFactory.destroy(this.legendBody.lastChild); } if(this.horizontal){ domClass.add(this.domNode, "dojoxLegendHorizontal"); // make a container <tr> this._tr = domFactory.create("tr", null, this.legendBody); this._inrow = 0; } var s = this.series; if(s.length == 0){ return; } if(s[0].chart.stack[0].declaredClass == "dojox.charting.plot2d.Pie"){ var t = s[0].chart.stack[0]; if(typeof t.run.data[0] == "number"){ var filteredRun = df.map(t.run.data, "Math.max(x, 0)"); if(df.every(filteredRun, "<= 0")){ return; } var slices = df.map(filteredRun, "/this", df.foldl(filteredRun, "+", 0)); arrayUtil.forEach(slices, function(x, i){ this._addLabel(t.dyn[i], t._getLabel(x * 100) + "%"); }, this); }else{ arrayUtil.forEach(t.run.data, function(x, i){ this._addLabel(t.dyn[i], x.legend || x.text || x.y); }, this); } }else{ if(this._isReversal()){ s = s.slice(0).reverse(); } arrayUtil.forEach(s, function(x){ this._addLabel(x.dyn, x.legend || x.name); }, this); } }, _addLabel: function(dyn, label){ // create necessary elements var wrapper = domFactory.create("td"), icon = domFactory.create("div", null, wrapper), text = domFactory.create("label", null, wrapper), div = domFactory.create("div", { style: { "width": this.swatchSize + "px", "height":this.swatchSize + "px", "float": "left" } }, icon); domClass.add(icon, "dojoxLegendIcon dijitInline"); domClass.add(text, "dojoxLegendText"); // create a skeleton 
if(this._tr){ // horizontal this._tr.appendChild(wrapper); if(++this._inrow === this.horizontal){ // make a fresh container <tr> this._tr = domFactory.create("tr", null, this.legendBody); this._inrow = 0; } }else{ // vertical var tr = domFactory.create("tr", null, this.legendBody); tr.appendChild(wrapper); } // populate the skeleton this._makeIcon(div, dyn); text.innerHTML = String(label); text.dir = this.getTextDir(label, text.dir); }, _makeIcon: function(div, dyn){ var mb = { h: this.swatchSize, w: this.swatchSize }; var surface = gfx.createSurface(div, mb.w, mb.h); this._surfaces.push(surface); if(dyn.fill){ // regions surface.createRect({x: 2, y: 2, width: mb.w - 4, height: mb.h - 4}). setFill(dyn.fill).setStroke(dyn.stroke); }else if(dyn.stroke || dyn.marker){ // draw line var line = {x1: 0, y1: mb.h / 2, x2: mb.w, y2: mb.h / 2}; if(dyn.stroke){ surface.createLine(line).setStroke(dyn.stroke); } if(dyn.marker){ // draw marker on top var c = {x: mb.w / 2, y: mb.h / 2}; if(dyn.stroke){ surface.createPath({path: "M" + c.x + " " + c.y + " " + dyn.marker}). setFill(dyn.stroke.color).setStroke(dyn.stroke); }else{ surface.createPath({path: "M" + c.x + " " + c.y + " " + dyn.marker}). setFill(dyn.color).setStroke(dyn.color); } } }else{ // nothing surface.createRect({x: 2, y: 2, width: mb.w - 4, height: mb.h - 4}). setStroke("black"); surface.createLine({x1: 2, y1: 2, x2: mb.w - 2, y2: mb.h - 2}).setStroke("black"); surface.createLine({x1: 2, y1: mb.h - 2, x2: mb.w - 2, y2: 2}).setStroke("black"); } }, _isReversal: function(){ return (!this.horizontal) && arrayUtil.some(this.chart.stack, function(item){ return REVERSED_SERIES.test(item.declaredClass); }); } }); });
PypiClean
/Euphorie-15.0.2.tar.gz/Euphorie-15.0.2/src/euphorie/client/resources/oira/script/chunks/62382.14d0bf77192074a7b11c.min.js
"use strict";(self.webpackChunk_patternslib_patternslib=self.webpackChunk_patternslib_patternslib||[]).push([[62382],{95807:function(n,e,t){var o=t(87537),r=t.n(o),s=t(23645),a=t.n(s)()(r());a.push([n.id,".hljs-comment,.hljs-quote{color:#969896}.hljs-variable,.hljs-template-variable,.hljs-tag,.hljs-name,.hljs-selector-id,.hljs-selector-class,.hljs-regexp,.hljs-deletion{color:#d54e53}.hljs-number,.hljs-built_in,.hljs-builtin-name,.hljs-literal,.hljs-type,.hljs-params,.hljs-meta,.hljs-link{color:#e78c45}.hljs-attribute{color:#e7c547}.hljs-string,.hljs-symbol,.hljs-bullet,.hljs-addition{color:#b9ca4a}.hljs-title,.hljs-section{color:#7aa6da}.hljs-keyword,.hljs-selector-tag{color:#c397d8}.hljs{display:block;overflow-x:auto;background:#000;color:#eaeaea;padding:.5em}.hljs-emphasis{font-style:italic}.hljs-strong{font-weight:bold}","",{version:3,sources:["webpack://./node_modules/highlight.js/styles/tomorrow-night-bright.css"],names:[],mappings:"AAKA,0BAEE,aAAA,CAIF,+HAQE,aAAA,CAIF,2GAQE,aAAA,CAIF,gBACE,aAAA,CAIF,sDAIE,aAAA,CAIF,0BAEE,aAAA,CAIF,iCAEE,aAAA,CAGF,MACE,aAAA,CACA,eAAA,CACA,eAAA,CACA,aAAA,CACA,YAAA,CAGF,eACE,iBAAA,CAGF,aACE,gBAAA",sourcesContent:["/* Tomorrow Night Bright Theme */\n/* Original theme - https://github.com/chriskempson/tomorrow-theme */\n/* http://jmblog.github.com/color-themes-for-google-code-highlightjs */\n\n/* Tomorrow Comment */\n.hljs-comment,\n.hljs-quote {\n color: #969896;\n}\n\n/* Tomorrow Red */\n.hljs-variable,\n.hljs-template-variable,\n.hljs-tag,\n.hljs-name,\n.hljs-selector-id,\n.hljs-selector-class,\n.hljs-regexp,\n.hljs-deletion {\n color: #d54e53;\n}\n\n/* Tomorrow Orange */\n.hljs-number,\n.hljs-built_in,\n.hljs-builtin-name,\n.hljs-literal,\n.hljs-type,\n.hljs-params,\n.hljs-meta,\n.hljs-link {\n color: #e78c45;\n}\n\n/* Tomorrow Yellow */\n.hljs-attribute {\n color: #e7c547;\n}\n\n/* Tomorrow Green */\n.hljs-string,\n.hljs-symbol,\n.hljs-bullet,\n.hljs-addition {\n color: #b9ca4a;\n}\n\n/* Tomorrow Blue */\n.hljs-title,\n.hljs-section {\n color: #7aa6da;\n}\n\n/* Tomorrow Purple */\n.hljs-keyword,\n.hljs-selector-tag {\n color: #c397d8;\n}\n\n.hljs {\n display: block;\n overflow-x: auto;\n background: black;\n color: #eaeaea;\n padding: 0.5em;\n}\n\n.hljs-emphasis {\n font-style: italic;\n}\n\n.hljs-strong {\n font-weight: bold;\n}\n"],sourceRoot:""}]),e.Z=a},23645:function(n){n.exports=function(n){var e=[];return e.toString=function(){return this.map((function(e){var t="",o=void 0!==e[5];return e[4]&&(t+="@supports (".concat(e[4],") {")),e[2]&&(t+="@media ".concat(e[2]," {")),o&&(t+="@layer".concat(e[5].length>0?" ".concat(e[5]):""," {")),t+=n(e),o&&(t+="}"),e[2]&&(t+="}"),e[4]&&(t+="}"),t})).join("")},e.i=function(n,t,o,r,s){"string"==typeof n&&(n=[[null,n,void 0]]);var a={};if(o)for(var l=0;l<this.length;l++){var c=this[l][0];null!=c&&(a[c]=!0)}for(var i=0;i<n.length;i++){var u=[].concat(n[i]);o&&a[u[0]]||(void 0!==s&&(void 0===u[5]||(u[1]="@layer".concat(u[5].length>0?" 
".concat(u[5]):""," {").concat(u[1],"}")),u[5]=s),t&&(u[2]?(u[1]="@media ".concat(u[2]," {").concat(u[1],"}"),u[2]=t):u[2]=t),r&&(u[4]?(u[1]="@supports (".concat(u[4],") {").concat(u[1],"}"),u[4]=r):u[4]="".concat(r)),e.push(u))}},e}},87537:function(n){n.exports=function(n){var e=n[1],t=n[3];if(!t)return e;if("function"==typeof btoa){var o=btoa(unescape(encodeURIComponent(JSON.stringify(t)))),r="sourceMappingURL=data:application/json;charset=utf-8;base64,".concat(o),s="/*# ".concat(r," */");return[e].concat([s]).join("\n")}return[e].join("\n")}},62382:function(n,e,t){t.r(e);var o=t(93379),r=t.n(o),s=t(7795),a=t.n(s),l=t(3565),c=t.n(l),i=t(19216),u=t.n(i),h=t(44589),p=t.n(h),d=t(95807),f={};f.styleTagTransform=p(),f.setAttributes=c(),f.insert=function(n){var e=document.head.querySelectorAll("*")[0];e?document.head.insertBefore(n,e):document.head.append(n)},f.domAPI=a(),f.insertStyleElement=u();r()(d.Z,f);e.default=d.Z&&d.Z.locals?d.Z.locals:void 0},93379:function(n){var e=[];function t(n){for(var t=-1,o=0;o<e.length;o++)if(e[o].identifier===n){t=o;break}return t}function o(n,o){for(var s={},a=[],l=0;l<n.length;l++){var c=n[l],i=o.base?c[0]+o.base:c[0],u=s[i]||0,h="".concat(i," ").concat(u);s[i]=u+1;var p=t(h),d={css:c[1],media:c[2],sourceMap:c[3],supports:c[4],layer:c[5]};if(-1!==p)e[p].references++,e[p].updater(d);else{var f=r(d,o);o.byIndex=l,e.splice(l,0,{identifier:h,updater:f,references:1})}a.push(h)}return a}function r(n,e){var t=e.domAPI(e);t.update(n);return function(e){if(e){if(e.css===n.css&&e.media===n.media&&e.sourceMap===n.sourceMap&&e.supports===n.supports&&e.layer===n.layer)return;t.update(n=e)}else t.remove()}}n.exports=function(n,r){var s=o(n=n||[],r=r||{});return function(n){n=n||[];for(var a=0;a<s.length;a++){var l=t(s[a]);e[l].references--}for(var c=o(n,r),i=0;i<s.length;i++){var u=t(s[i]);0===e[u].references&&(e[u].updater(),e.splice(u,1))}s=c}}},19216:function(n){n.exports=function(n){var e=document.createElement("style");return n.setAttributes(e,n.attributes),n.insert(e,n.options),e}},3565:function(n,e,t){n.exports=function(n){var e=t.nc;e&&n.setAttribute("nonce",e)}},7795:function(n){n.exports=function(n){if("undefined"==typeof document)return{update:function(){},remove:function(){}};var e=n.insertStyleElement(n);return{update:function(t){!function(n,e,t){var o="";t.supports&&(o+="@supports (".concat(t.supports,") {")),t.media&&(o+="@media ".concat(t.media," {"));var r=void 0!==t.layer;r&&(o+="@layer".concat(t.layer.length>0?" ".concat(t.layer):""," {")),o+=t.css,r&&(o+="}"),t.media&&(o+="}"),t.supports&&(o+="}");var s=t.sourceMap;s&&"undefined"!=typeof btoa&&(o+="\n/*# sourceMappingURL=data:application/json;base64,".concat(btoa(unescape(encodeURIComponent(JSON.stringify(s))))," */")),e.styleTagTransform(o,n,e.options)}(e,n,t)},remove:function(){!function(n){if(null===n.parentNode)return!1;n.parentNode.removeChild(n)}(e)}}}},44589:function(n){n.exports=function(n,e){if(e.styleSheet)e.styleSheet.cssText=n;else{for(;e.firstChild;)e.removeChild(e.firstChild);e.appendChild(document.createTextNode(n))}}}}]); //# sourceMappingURL=62382.14d0bf77192074a7b11c.min.js.map
PypiClean
/FlyForms-1.0.0b1.tar.gz/FlyForms-1.0.0b1/docs/build/html/searchindex.js
Search.setIndex({envversion:46,filenames:["api","changes","concept","index","indices"],objects:{"flyforms.core":{Form:[0,2,1,""],FormField:[0,2,1,""],FormMeta:[0,2,1,""],FormMetaOptions:[0,2,1,""],UnboundField:[0,2,1,""],UnboundForm:[0,2,1,""]},"flyforms.core.Form":{"_fields":[0,1,1,""],"_meta":[0,1,1,""],"_raw_data":[0,1,1,""],is_bound:[0,1,1,""],is_valid:[0,1,1,""],to_python:[0,3,1,""],validate:[0,6,1,""]},"flyforms.core.FormField":{"__get__":[0,3,1,""],"__set__":[0,3,1,""]},"flyforms.core.FormMetaOptions":{skip_extra:[0,1,1,""],unbound_field_render:[0,1,1,""]},"flyforms.fields":{ArrayField:[0,2,1,""],BooleanField:[0,2,1,""],DatetimeField:[0,2,1,""],DictField:[0,2,1,""],EmailField:[0,2,1,""],EmbeddedFormField:[0,2,1,""],Field:[0,2,1,""],FloatField:[0,2,1,""],IntField:[0,2,1,""],Ip4Field:[0,2,1,""],ListField:[0,2,1,""],SelectField:[0,2,1,""],SequenceField:[0,2,1,""],StringField:[0,2,1,""]},"flyforms.fields.Field":{"default":[0,1,1,""],base_validators:[0,1,1,""],bind:[0,3,1,""],custom_validators:[0,1,1,""],field_binding_hook:[0,5,1,""],field_validation_hook:[0,5,1,""],required:[0,1,1,""],validate:[0,3,1,""],validators:[0,1,1,""],value_types:[0,1,1,""],wrapper:[0,1,1,""]},"flyforms.form":{from_json:[0,4,1,""],to_json:[0,4,1,""],validate_json:[0,4,1,""]},"flyforms.validators":{EmailValidator:[0,2,1,""],EntryValidator:[0,2,1,""],Ip4AddressValidator:[0,2,1,""],MaxLengthValidator:[0,2,1,""],MaxValueValidator:[0,2,1,""],MinLengthValidator:[0,2,1,""],MinValueValidator:[0,2,1,""],RegexValidator:[0,2,1,""],RequiredValidator:[0,2,1,""],SimpleValidator:[0,2,1,""],TypeValidator:[0,2,1,""],ValidationError:[0,2,1,""],Validator:[0,2,1,""]},flyforms:{core:[0,0,0,"-"],fields:[0,0,0,"-"],form:[0,0,0,"-"],validators:[0,0,0,"-"]}},objnames:{"0":["py","module","Python module"],"1":["py","attribute","Python attribute"],"2":["py","class","Python class"],"3":["py","method","Python method"],"4":["py","function","Python function"],"5":["py","staticmethod","Python static method"],"6":["py","classmethod","Python class 
method"]},objtypes:{"0":"py:module","1":"py:attribute","2":"py:class","3":"py:method","4":"py:function","5":"py:staticmethod","6":"py:classmethod"},terms:{"__get__":0,"__init__":0,"__main__":[0,2],"__name__":[0,2],"__set__":0,"_field":0,"_meta":0,"_raw_data":[0,1],"abstract":0,"boolean":0,"case":0,"default":[0,1,2],"float":0,"function":0,"import":[0,2],"int":0,"new":[],"null":0,"return":[0,1],"static":0,"true":[0,2],"try":0,"while":2,about:[0,1,2],access:0,add:1,addit:[0,1],address:0,advanc:1,all:[0,1,2],allow:0,alreadi:0,also:[0,3],ani:0,arg:0,argument:[0,1],arrayfield:[0,1],attach:0,attribut:[0,1],attributeerror:0,author:2,avail:[0,1],avoid:0,awai:2,bad:0,base:[0,2],base_valid:0,basestr:0,basic:1,been:[0,1],befor:1,behavior:[0,1],behaviour:[0,1],below:1,better:0,between:2,bind:[0,1],bitbucket:[1,3],bool:0,booleanfield:0,bound:[0,1],bring:0,bug:[1,3],cach:0,call:0,callabl:0,can:[0,1,2,3],caus:0,certain:2,chain:1,chang:[],check:[0,2],child:0,child_field:0,choic:0,classmethod:0,code:[0,1,2],collect:[0,2],com:[0,2],comment:0,commentform:0,common:[0,1,2],common_list:0,comparison:0,complet:1,complianc:0,complic:0,condit:0,consist:2,constitut:0,construct:0,constructor:[0,2],contact:3,contain:[0,2],containerform:0,content:3,convert:0,core:[0,1,2],correspond:0,creat:[0,2],cred:0,custom_valid:0,datetim:0,datetimefield:[0,1],decid:0,declar:0,decod:0,decor:0,def:0,definit:[0,2],depend:0,deprec:1,descriptor:[0,1],dict:0,dictfield:[0,1],dictionari:0,discard:2,distribut:1,document:1,don:0,done:0,dump:0,duplic:0,dure:0,each:0,earlier:2,easi:3,easiest:3,easili:2,effect:0,els:0,email:[0,2],emailfield:[0,2],emailvalid:0,embeddedform:0,embeddedformfield:[0,1],encapsul:[0,1],encod:0,entryvalid:0,error:[0,2],etc:0,even:0,eventu:0,everi:0,everyth:0,exampl:[0,2],except:[0,1],express:0,extend:[],extra:0,extra_field1:0,extra_field2:0,fail:0,fals:[0,2],fast:3,father:0,feedback:1,field1:0,field2:0,field_binding_hook:0,field_obj:0,field_typ:0,field_validation_hook:0,find:0,finish:0,first:[1,2],first_nam:[0,2],fix:1,flag:0,flexibl:3,floatfield:0,fmt:0,follow:2,form_cl:0,format:0,formfield:[0,1],formmeta:[0,1],formmetaopt:[0,1],found:3,from:[0,2],from_json:0,frozendict:0,full:0,fulli:1,gener:[0,1],get:[0,1,2],given:[0,2],global:1,gmail:[0,2],goal:0,got:0,grandpar:0,grandparent_field:0,greater:0,handl:0,have:[0,1,2,3],held:1,hello:0,high:3,highest:0,hook:0,hope:1,immut:[0,1],improv:[1,3],index:4,indic:[],inform:[0,1,2],inherit:[0,1,2],initi:[0,1],input:0,instanc:[0,1,2],instanti:[0,2],instead:1,intern:0,interpret:0,intfield:0,invalid:0,ip4addressvalid:0,ip4field:0,ipv4:0,is_bound:0,is_valid:[0,1,2],issu:1,item:0,item_typ:0,item_valid:0,itemtypedvalid:1,iter:0,john:0,json:0,json_str:0,jsonifi:0,jsonify_list:0,jsonify_typ:0,jsonitemtypedvalid:1,just:[0,1],kwarg:0,last_nam:[0,2],length:0,less:0,let:2,librari:[1,3],lightweight:3,like:0,list:[0,1],list_field:0,listfield:[0,1],listform:0,load:0,logic:[0,1],login:0,login_data:0,loginform:[0,2],look:0,mai:0,main:[0,2],make:0,mani:1,map:0,match:0,max_length:[0,2],max_valu:0,maximum:0,maxlengthvalid:0,maxvaluevalid:0,mechan:0,meta:0,metaclass:0,method:[0,1],min_length:[0,2],min_valu:0,minimum:0,minlengthvalid:0,minor:1,minvaluevalid:0,model:[0,1],modul:[0,1,4],more:[0,1,2],most:0,mother:0,move:1,mro:0,multipl:0,must:0,mutabl:0,myform:0,myunboundfield:0,name:0,necessari:0,need:[0,2],nest:0,nested_dd:0,nested_dict:0,none:0,normal:[0,2],not_requir:0,noth:[0,3],now:[0,1],obj:0,object:[0,2],obtain:2,occur:0,onc:0,onli:0,optim:1,ordinari:0,other:[0,1],otherwis:0,our:2,own:[0,2],owner:0
,page:4,paramet:0,parent:0,parent_field:0,pars:0,pass:[0,2],password:[0,2],pip:3,pleas:3,posit:0,positive_cas:1,possibl:0,pprint:0,prevent:0,previou:1,print:[0,2],privat:1,process:0,properti:[0,1,2],provid:[0,1,2,3],put:0,python:[0,2,3],quit:0,qw3rt:0,qwerti:[0,2],qwerty_:[0,2],rais:0,raw_data:1,realiz:0,recommend:0,refactor:1,reflect:0,regex:[0,2],regexp:0,regexvalid:0,registrationform:2,regular:0,remov:1,renam:1,replac:0,repres:[0,2],represent:0,requir:[0,2],requiredvalid:0,respons:0,result:0,rewrit:1,right:2,root:0,rule:[0,2],same:0,satisfi:0,schema:0,search:4,second:0,section:[0,1],see:[0,1,2],selectfield:0,self:0,separ:0,sequencefield:0,serial:0,set:[0,2],setup:3,share:2,should:0,simpl:[0,2],simplevalid:[0,1],simpli:0,sinc:0,singl:0,skip:0,skip_extra:0,smith:0,some:[0,1],someth:0,sourc:[0,1],specif:0,specifi:0,stabl:1,standalon:0,str:0,strict:0,string:0,stringfield:[0,2],strong:0,strptime:0,structur:[0,3],subclass:[0,2],submit:2,success:0,suggest:3,superflu:3,tabl:[],tag:0,tarbal:1,templat:0,than:0,them:0,therefor:0,thi:[0,1,2],to_json:0,to_python:[0,1],togeth:0,tracker:1,tupl:0,two:2,type:0,typedvalid:1,typeerror:0,typevalid:[0,1],typic:0,unbind:0,unbound_field_rend:0,unboundfield:[0,1],unboundform:[0,1],unboundstringfield:0,understand:0,unfortun:1,unicod:0,unset:0,usag:[0,1,2],user:[0,2],utf:[0,2],validate_json:0,validation_cas:[0,1],validationerror:0,valu:[0,1],value_typ:0,variabl:2,veri:3,verif:0,version:[],via:[0,2],wai:[0,2,3],want:[0,2],well:0,were:[0,1],wheel:1,when:[0,2],where:0,whether:0,which:[0,1,2],within:0,without:0,work:1,world:0,wrap:0,wrapper:0,wrong:2,yet:1,you:[0,1,2,3],your:[0,1,2],yourlogin:0},titles:["FlyForms reference","FlyForms Changelog","Introduction to FlyForms","FlyForms","Indices and tables"],titleterms:{"class":0,"new":1,api:0,builtin:0,chang:1,changelog:1,concept:2,custom:0,data:0,defin:[0,2],embed:0,extend:2,featur:1,feedback:3,field:0,flight:0,flyform:[0,1,2,3],form:[0,2],further:2,incompat:1,indic:4,instal:3,introduct:2,level:0,low:0,manipul:0,note:1,quickstart:2,read:2,refer:0,releas:1,render:0,summari:0,tabl:4,todo:0,unbound:0,valid:0,version:1}})
PypiClean
/Lokai-0.3.tar.gz/Lokai-0.3/lokai/lk_worker/sys_procs/lk_initial_data.py
#-----------------------------------------------------------------------
# The program processes a python file that contains certain data sets.
#
# The process can be repeated. Data already in the database is left
# unchanged. New items are added.
#-----------------------------------------------------------------------
# This program is designed to handle setting up nodes in the nb
# database. Nodes are nested (linked to parents) so the setup includes
# handling links.
#
# The rules:
#
# - The parent of a node can be specified as empty, a single name, or
#   a path (string or list). If not empty, this path must result in a
#   single parent node. The implication is that all expected elements
#   must exist in order for the path to mean anything.
#
# - The path search works the same way as the UI, using
#   search_filtered. Do not forget the '=' at the beginning of a
#   string with a '/' delimited path.
#
# - The path search always starts from the top of the forest. That
#   is, the previous node in the input data is never assumed to be
#   the parent of the node being processed.
#
# - Nodes are identified by node name. If the incoming data defines a
#   node of name 'xxx' and the database does not already contain a
#   node of name 'xxx' under the given path, then the node is
#   created.
#
#   This process cannot detect nodes that have been renamed, or nodes
#   that have been moved. Conversely, the process does not rename an
#   existing node, nor does it re-link an existing node.
#
# The input data:
#
# - The input uses yaml text formatting.
#
#   See http://yaml.org/spec/1.1/#id857168
#
# - The input may be divided into more than one document (using
#   '---'). This has no actual significance for the data processing.
#
# - Input data is not nested. Each node is specified separately as a
#   new entry in a list.
#
# - Input data follows the data structure required for
#   put_structured_data (q.v.) with an additional entry 'parent'
#   containing the path. (An illustrative sketch follows the source
#   below.)
#
#-----------------------------------------------------------------------
import sys
import os
import logging

import lokai.tool_box.tb_common.notification as notify
import lokai.tool_box.tb_common.configuration as config
from lokai.tool_box.tb_install.yaml_import import YamlImport
from lokai.lk_worker.extensions.extension_manager import (
    get_all_extensions,
    LK_REGISTER_TYPES_AND_MODELS,
    )
from lokai.lk_login.yaml_import import YamlLogin
from lokai.lk_worker.yaml_import import YamlNodes, YamlLinks

#-----------------------------------------------------------------------
# Can be passed on command line as --path=
globals()['default_path'] = './'

#-----------------------------------------------------------------------
# Get the data to process
# Can be passed on command line as --file=
self_dir, self_file = os.path.split(os.path.abspath(__file__))
param_file_default_name = 'lkw_initial_data.yml'
param_file_default = os.path.join(self_dir, param_file_default_name)

#-----------------------------------------------------------------------
class InitialDataSet(YamlImport, YamlLogin, YamlNodes, YamlLinks):

    def __init__(self, options):
        YamlImport.__init__(self, file=options.file, ignore=options.ignore)
        YamlLogin.__init__(self)
        YamlNodes.__init__(self)
        YamlLinks.__init__(self)
        self.process_all()

#-----------------------------------------------------------------------
def main_():
    from optparse import OptionParser
    parser = OptionParser(usage='usage: %prog [options]')
    parser.add_option('-v', '--verbose',
                      dest='verbose',
                      action='count',
                      help='Be a bit wordy')
    parser.add_option('-n', '--no-action',
                      dest='ignore',
                      action='store_true',
                      help='Do nothing with the database')
    parser.add_option('-f', '--file',
                      dest='file',
                      action='store',
                      help='Read input from this file')
    parser.add_option('--path',
                      dest='path',
                      action='store',
                      default=None,
                      help='Default base path for folder parameters')
    parser.add_option('--env',
                      dest='env',
                      action='store',
                      default='nix',
                      help='Environment trigger for predefined base paths')
    (options, args) = parser.parse_args()
    if options.verbose:
        print('Verbosity set at %s' % str(options.verbose))
    config.handle_ini_declaration(prefix='lk')
    log_level = {1: logging.WARNING,
                 2: logging.INFO,
                 3: logging.DEBUG}
    notify.setLogName(
        os.path.splitext(
            os.path.basename(sys.argv[0]))[0])
    # options.verbose is None when -v is not given; normalise it so both the
    # root logger and the notify logger receive a real logging level (the
    # original passed the raw count to setLevel, which fails for None).
    verbosity = min(options.verbose or 0, 3)
    logging.basicConfig(level=log_level.get(verbosity, logging.ERROR))
    this_logger = notify.getLogger()
    this_logger.setLevel(log_level.get(verbosity, logging.ERROR))
    debug_logger = logging.getLogger(notify.getDebugName())
    debug_logger.addHandler(logging.StreamHandler())
    this_logger.addHandler(logging.StreamHandler())
    import lokai.lk_worker.models
    lokai.lk_worker.models.model.init()
    get_all_extensions(LK_REGISTER_TYPES_AND_MODELS)
    ids = InitialDataSet(options)
    return 0  # Success?

if __name__ == '__main__':
    main_()

#-----------------------------------------------------------------------
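To make the input format described in the module header concrete, here is a rough sketch written as Python structures and dumped with PyYAML. Only the 'parent' key is documented above; 'name' and 'description' are hypothetical stand-ins for whatever fields put_structured_data actually expects, which is not shown here.

# Rough sketch of the node list described in the module header. 'parent'
# is the extra path entry; 'name' and 'description' are hypothetical
# placeholders for the put_structured_data fields.
import yaml

nodes = [
    {"parent": "",            # empty parent: a top-of-forest node
     "name": "projects"},
    {"parent": "=/projects",  # leading '=' for a '/'-delimited path
     "name": "alpha",
     "description": "First project"},
]
print(yaml.safe_dump_all([nodes], default_flow_style=False))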
PypiClean
/IPRA_SESG-3.20.51-py3-none-any.whl/ipra/View2/bootupCTK.py
import time
import customtkinter
import threading
from tkinter.constants import LEFT, NW, TOP
import sys
import subprocess
import pkg_resources
from ipra.View2.main import Main


class BootupCTK():
    required = ['selenium', 'beautifulsoup4', 'webdriver_manager', 'pandas',
                'xlsxwriter', 'openpyxl', 'lxml', 'configparser', 'packaging',
                'Pillow', 'customtkinter', 'IPRA']
    currentVersion = []

    def __init__(self):
        self.root = customtkinter.CTk()
        self.root.title("IPRA Update Process")

        # sets the geometry of toplevel
        self.root.geometry("400x200")

        mainLabel = customtkinter.CTkLabel(self.root, text='Check for update...')
        mainLabel.pack(side=TOP, anchor=NW, padx=10, pady=10)

        # New Line
        emptyLable = customtkinter.CTkLabel(self.root, text='')
        emptyLable.pack()

        self.statusText = customtkinter.StringVar()
        self.statusText.set("")
        statusLable = customtkinter.CTkLabel(self.root, textvariable=self.statusText)
        statusLable.pack(side=TOP, anchor=NW, padx=10)

        # New Line
        emptyLable = customtkinter.CTkLabel(self.root, text='')
        emptyLable.pack()

        self.closeButton = customtkinter.CTkButton(self.root, text="START IPRA",
                                                   command=self.__closeFrame)
        self.closeButton.pack_forget()

        self.updatePackageThread = threading.Thread(target=self.__updatePackage)
        self.updatePackageThread.start()
        #self.__getCurrentPackageVersion()
        self.root.mainloop()

    def __closeFrame(self):
        # Shut down all frame and close all webdriver
        # Important to release all resources
        self.root.quit()
        self.ipra = Main()

    def __getCurrentPackageVersion(self):
        for packageName in self.required:
            self.currentVersion.append(
                pkg_resources.get_distribution(packageName).version)
        return

    def __updatePackage(self):
        for packageName in self.required:
            pass
            # self.statusText.set("Checking for Update: {0}".format(packageName))
            # python = sys.executable
            # subprocess.check_call(
            #     [python, '-m', 'pip', 'install', packageName], stdout=subprocess.DEVNULL)
            # python = sys.executable
            # subprocess.check_call(
            #     [python, '-m', 'pip', 'install', packageName, '--upgrade'], stdout=subprocess.DEVNULL)
        else:
            self.statusText.set("Update Completed! Starting IPRA...")
            self.closeButton.pack(side=TOP, anchor=NW, padx=10)
            # time.sleep(3)
            # self.__closeFrame()


updateDialog = BootupCTK()
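The commented-out body of __updatePackage sketches the intended self-update flow. Here is a standalone sketch of that pattern, using only the subprocess calls already present in the original comments and assuming pip and network access are available:

import sys
import subprocess

def upgrade_packages(packages):
    """Upgrade each package in-place with pip, mirroring the commented-out
    logic in BootupCTK.__updatePackage."""
    python = sys.executable
    for name in packages:
        subprocess.check_call(
            [python, '-m', 'pip', 'install', name, '--upgrade'],
            stdout=subprocess.DEVNULL)

# e.g. upgrade_packages(['selenium', 'pandas'])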
PypiClean
/FlaskCms-0.0.4.tar.gz/FlaskCms-0.0.4/flask_cms/static/js/ace/mode-vala.js
ace.define("ace/mode/vala_highlight_rules",["require","exports","module","ace/lib/oop","ace/mode/text_highlight_rules"], function(require, exports, module) { "use strict"; var oop = require("../lib/oop"); var TextHighlightRules = require("./text_highlight_rules").TextHighlightRules; var ValaHighlightRules = function() { this.$rules = { start: [ { token: [ 'meta.using.vala', 'keyword.other.using.vala', 'meta.using.vala', 'storage.modifier.using.vala', 'meta.using.vala', 'punctuation.terminator.vala' ], regex: '^(\\s*)(using)\\b(?:(\\s*)([^ ;$]+)(\\s*)((?:;)?))?' }, { include: '#code' } ], '#all-types': [ { include: '#primitive-arrays' }, { include: '#primitive-types' }, { include: '#object-types' } ], '#annotations': [ { token: [ 'storage.type.annotation.vala', 'punctuation.definition.annotation-arguments.begin.vala' ], regex: '(@[^ (]+)(\\()', push: [ { token: 'punctuation.definition.annotation-arguments.end.vala', regex: '\\)', next: 'pop' }, { token: [ 'constant.other.key.vala', 'text', 'keyword.operator.assignment.vala' ], regex: '(\\w*)(\\s*)(=)' }, { include: '#code' }, { token: 'punctuation.seperator.property.vala', regex: ',' }, { defaultToken: 'meta.declaration.annotation.vala' } ] }, { token: 'storage.type.annotation.vala', regex: '@\\w*' } ], '#anonymous-classes-and-new': [ { token: 'keyword.control.new.vala', regex: '\\bnew\\b', push_disabled: [ { token: 'text', regex: '(?<=\\)|\\])(?!\\s*{)|(?<=})|(?=;)', TODO: 'FIXME: regexp doesn\'t have js equivalent', originalRegex: '(?<=\\)|\\])(?!\\s*{)|(?<=})|(?=;)', next: 'pop' }, { token: [ 'storage.type.vala', 'text' ], regex: '(\\w+)(\\s*)(?=\\[)', push: [ { token: 'text', regex: '}|(?=;|\\))', next: 'pop' }, { token: 'text', regex: '\\[', push: [ { token: 'text', regex: '\\]', next: 'pop' }, { include: '#code' } ] }, { token: 'text', regex: '{', push: [ { token: 'text', regex: '(?=})', next: 'pop' }, { include: '#code' } ] } ] }, { token: 'text', regex: '(?=\\w.*\\()', push: [ { token: 'text', regex: '(?<=\\))', TODO: 'FIXME: regexp doesn\'t have js equivalent', originalRegex: '(?<=\\))', next: 'pop' }, { include: '#object-types' }, { token: 'text', regex: '\\(', push: [ { token: 'text', regex: '\\)', next: 'pop' }, { include: '#code' } ] } ] }, { token: 'meta.inner-class.vala', regex: '{', push: [ { token: 'meta.inner-class.vala', regex: '}', next: 'pop' }, { include: '#class-body' }, { defaultToken: 'meta.inner-class.vala' } ] } ] } ], '#assertions': [ { token: [ 'keyword.control.assert.vala', 'meta.declaration.assertion.vala' ], regex: '\\b(assert|requires|ensures)(\\s)', push: [ { token: 'meta.declaration.assertion.vala', regex: '$', next: 'pop' }, { token: 'keyword.operator.assert.expression-seperator.vala', regex: ':' }, { include: '#code' }, { defaultToken: 'meta.declaration.assertion.vala' } ] } ], '#class': [ { token: 'meta.class.vala', regex: '(?=\\w?[\\w\\s]*(?:class|(?:@)?interface|enum|struct|namespace)\\s+\\w+)', push: [ { token: 'punctuation.section.class.end.vala', regex: '}', next: 'pop' }, { include: '#storage-modifiers' }, { include: '#comments' }, { token: [ 'storage.modifier.vala', 'meta.class.identifier.vala', 'entity.name.type.class.vala' ], regex: '(class|(?:@)?interface|enum|struct|namespace)(\\s+)([\\w\\.]+)' }, { token: 'storage.modifier.extends.vala', regex: ':', push: [ { token: 'meta.definition.class.inherited.classes.vala', regex: '(?={|,)', next: 'pop' }, { include: '#object-types-inherited' }, { include: '#comments' }, { defaultToken: 'meta.definition.class.inherited.classes.vala' } ] }, { token: [ 
'storage.modifier.implements.vala', 'meta.definition.class.implemented.interfaces.vala' ], regex: '(,)(\\s)', push: [ { token: 'meta.definition.class.implemented.interfaces.vala', regex: '(?=\\{)', next: 'pop' }, { include: '#object-types-inherited' }, { include: '#comments' }, { defaultToken: 'meta.definition.class.implemented.interfaces.vala' } ] }, { token: 'meta.class.body.vala', regex: '{', push: [ { token: 'meta.class.body.vala', regex: '(?=})', next: 'pop' }, { include: '#class-body' }, { defaultToken: 'meta.class.body.vala' } ] }, { defaultToken: 'meta.class.vala' } ], comment: 'attempting to put namespace in here.' } ], '#class-body': [ { include: '#comments' }, { include: '#class' }, { include: '#enums' }, { include: '#methods' }, { include: '#annotations' }, { include: '#storage-modifiers' }, { include: '#code' } ], '#code': [ { include: '#comments' }, { include: '#class' }, { token: 'text', regex: '{', push: [ { token: 'text', regex: '}', next: 'pop' }, { include: '#code' } ] }, { include: '#assertions' }, { include: '#parens' }, { include: '#constants-and-special-vars' }, { include: '#anonymous-classes-and-new' }, { include: '#keywords' }, { include: '#storage-modifiers' }, { include: '#strings' }, { include: '#all-types' } ], '#comments': [ { token: 'punctuation.definition.comment.vala', regex: '/\\*\\*/' }, { include: 'text.html.javadoc' }, { include: '#comments-inline' } ], '#comments-inline': [ { token: 'punctuation.definition.comment.vala', regex: '/\\*', push: [ { token: 'punctuation.definition.comment.vala', regex: '\\*/', next: 'pop' }, { defaultToken: 'comment.block.vala' } ] }, { token: [ 'text', 'punctuation.definition.comment.vala', 'comment.line.double-slash.vala' ], regex: '(\\s*)(//)(.*$)' } ], '#constants-and-special-vars': [ { token: 'constant.language.vala', regex: '\\b(?:true|false|null)\\b' }, { token: 'variable.language.vala', regex: '\\b(?:this|base)\\b' }, { token: 'constant.numeric.vala', regex: '\\b(?:0(?:x|X)[0-9a-fA-F]*|(?:[0-9]+\\.?[0-9]*|\\.[0-9]+)(?:(?:e|E)(?:\\+|-)?[0-9]+)?)(?:[LlFfUuDd]|UL|ul)?\\b' }, { token: [ 'keyword.operator.dereference.vala', 'constant.other.vala' ], regex: '((?:\\.)?)\\b([A-Z][A-Z0-9_]+)(?!<|\\.class|\\s*\\w+\\s*=)\\b' } ], '#enums': [ { token: 'text', regex: '^(?=\\s*[A-Z0-9_]+\\s*(?:{|\\(|,))', push: [ { token: 'text', regex: '(?=;|})', next: 'pop' }, { token: 'constant.other.enum.vala', regex: '\\w+', push: [ { token: 'meta.enum.vala', regex: '(?=,|;|})', next: 'pop' }, { include: '#parens' }, { token: 'text', regex: '{', push: [ { token: 'text', regex: '}', next: 'pop' }, { include: '#class-body' } ] }, { defaultToken: 'meta.enum.vala' } ] } ] } ], '#keywords': [ { token: 'keyword.control.catch-exception.vala', regex: '\\b(?:try|catch|finally|throw)\\b' }, { token: 'keyword.control.vala', regex: '\\?|:|\\?\\?' 
}, { token: 'keyword.control.vala', regex: '\\b(?:return|break|case|continue|default|do|while|for|foreach|switch|if|else|in|yield|get|set|value)\\b' }, { token: 'keyword.operator.vala', regex: '\\b(?:typeof|is|as)\\b' }, { token: 'keyword.operator.comparison.vala', regex: '==|!=|<=|>=|<>|<|>' }, { token: 'keyword.operator.assignment.vala', regex: '=' }, { token: 'keyword.operator.increment-decrement.vala', regex: '\\-\\-|\\+\\+' }, { token: 'keyword.operator.arithmetic.vala', regex: '\\-|\\+|\\*|\\/|%' }, { token: 'keyword.operator.logical.vala', regex: '!|&&|\\|\\|' }, { token: 'keyword.operator.dereference.vala', regex: '\\.(?=\\S)', originalRegex: '(?<=\\S)\\.(?=\\S)' }, { token: 'punctuation.terminator.vala', regex: ';' }, { token: 'keyword.operator.ownership', regex: 'owned|unowned' } ], '#methods': [ { token: 'meta.method.vala', regex: '(?!new)(?=\\w.*\\s+)(?=[^=]+\\()', push: [ { token: 'meta.method.vala', regex: '}|(?=;)', next: 'pop' }, { include: '#storage-modifiers' }, { token: [ 'entity.name.function.vala', 'meta.method.identifier.vala' ], regex: '([\\~\\w\\.]+)(\\s*\\()', push: [ { token: 'meta.method.identifier.vala', regex: '\\)', next: 'pop' }, { include: '#parameters' }, { defaultToken: 'meta.method.identifier.vala' } ] }, { token: 'meta.method.return-type.vala', regex: '(?=\\w.*\\s+\\w+\\s*\\()', push: [ { token: 'meta.method.return-type.vala', regex: '(?=\\w+\\s*\\()', next: 'pop' }, { include: '#all-types' }, { defaultToken: 'meta.method.return-type.vala' } ] }, { include: '#throws' }, { token: 'meta.method.body.vala', regex: '{', push: [ { token: 'meta.method.body.vala', regex: '(?=})', next: 'pop' }, { include: '#code' }, { defaultToken: 'meta.method.body.vala' } ] }, { defaultToken: 'meta.method.vala' } ] } ], '#namespace': [ { token: 'text', regex: '^(?=\\s*[A-Z0-9_]+\\s*(?:{|\\(|,))', push: [ { token: 'text', regex: '(?=;|})', next: 'pop' }, { token: 'constant.other.namespace.vala', regex: '\\w+', push: [ { token: 'meta.namespace.vala', regex: '(?=,|;|})', next: 'pop' }, { include: '#parens' }, { token: 'text', regex: '{', push: [ { token: 'text', regex: '}', next: 'pop' }, { include: '#code' } ] }, { defaultToken: 'meta.namespace.vala' } ] } ], comment: 'This is not quite right. 
See the class grammar right now' } ], '#object-types': [ { token: 'storage.type.generic.vala', regex: '\\b(?:[a-z]\\w*\\.)*[A-Z]+\\w*<', push: [ { token: 'storage.type.generic.vala', regex: '>|[^\\w\\s,\\?<\\[()\\]]', TODO: 'FIXME: regexp doesn\'t have js equivalent', originalRegex: '>|[^\\w\\s,\\?<\\[(?:[,]+)\\]]', next: 'pop' }, { include: '#object-types' }, { token: 'storage.type.generic.vala', regex: '<', push: [ { token: 'storage.type.generic.vala', regex: '>|[^\\w\\s,\\[\\]<]', next: 'pop' }, { defaultToken: 'storage.type.generic.vala' } ], comment: 'This is just to support <>\'s with no actual type prefix' }, { defaultToken: 'storage.type.generic.vala' } ] }, { token: 'storage.type.object.array.vala', regex: '\\b(?:[a-z]\\w*\\.)*[A-Z]+\\w*(?=\\[)', push: [ { token: 'storage.type.object.array.vala', regex: '(?=[^\\]\\s])', next: 'pop' }, { token: 'text', regex: '\\[', push: [ { token: 'text', regex: '\\]', next: 'pop' }, { include: '#code' } ] }, { defaultToken: 'storage.type.object.array.vala' } ] }, { token: [ 'storage.type.vala', 'keyword.operator.dereference.vala', 'storage.type.vala' ], regex: '\\b(?:([a-z]\\w*)(\\.))*([A-Z]+\\w*\\b)' } ], '#object-types-inherited': [ { token: 'entity.other.inherited-class.vala', regex: '\\b(?:[a-z]\\w*\\.)*[A-Z]+\\w*<', push: [ { token: 'entity.other.inherited-class.vala', regex: '>|[^\\w\\s,<]', next: 'pop' }, { include: '#object-types' }, { token: 'storage.type.generic.vala', regex: '<', push: [ { token: 'storage.type.generic.vala', regex: '>|[^\\w\\s,<]', next: 'pop' }, { defaultToken: 'storage.type.generic.vala' } ], comment: 'This is just to support <>\'s with no actual type prefix' }, { defaultToken: 'entity.other.inherited-class.vala' } ] }, { token: [ 'entity.other.inherited-class.vala', 'keyword.operator.dereference.vala', 'entity.other.inherited-class.vala' ], regex: '\\b(?:([a-z]\\w*)(\\.))*([A-Z]+\\w*)' } ], '#parameters': [ { token: 'storage.modifier.vala', regex: 'final' }, { include: '#primitive-arrays' }, { include: '#primitive-types' }, { include: '#object-types' }, { token: 'variable.parameter.vala', regex: '\\w+' } ], '#parens': [ { token: 'text', regex: '\\(', push: [ { token: 'text', regex: '\\)', next: 'pop' }, { include: '#code' } ] } ], '#primitive-arrays': [ { token: 'storage.type.primitive.array.vala', regex: '\\b(?:bool|byte|sbyte|char|decimal|double|float|int|uint|long|ulong|object|short|ushort|string|void|int8|int16|int32|int64|uint8|uint16|uint32|uint64)(?:\\[\\])*\\b' } ], '#primitive-types': [ { token: 'storage.type.primitive.vala', regex: '\\b(?:var|bool|byte|sbyte|char|decimal|double|float|int|uint|long|ulong|object|short|ushort|string|void|signal|int8|int16|int32|int64|uint8|uint16|uint32|uint64)\\b', comment: 'var is not really a primitive, but acts like one in most cases' } ], '#storage-modifiers': [ { token: 'storage.modifier.vala', regex: '\\b(?:public|private|protected|internal|static|final|sealed|virtual|override|abstract|readonly|volatile|dynamic|async|unsafe|out|ref|weak|owned|unowned|const)\\b', comment: 'Not sure about unsafe and readonly' } ], '#strings': [ { token: 'punctuation.definition.string.begin.vala', regex: '@"', push: [ { token: 'punctuation.definition.string.end.vala', regex: '"', next: 'pop' }, { token: 'constant.character.escape.vala', regex: '\\\\.|%[\\w\\.\\-]+|\\$(?:\\w+|\\([\\w\\s\\+\\-\\*\\/]+\\))' }, { defaultToken: 'string.quoted.interpolated.vala' } ] }, { token: 'punctuation.definition.string.begin.vala', regex: '"', push: [ { token: 'punctuation.definition.string.end.vala', 
regex: '"', next: 'pop' }, { token: 'constant.character.escape.vala', regex: '\\\\.' }, { token: 'constant.character.escape.vala', regex: '%[\\w\\.\\-]+' }, { defaultToken: 'string.quoted.double.vala' } ] }, { token: 'punctuation.definition.string.begin.vala', regex: '\'', push: [ { token: 'punctuation.definition.string.end.vala', regex: '\'', next: 'pop' }, { token: 'constant.character.escape.vala', regex: '\\\\.' }, { defaultToken: 'string.quoted.single.vala' } ] }, { token: 'punctuation.definition.string.begin.vala', regex: '"""', push: [ { token: 'punctuation.definition.string.end.vala', regex: '"""', next: 'pop' }, { token: 'constant.character.escape.vala', regex: '%[\\w\\.\\-]+' }, { defaultToken: 'string.quoted.triple.vala' } ] } ], '#throws': [ { token: 'storage.modifier.vala', regex: 'throws', push: [ { token: 'meta.throwables.vala', regex: '(?={|;)', next: 'pop' }, { include: '#object-types' }, { defaultToken: 'meta.throwables.vala' } ] } ], '#values': [ { include: '#strings' }, { include: '#object-types' }, { include: '#constants-and-special-vars' } ] } this.normalizeRules(); }; ValaHighlightRules.metaData = { comment: 'Based heavily on the Java bundle\'s language syntax. TODO:\n* Closures\n* Delegates\n* Properties: Better support for properties.\n* Annotations\n* Error domains\n* Named arguments\n* Array slicing, negative indexes, multidimensional\n* construct blocks\n* lock blocks?\n* regex literals\n* DocBlock syntax highlighting. (Currently importing javadoc)\n* Folding rule for comments.\n', fileTypes: [ 'vala' ], foldingStartMarker: '(\\{\\s*(//.*)?$|^\\s*// \\{\\{\\{)', foldingStopMarker: '^\\s*(\\}|// \\}\\}\\}$)', name: 'Vala', scopeName: 'source.vala' } oop.inherits(ValaHighlightRules, TextHighlightRules); exports.ValaHighlightRules = ValaHighlightRules; }); ace.define("ace/mode/folding/cstyle",["require","exports","module","ace/lib/oop","ace/range","ace/mode/folding/fold_mode"], function(require, exports, module) { "use strict"; var oop = require("../../lib/oop"); var Range = require("../../range").Range; var BaseFoldMode = require("./fold_mode").FoldMode; var FoldMode = exports.FoldMode = function(commentRegex) { if (commentRegex) { this.foldingStartMarker = new RegExp( this.foldingStartMarker.source.replace(/\|[^|]*?$/, "|" + commentRegex.start) ); this.foldingStopMarker = new RegExp( this.foldingStopMarker.source.replace(/\|[^|]*?$/, "|" + commentRegex.end) ); } }; oop.inherits(FoldMode, BaseFoldMode); (function() { this.foldingStartMarker = /(\{|\[)[^\}\]]*$|^\s*(\/\*)/; this.foldingStopMarker = /^[^\[\{]*(\}|\])|^[\s\*]*(\*\/)/; this.getFoldWidgetRange = function(session, foldStyle, row, forceMultiline) { var line = session.getLine(row); var match = line.match(this.foldingStartMarker); if (match) { var i = match.index; if (match[1]) return this.openingBracketBlock(session, match[1], row, i); var range = session.getCommentFoldRange(row, i + match[0].length, 1); if (range && !range.isMultiLine()) { if (forceMultiline) { range = this.getSectionRange(session, row); } else if (foldStyle != "all") range = null; } return range; } if (foldStyle === "markbegin") return; var match = line.match(this.foldingStopMarker); if (match) { var i = match.index + match[0].length; if (match[1]) return this.closingBracketBlock(session, match[1], row, i); return session.getCommentFoldRange(row, i, -1); } }; this.getSectionRange = function(session, row) { var line = session.getLine(row); var startIndent = line.search(/\S/); var startRow = row; var startColumn = line.length; row = row + 
1; var endRow = row; var maxRow = session.getLength(); while (++row < maxRow) { line = session.getLine(row); var indent = line.search(/\S/); if (indent === -1) continue; if (startIndent > indent) break; var subRange = this.getFoldWidgetRange(session, "all", row); if (subRange) { if (subRange.start.row <= startRow) { break; } else if (subRange.isMultiLine()) { row = subRange.end.row; } else if (startIndent == indent) { break; } } endRow = row; } return new Range(startRow, startColumn, endRow, session.getLine(endRow).length); }; }).call(FoldMode.prototype); }); ace.define("ace/mode/behaviour/cstyle",["require","exports","module","ace/lib/oop","ace/mode/behaviour","ace/token_iterator","ace/lib/lang"], function(require, exports, module) { "use strict"; var oop = require("../../lib/oop"); var Behaviour = require("../behaviour").Behaviour; var TokenIterator = require("../../token_iterator").TokenIterator; var lang = require("../../lib/lang"); var SAFE_INSERT_IN_TOKENS = ["text", "paren.rparen", "punctuation.operator"]; var SAFE_INSERT_BEFORE_TOKENS = ["text", "paren.rparen", "punctuation.operator", "comment"]; var context; var contextCache = {} var initContext = function(editor) { var id = -1; if (editor.multiSelect) { id = editor.selection.id; if (contextCache.rangeCount != editor.multiSelect.rangeCount) contextCache = {rangeCount: editor.multiSelect.rangeCount}; } if (contextCache[id]) return context = contextCache[id]; context = contextCache[id] = { autoInsertedBrackets: 0, autoInsertedRow: -1, autoInsertedLineEnd: "", maybeInsertedBrackets: 0, maybeInsertedRow: -1, maybeInsertedLineStart: "", maybeInsertedLineEnd: "" }; }; var CstyleBehaviour = function() { this.add("braces", "insertion", function(state, action, editor, session, text) { var cursor = editor.getCursorPosition(); var line = session.doc.getLine(cursor.row); if (text == '{') { initContext(editor); var selection = editor.getSelectionRange(); var selected = session.doc.getTextRange(selection); if (selected !== "" && selected !== "{" && editor.getWrapBehavioursEnabled()) { return { text: '{' + selected + '}', selection: false }; } else if (CstyleBehaviour.isSaneInsertion(editor, session)) { if (/[\]\}\)]/.test(line[cursor.column]) || editor.inMultiSelectMode) { CstyleBehaviour.recordAutoInsert(editor, session, "}"); return { text: '{}', selection: [1, 1] }; } else { CstyleBehaviour.recordMaybeInsert(editor, session, "{"); return { text: '{', selection: [1, 1] }; } } } else if (text == '}') { initContext(editor); var rightChar = line.substring(cursor.column, cursor.column + 1); if (rightChar == '}') { var matching = session.$findOpeningBracket('}', {column: cursor.column + 1, row: cursor.row}); if (matching !== null && CstyleBehaviour.isAutoInsertedClosing(cursor, line, text)) { CstyleBehaviour.popAutoInsertedClosing(); return { text: '', selection: [1, 1] }; } } } else if (text == "\n" || text == "\r\n") { initContext(editor); var closing = ""; if (CstyleBehaviour.isMaybeInsertedClosing(cursor, line)) { closing = lang.stringRepeat("}", context.maybeInsertedBrackets); CstyleBehaviour.clearMaybeInsertedClosing(); } var rightChar = line.substring(cursor.column, cursor.column + 1); if (rightChar === '}') { var openBracePos = session.findMatchingBracket({row: cursor.row, column: cursor.column+1}, '}'); if (!openBracePos) return null; var next_indent = this.$getIndent(session.getLine(openBracePos.row)); } else if (closing) { var next_indent = this.$getIndent(line); } else { CstyleBehaviour.clearMaybeInsertedClosing(); return; } var indent 
= next_indent + session.getTabString(); return { text: '\n' + indent + '\n' + next_indent + closing, selection: [1, indent.length, 1, indent.length] }; } else { CstyleBehaviour.clearMaybeInsertedClosing(); } }); this.add("braces", "deletion", function(state, action, editor, session, range) { var selected = session.doc.getTextRange(range); if (!range.isMultiLine() && selected == '{') { initContext(editor); var line = session.doc.getLine(range.start.row); var rightChar = line.substring(range.end.column, range.end.column + 1); if (rightChar == '}') { range.end.column++; return range; } else { context.maybeInsertedBrackets--; } } }); this.add("parens", "insertion", function(state, action, editor, session, text) { if (text == '(') { initContext(editor); var selection = editor.getSelectionRange(); var selected = session.doc.getTextRange(selection); if (selected !== "" && editor.getWrapBehavioursEnabled()) { return { text: '(' + selected + ')', selection: false }; } else if (CstyleBehaviour.isSaneInsertion(editor, session)) { CstyleBehaviour.recordAutoInsert(editor, session, ")"); return { text: '()', selection: [1, 1] }; } } else if (text == ')') { initContext(editor); var cursor = editor.getCursorPosition(); var line = session.doc.getLine(cursor.row); var rightChar = line.substring(cursor.column, cursor.column + 1); if (rightChar == ')') { var matching = session.$findOpeningBracket(')', {column: cursor.column + 1, row: cursor.row}); if (matching !== null && CstyleBehaviour.isAutoInsertedClosing(cursor, line, text)) { CstyleBehaviour.popAutoInsertedClosing(); return { text: '', selection: [1, 1] }; } } } }); this.add("parens", "deletion", function(state, action, editor, session, range) { var selected = session.doc.getTextRange(range); if (!range.isMultiLine() && selected == '(') { initContext(editor); var line = session.doc.getLine(range.start.row); var rightChar = line.substring(range.start.column + 1, range.start.column + 2); if (rightChar == ')') { range.end.column++; return range; } } }); this.add("brackets", "insertion", function(state, action, editor, session, text) { if (text == '[') { initContext(editor); var selection = editor.getSelectionRange(); var selected = session.doc.getTextRange(selection); if (selected !== "" && editor.getWrapBehavioursEnabled()) { return { text: '[' + selected + ']', selection: false }; } else if (CstyleBehaviour.isSaneInsertion(editor, session)) { CstyleBehaviour.recordAutoInsert(editor, session, "]"); return { text: '[]', selection: [1, 1] }; } } else if (text == ']') { initContext(editor); var cursor = editor.getCursorPosition(); var line = session.doc.getLine(cursor.row); var rightChar = line.substring(cursor.column, cursor.column + 1); if (rightChar == ']') { var matching = session.$findOpeningBracket(']', {column: cursor.column + 1, row: cursor.row}); if (matching !== null && CstyleBehaviour.isAutoInsertedClosing(cursor, line, text)) { CstyleBehaviour.popAutoInsertedClosing(); return { text: '', selection: [1, 1] }; } } } }); this.add("brackets", "deletion", function(state, action, editor, session, range) { var selected = session.doc.getTextRange(range); if (!range.isMultiLine() && selected == '[') { initContext(editor); var line = session.doc.getLine(range.start.row); var rightChar = line.substring(range.start.column + 1, range.start.column + 2); if (rightChar == ']') { range.end.column++; return range; } } }); this.add("string_dquotes", "insertion", function(state, action, editor, session, text) { if (text == '"' || text == "'") { initContext(editor); 
        var quote = text;
        var selection = editor.getSelectionRange();
        var selected = session.doc.getTextRange(selection);
        if (selected !== "" && selected !== "'" && selected != '"' && editor.getWrapBehavioursEnabled()) {
            return {
                text: quote + selected + quote,
                selection: false
            };
        } else {
            var cursor = editor.getCursorPosition();
            var line = session.doc.getLine(cursor.row);
            var leftChar = line.substring(cursor.column - 1, cursor.column);
            if (leftChar == '\\') {
                return null;
            }
            var tokens = session.getTokens(selection.start.row);
            var col = 0, token;
            var quotepos = -1; // Track whether we're inside an open quote.

            for (var x = 0; x < tokens.length; x++) {
                token = tokens[x];
                if (token.type == "string") {
                    quotepos = -1;
                } else if (quotepos < 0) {
                    quotepos = token.value.indexOf(quote);
                }
                if ((token.value.length + col) > selection.start.column) {
                    break;
                }
                col += tokens[x].value.length;
            }
            if (!token || (quotepos < 0 && token.type !== "comment" && (token.type !== "string" || ((selection.start.column !== token.value.length + col - 1) && token.value.lastIndexOf(quote) === token.value.length - 1)))) {
                if (!CstyleBehaviour.isSaneInsertion(editor, session))
                    return;
                return {
                    text: quote + quote,
                    selection: [1, 1]
                };
            } else if (token && token.type === "string") {
                var rightChar = line.substring(cursor.column, cursor.column + 1);
                if (rightChar == quote) {
                    return {
                        text: '',
                        selection: [1, 1]
                    };
                }
            }
        }
    });

    this.add("string_dquotes", "deletion", function(state, action, editor, session, range) {
        var selected = session.doc.getTextRange(range);
        if (!range.isMultiLine() && (selected == '"' || selected == "'")) {
            initContext(editor);
            var line = session.doc.getLine(range.start.row);
            var rightChar = line.substring(range.start.column + 1, range.start.column + 2);
            if (rightChar == selected) {
                range.end.column++;
                return range;
            }
        }
    });
};

CstyleBehaviour.isSaneInsertion = function(editor, session) {
    var cursor = editor.getCursorPosition();
    var iterator = new TokenIterator(session, cursor.row, cursor.column);
    if (!this.$matchTokenType(iterator.getCurrentToken() || "text", SAFE_INSERT_IN_TOKENS)) {
        var iterator2 = new TokenIterator(session, cursor.row, cursor.column + 1);
        if (!this.$matchTokenType(iterator2.getCurrentToken() || "text", SAFE_INSERT_IN_TOKENS))
            return false;
    }
    iterator.stepForward();
    return iterator.getCurrentTokenRow() !== cursor.row ||
        this.$matchTokenType(iterator.getCurrentToken() || "text", SAFE_INSERT_BEFORE_TOKENS);
};

CstyleBehaviour.$matchTokenType = function(token, types) {
    return types.indexOf(token.type || token) > -1;
};

CstyleBehaviour.recordAutoInsert = function(editor, session, bracket) {
    var cursor = editor.getCursorPosition();
    var line = session.doc.getLine(cursor.row);
    if (!this.isAutoInsertedClosing(cursor, line, context.autoInsertedLineEnd[0]))
        context.autoInsertedBrackets = 0;
    context.autoInsertedRow = cursor.row;
    context.autoInsertedLineEnd = bracket + line.substr(cursor.column);
    context.autoInsertedBrackets++;
};

CstyleBehaviour.recordMaybeInsert = function(editor, session, bracket) {
    var cursor = editor.getCursorPosition();
    var line = session.doc.getLine(cursor.row);
    if (!this.isMaybeInsertedClosing(cursor, line))
        context.maybeInsertedBrackets = 0;
    context.maybeInsertedRow = cursor.row;
    context.maybeInsertedLineStart = line.substr(0, cursor.column) + bracket;
    context.maybeInsertedLineEnd = line.substr(cursor.column);
    context.maybeInsertedBrackets++;
};

CstyleBehaviour.isAutoInsertedClosing = function(cursor, line, bracket) {
    return context.autoInsertedBrackets > 0 &&
        cursor.row === context.autoInsertedRow &&
        bracket === context.autoInsertedLineEnd[0] &&
        line.substr(cursor.column) === context.autoInsertedLineEnd;
};

CstyleBehaviour.isMaybeInsertedClosing = function(cursor, line) {
    return context.maybeInsertedBrackets > 0 &&
        cursor.row === context.maybeInsertedRow &&
        line.substr(cursor.column) === context.maybeInsertedLineEnd &&
        line.substr(0, cursor.column) == context.maybeInsertedLineStart;
};

CstyleBehaviour.popAutoInsertedClosing = function() {
    context.autoInsertedLineEnd = context.autoInsertedLineEnd.substr(1);
    context.autoInsertedBrackets--;
};

CstyleBehaviour.clearMaybeInsertedClosing = function() {
    if (context) {
        context.maybeInsertedBrackets = 0;
        context.maybeInsertedRow = -1;
    }
};

oop.inherits(CstyleBehaviour, Behaviour);

exports.CstyleBehaviour = CstyleBehaviour;
});

ace.define("ace/mode/matching_brace_outdent",["require","exports","module","ace/range"], function(require, exports, module) {
"use strict";

var Range = require("../range").Range;

var MatchingBraceOutdent = function() {};

(function() {

    this.checkOutdent = function(line, input) {
        if (! /^\s+$/.test(line))
            return false;
        return /^\s*\}/.test(input);
    };

    this.autoOutdent = function(doc, row) {
        var line = doc.getLine(row);
        var match = line.match(/^(\s*\})/);

        if (!match) return 0;

        var column = match[1].length;
        var openBracePos = doc.findMatchingBracket({row: row, column: column});

        if (!openBracePos || openBracePos.row == row) return 0;

        var indent = this.$getIndent(doc.getLine(openBracePos.row));
        doc.replace(new Range(row, 0, row, column - 1), indent);
    };

    this.$getIndent = function(line) {
        return line.match(/^\s*/)[0];
    };

}).call(MatchingBraceOutdent.prototype);

exports.MatchingBraceOutdent = MatchingBraceOutdent;
});

ace.define("ace/mode/vala",["require","exports","module","ace/lib/oop","ace/mode/text","ace/tokenizer","ace/mode/vala_highlight_rules","ace/mode/folding/cstyle","ace/mode/behaviour/cstyle","ace/mode/folding/cstyle","ace/mode/matching_brace_outdent"], function(require, exports, module) {
"use strict";

var oop = require("../lib/oop");
var TextMode = require("./text").Mode;
var Tokenizer = require("../tokenizer").Tokenizer;
var ValaHighlightRules = require("./vala_highlight_rules").ValaHighlightRules;
var FoldMode = require("./folding/cstyle").FoldMode;
var CstyleBehaviour = require("./behaviour/cstyle").CstyleBehaviour;
var CStyleFoldMode = require("./folding/cstyle").FoldMode;
var MatchingBraceOutdent = require("./matching_brace_outdent").MatchingBraceOutdent;

var Mode = function() {
    this.HighlightRules = ValaHighlightRules;
    this.$outdent = new MatchingBraceOutdent();
    this.$behaviour = new CstyleBehaviour();
    this.foldingRules = new CStyleFoldMode();
};
oop.inherits(Mode, TextMode);

(function() {
    this.lineCommentStart = "//";
    this.blockComment = {start: "/*", end: "*/"};

    this.getNextLineIndent = function(state, line, tab) {
        var indent = this.$getIndent(line);

        var tokenizedLine = this.getTokenizer().getLineTokens(line, state);
        var tokens = tokenizedLine.tokens;
        var endState = tokenizedLine.state;

        if (tokens.length && tokens[tokens.length - 1].type == "comment") {
            return indent;
        }

        if (state == "start" || state == "no_regex") {
            var match = line.match(/^.*(?:\bcase\b.*\:|[\{\(\[])\s*$/);
            if (match) {
                indent += tab;
            }
        } else if (state == "doc-start") {
            if (endState == "start" || endState == "no_regex") {
                return "";
            }
            var match = line.match(/^\s*(\/?)\*/);
            if (match) {
                if (match[1]) {
                    indent += " ";
                }
                indent += "* ";
            }
        }

        return indent;
    };

    this.checkOutdent = function(state, line, input) {
        return this.$outdent.checkOutdent(line, input);
    };

    this.autoOutdent = function(state, doc, row) {
        this.$outdent.autoOutdent(doc, row);
    };

    this.$id = "ace/mode/vala";
}).call(Mode.prototype);

exports.Mode = Mode;
});
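// --- Usage sketch (not part of the original file) ---
// Once this module is bundled with ace, the mode can be enabled on an editor
// instance through the standard ace API, e.g.:
//
//     var editor = ace.edit("editor");
//     editor.session.setMode("ace/mode/vala");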
PypiClean
/conpot-0.6.0-py3-none-any.whl/conpot/core/loggers/mysql_log.py
import gevent
import logging
import MySQLdb  # TODO: replace with SQLAlchemy!
from warnings import filterwarnings
filterwarnings('ignore', category=MySQLdb.Warning)

logger = logging.getLogger(__name__)


class MySQLlogger(object):

    def __init__(self, host, port, db, username, passphrase, logdevice, logsocket, sensorid):
        self.host = host
        self.port = port
        self.db = db
        self.username = username
        self.passphrase = passphrase
        self.logdevice = logdevice
        self.logsocket = logsocket
        self.sensorid = sensorid
        self._connect()

    def _connect(self):
        try:
            if str(self.logsocket).lower() == 'tcp':
                self.conn = MySQLdb.connect(host=self.host,
                                            port=self.port,
                                            user=self.username,
                                            passwd=self.passphrase,
                                            db=self.db)
                self._create_db()
            elif str(self.logsocket).lower() == 'dev':
                self.conn = MySQLdb.connect(unix_socket=self.logdevice,
                                            user=self.username,
                                            passwd=self.passphrase,
                                            db=self.db)
                self._create_db()
        except (AttributeError, MySQLdb.OperationalError):
            logger.error('Could not create a stable database connection for logging. '
                         'Check database and credentials.')

    def _create_db(self):
        # Create the events table on first use.
        cursor = self.conn.cursor()
        cursor.execute("""
            SELECT count(*)
            FROM information_schema.tables
            WHERE table_name = %s and table_schema=%s""", ("events", self.db))
        if (cursor.fetchone()[0]) == 0:
            cursor.execute("""CREATE TABLE IF NOT EXISTS `events` (
                `id` bigint(20) NOT NULL AUTO_INCREMENT,
                `sensorid` text NOT NULL,
                `session` text NOT NULL,
                `timestamp` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
                `remote` text NOT NULL,
                `protocol` text NOT NULL,
                `request` text NOT NULL,
                `response` text NOT NULL,
                PRIMARY KEY (`id`)
                ) ENGINE=InnoDB DEFAULT CHARSET=latin1;
            """)

    def log(self, event, retry=1):
        cursor = self.conn.cursor()
        try:
            if len(list(event["data"].keys())) > 1:
                cursor.execute("""INSERT INTO events (sensorid, session, remote, protocol, request, response)
                                  VALUES (%s, %s, %s, %s, %s, %s)""",
                               (str(self.sensorid), str(event["id"]), str(event["remote"]),
                                event["data_type"], event["data"].get('request'),
                                event["data"].get('response')))
            else:
                cursor.execute("""INSERT INTO events (sensorid, session, remote, protocol, request, response)
                                  VALUES (%s, %s, %s, %s, %s, "NA")""",
                               (str(self.sensorid), str(event["id"]), str(event["remote"]),
                                event["data_type"], event["data"].get('type')))
            self.conn.commit()
        except (AttributeError, MySQLdb.OperationalError):
            self._connect()
            if retry == 0:
                logger.error('Logging failed. Database connection not available.')
                return False
            else:
                logger.debug('Logging failed: Database connection lost. Retrying (%s tries left)...', retry)
                retry -= 1
                gevent.sleep(float(0.5))
                return self.log(event, retry)
        return cursor.lastrowid

    def log_session(self, session):
        pass

    def select_data(self):
        cursor = self.conn.cursor()
        cursor.execute("SELECT * FROM events")
        print((cursor.fetchall()))

    def select_session_data(self, sessionid):
        cursor = self.conn.cursor()
        cursor.execute("SELECT * FROM events WHERE session = %s", [str(sessionid)])
        return cursor.fetchall()

    def truncate_table(self, table):
        cursor = self.conn.cursor()
        try:
            affected = cursor.execute("TRUNCATE TABLE %s", [str(table)])
            self.conn.commit()
        except (AttributeError, MySQLdb.IntegrityError, MySQLdb.OperationalError):
            return False
        return affected
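# --- Usage sketch (not part of the original module; names below are
# illustrative, not real conpot configuration) ---
# Assuming a reachable MySQL server and credentials matching the schema
# created above, an event shaped like the ones `log()` expects could be
# recorded as follows:
#
#     mysql_logger = MySQLlogger('127.0.0.1', 3306, 'conpot', 'user', 'secret',
#                                None, 'tcp', sensorid='sensor-1')
#     event = {'id': 'session-uuid',
#              'remote': ('10.0.0.5', 52341),
#              'data_type': 'modbus',
#              'data': {'request': 'read_coils', 'response': 'ok'}}
#     row_id = mysql_logger.log(event)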
PypiClean
/FreePyBX-1.0-RC1.tar.gz/FreePyBX-1.0-RC1/freepybx/public/js/dojox/highlight/languages/pygments/javascript.js
define("dojox/highlight/languages/pygments/javascript",["dojox/main","../../_base"],function(_1){ var dh=_1.highlight,_2=dh.constants; dh.languages.javascript={defaultMode:{lexems:["\\b[a-zA-Z]+"],keywords:{"keyword":{"for":1,"in":1,"while":1,"do":1,"break":1,"return":1,"continue":1,"if":1,"else":1,"throw":1,"try":1,"catch":1,"var":1,"with":1,"const":1,"label":1,"function":1,"new":1,"typeof":1,"instanceof":1},"keyword constant":{"true":1,"false":1,"null":1,"NaN":1,"Infinity":1,"undefined":1},"name builtin":{"Array":1,"Boolean":1,"Date":1,"Error":1,"Function":1,"Math":1,"netscape":1,"Number":1,"Object":1,"Packages":1,"RegExp":1,"String":1,"sun":1,"decodeURI":1,"decodeURIComponent":1,"encodeURI":1,"encodeURIComponent":1,"Error":1,"eval":1,"isFinite":1,"isNaN":1,"parseFloat":1,"parseInt":1,"document":1,"window":1},"name builtin pseudo":{"this":1}},contains:["comment single","comment multiline","number integer","number oct","number hex","number float","string single","string double","string regex","operator","punctuation","_function"]},modes:[{className:"comment single",begin:"//",end:"$",relevance:0},{className:"comment multiline",begin:"/\\*",end:"\\*/"},{className:"number integer",begin:"0|([1-9][0-9]*)",end:"^",relevance:0},{className:"number oct",begin:"0[0-9]+",end:"^",relevance:0},{className:"number hex",begin:"0x[0-9a-fA-F]+",end:"^",relevance:0},{className:"number float",begin:"([1-9][0-9]*\\.[0-9]*([eE][\\+-]?[0-9]+)?)|(\\.[0-9]+([eE][\\+-]?[0-9]+)?)|([0-9]+[eE][\\+-]?[0-9]+)",end:"^",relevance:0},{className:"string single",begin:"'",end:"'",illegal:"\\n",contains:["string escape"],relevance:0},{className:"string double",begin:"\"",end:"\"",illegal:"\\n",contains:["string escape"],relevance:0},{className:"string escape",begin:"\\\\.",end:"^",relevance:0},{className:"string regex",begin:"/.*?[^\\\\/]/[gim]*",end:"^"},{className:"operator",begin:"\\|\\||&&|\\+\\+|--|-=|\\+=|/=|\\*=|==|[-\\+\\*/=\\?:~\\^]",end:"^",relevance:0},{className:"punctuation",begin:"[{}\\(\\)\\[\\]\\.;]",end:"^",relevance:0},{className:"_function",begin:"function\\b",end:"{",lexems:[_2.UNDERSCORE_IDENT_RE],keywords:{keyword:{"function":1}},contains:["name function","_params"],relevance:5},{className:"name function",begin:_2.UNDERSCORE_IDENT_RE,end:"^"},{className:"_params",begin:"\\(",end:"\\)",contains:["comment single","comment multiline"]}]}; return dh.languages.javascript; });
PypiClean
/Bis-Miner-3.11.1.tar.gz/Bis-Miner-3.11.0/Orange/canvas/config.py
import os
import sys
import logging
import pickle as pickle
import itertools

import pkg_resources

from AnyQt.QtGui import (
    QPainter, QFont, QFontMetrics, QColor, QPixmap, QIcon
)
from AnyQt.QtCore import Qt, QCoreApplication, QPoint, QRect, QSettings

from .utils.settings import Settings, config_slot

log = logging.getLogger(__name__)


def init():
    """
    Initialize the QCoreApplication.organizationDomain, applicationName,
    applicationVersion and the default settings format.

    Will only run once.

    .. note:: This should not be run before QApplication has been initialized.
              Otherwise it can break Qt's plugin search paths.

    """
    dist = pkg_resources.get_distribution("Orange3")
    version = dist.version
    # Use only major.minor
    version = ".".join(version.split(".", 2)[:2])

    QCoreApplication.setOrganizationDomain("webvoc.com")
    # Application name (Chinese: 'BIS-Miner "Athena" data mining system')
    QCoreApplication.setApplicationName("BIS-Miner“雅典娜”数据挖掘系统")
    QCoreApplication.setApplicationVersion(version)
    QSettings.setDefaultFormat(QSettings.IniFormat)

    # Make it a null op.
    global init
    init = lambda: None

rc = {}


spec = \
    [("startup/show-splash-screen", bool, True,
      "Show splash screen at startup"),

     ("startup/show-welcome-screen", bool, True,
      "Show Welcome screen at startup"),

     ("startup/check-updates", bool, False,
      "Check for updates"),

     ("stylesheet", str, "orange",
      "QSS stylesheet to use"),

     ("schemeinfo/show-at-new-scheme", bool, True,
      "Show Workflow Properties when creating a new Workflow"),

     ("mainwindow/scheme-margins-enabled", bool, False,
      "Show margins around the workflow view"),

     ("mainwindow/show-scheme-shadow", bool, True,
      "Show shadow around the workflow view"),

     ("mainwindow/toolbox-dock-exclusive", bool, True,
      "Should the toolbox show only one expanded category at the time"),

     ("mainwindow/toolbox-dock-floatable", bool, False,
      "Is the canvas toolbox floatable (detachable from the main window)"),

     ("mainwindow/toolbox-dock-movable", bool, True,
      "Is the canvas toolbox movable (between left and right edge)"),

     ("mainwindow/toolbox-dock-use-popover-menu", bool, True,
      "Use a popover menu to select a widget when clicking on a category "
      "button"),

     ("mainwindow/number-of-recent-schemes", int, 15,
      "Number of recent workflows to keep in history"),

     ("schemeedit/show-channel-names", bool, True,
      "Show channel names"),

     ("schemeedit/show-link-state", bool, True,
      "Show link state hints."),

     ("schemeedit/enable-node-animations", bool, True,
      "Enable node animations."),

     ("schemeedit/freeze-on-load", bool, False,
      "Freeze signal propagation when loading a workflow."),

     ("quickmenu/trigger-on-double-click", bool, True,
      "Show quick menu on double click."),

     ("quickmenu/trigger-on-right-click", bool, True,
      "Show quick menu on right click."),

     ("quickmenu/trigger-on-space-key", bool, True,
      "Show quick menu on space key press."),

     ("quickmenu/trigger-on-any-key", bool, False,
      "Show quick menu on any key press."),

     ("logging/level", int, 1,
      "Logging level"),

     ("logging/show-on-error", bool, True,
      "Show log window on error"),

     ("logging/dockable", bool, True,
      "Allow log window to be docked"),

     ("help/open-in-external-browser", bool, False,
      "Open help in an external browser"),

     ("error-reporting/machine-id", str, '',
      "Report custom name instead of machine ID"),

     ("add-ons/allow-conda", bool, True,
      "Install add-ons with conda"),

     ("add-ons/pip-install-arguments", str, '',
      'Arguments to pass to "pip install" when installing add-ons.'),

     ("network/http-proxy", str, '',
      'HTTP proxy.'),

     ("network/https-proxy", str, '',
      'HTTPS proxy.'),
     ]

spec = [config_slot(*t) for t in spec]


def settings():
    init()
    store = QSettings()
    settings = Settings(defaults=spec, store=store)
    return settings


def data_dir():
    """Return the application data directory. If the directory path
    does not yet exists then create it.
    """
    from Orange.misc import environ
    path = os.path.join(environ.data_dir(), "canvas")
    if not os.path.isdir(path):
        os.makedirs(path, exist_ok=True)
    return path


def cache_dir():
    """Return the application cache directory. If the directory path
    does not yet exists then create it.
    """
    from Orange.misc import environ
    path = os.path.join(environ.cache_dir(), "canvas")
    if not os.path.isdir(path):
        os.makedirs(path, exist_ok=True)
    return path


def log_dir():
    """
    Return the application log directory.
    """
    init()
    if sys.platform == "darwin":
        name = str(QCoreApplication.applicationName())
        logdir = os.path.join(os.path.expanduser("~/Library/Logs"), name)
    else:
        logdir = data_dir()

    if not os.path.exists(logdir):
        os.makedirs(logdir)
    return logdir


def widget_settings_dir():
    """
    Return the widget settings directory.
    """
    from Orange.misc import environ
    return environ.widget_settings_dir()


def open_config():
    global rc
    app_dir = data_dir()
    filename = os.path.join(app_dir, "canvas-rc.pck")
    if os.path.exists(filename):
        with open(os.path.join(app_dir, "canvas-rc.pck"), "rb") as f:
            rc.update(pickle.load(f))


def save_config():
    app_dir = data_dir()
    with open(os.path.join(app_dir, "canvas-rc.pck"), "wb") as f:
        pickle.dump(rc, f)


def recent_schemes():
    """Return a list of recently accessed schemes.
    """
    app_dir = data_dir()
    recent_filename = os.path.join(app_dir, "recent.pck")
    recent = []
    if os.path.isdir(app_dir) and os.path.isfile(recent_filename):
        with open(recent_filename, "rb") as f:
            recent = pickle.load(f)

    # Filter out files not found on the file system
    recent = [(title, path) for title, path in recent
              if os.path.exists(path)]
    return recent


def save_recent_scheme_list(scheme_list):
    """Save the list of recently accessed schemes
    """
    app_dir = data_dir()
    recent_filename = os.path.join(app_dir, "recent.pck")
    if os.path.isdir(app_dir):
        with open(recent_filename, "wb") as f:
            pickle.dump(scheme_list, f)


WIDGETS_ENTRY = "orange.widgets"


# This could also be achieved by declaring the entry point in
# Orange's setup.py, but that would not guarantee this entry point
# is the first in a list.

def default_entry_point():
    """
    Return a default orange.widgets entry point for loading
    default Orange Widgets.
    """
    dist = pkg_resources.get_distribution("Orange3")
    ep = pkg_resources.EntryPoint("Orange Widgets", "Orange.widgets",
                                  dist=dist)
    return ep


def widgets_entry_points():
    """
    Return an `EntryPoint` iterator for all 'orange.widget' entry
    points plus the default Orange Widgets.
    """
    ep_iter = pkg_resources.iter_entry_points(WIDGETS_ENTRY)
    chain = [[default_entry_point()],
             ep_iter
             ]
    return itertools.chain(*chain)

#: Parameters for searching add-on packages in PyPi using xmlrpc api.
ADDON_KEYWORD = 'orange3 add-on'
ADDON_PYPI_SEARCH_SPEC = {"keywords": ADDON_KEYWORD}
#: Entry points by which add-ons register with pkg_resources.
ADDON_ENTRY = "orange3.addon"


def splash_screen():
    """
    """
    pm = QPixmap(
        pkg_resources.resource_filename(
            __name__, "icons/orange-splash-screen.png")
    )

    version = QCoreApplication.applicationVersion()
    size = 21 if len(version) < 5 else 16
    font = QFont("Helvetica")
    font.setPixelSize(size)
    font.setBold(True)
    font.setItalic(True)
    font.setLetterSpacing(QFont.AbsoluteSpacing, 2)
    metrics = QFontMetrics(font)
    br = metrics.boundingRect(version).adjusted(-5, 0, 5, 0)
    br.moveCenter(QPoint(436, 224))

    p = QPainter(pm)
    p.setRenderHint(QPainter.Antialiasing)
    p.setRenderHint(QPainter.TextAntialiasing)
    p.setFont(font)
    p.setPen(QColor("#231F20"))
    p.drawText(br, Qt.AlignCenter, version)
    p.end()
    return pm, QRect(88, 193, 200, 20)


def application_icon():
    """
    Return the main application icon.
    """
    path = pkg_resources.resource_filename(
        __name__, "icons/orange-canvas.svg"
    )
    return QIcon(path)
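# --- Usage sketch (not part of the original module; assumes the Settings
# wrapper from .utils.settings supports dict-style key access, as the
# config_slot defaults above suggest) ---
#
#     s = settings()
#     show_splash = s["startup/show-splash-screen"]  # True by default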
PypiClean
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/docs/34/replacement.py
# System
import os
import sys
# Types
import types
# Debugging
from pdb import set_trace as db
# Inspection
import inspect
# Iteration
from itertools import zip_longest as izip, tee
# Imports
from importlib import util, abc, machinery, _bootstrap as bootstrap
import imp
# Debugging
import logging
# Local Libraries
try:
    from . import descriptors
    from . import utilities
except SystemError:
    import descriptors
    import utilities

# Constants
modsep = '.'


class OverlayImporter(abc.MetaPathFinder, abc.SourceLoader, utilities.Indentation):
    """ This class combines a Finder and a Loader into an Importer.

    .. inheritance-diagram:: apeman.__34__
       :parts: 2

    The strategy used overwrites the imported module with the overlay
    import under a different name. Since Python imports are atomic, one
    needs to trap modules as they are loaded and wrap them:
    overlay.tiers is mapped to overlay._tiers_.py, which is imported as
    tiers, while tiers, the original module, is imported as _tiers_.

    .. note ::
       This is not an especially good implementation, it is not thread
       safe as it does not invoke module locks when loaded.
    """
    # See section 5.5 in [1] to determine if the Path Based Finder
    # is a better fit for this class
    #
    # https://docs.python.org/3/reference/import.html

    root = descriptors.PathName()

    def __init__(self, *args, name=None, path=None, root=None, **kvps):
        super().__init__(*args, **kvps)
        self.mask = "_{}_"
        self.trap = {}
        self.wrap = {}
        self.name = name or inspect.getmodule(inspect.stack()[1][0]).__name__
        self.root = root or os.path.dirname(inspect.getmodule(inspect.stack()[1][0]).__file__)
        self.mods = self.modules()
        self.log = logging.getLogger(__name__)
        self.log.debug("{:{}}: {:40} {}".format(self.ondent("Instance"), self.__taglen__, str(self.__class__), [key for key in sys.modules.keys() if self.name in key]))

    def mapToTarget(self, name):
        """Maps request to the overlay module"""
        return modsep.join([self.mask.format(part) for part in name.split(modsep)])

    def modules(self):
        # This differs from overlays in that it recurses through the
        # folder structure to find python modules
        ext = '.py'
        mod = lambda parts, ext: [part[:-len(ext)] if enum + 1 == len(parts) else part for enum, part in enumerate(parts)]
        lst = [(mod(file.relative_to(self.root).parts, ext), file) for file in self.root.rglob('*' + ext)]
        return {modsep.join(item[0][:-1]) if item[0][-1] == "__init__" else modsep.join(item[0]): item[1] for item in lst}

    # The 3.5 module should implement this.
    #
    # def find_spec(self, name, path, target = None) :
    #     self.log.debug("{}> {:<40} {:<80}".format(self.indent("FS:" + self.name), name, str(path)))
    #     spec = util.spec_from_file_location(self.mapToTarget(name), str(self.modules()[self.mapToTarget(name)]))
    #     self.log.debug(spec)
    #     self.trap[name] = spec.loader
    #     spec.loader = self
    #     self.log.debug(spec)
    #     return spec
    #
    # def exec_module(self, *args, **kvps) :
    #     self.log.debug("Exec_Module")
    #     self.log.debug(args)
    #     self.log.debug(kvps)
    #
    # def create_module(self, *args, **kvps) :
    #     self.log.debug("Create_Module")
    #     self.log.debug(args)
    #     self.log.debug(kvps)

    def find_module(self, name, path=None):
        # self.log.debug("Find_module")
        self.log.debug("{}> {:<40} {:<80}".format(self.indent("F:" + self.name), name, str(path)))
        # self.log.debug([sys.modules[key] for key in sys.modules.keys() if name in key])
        if self.mapToTarget(name) in self.mods:  # User imports _PACKAGE_
            # self.log.debug(self.undent("F:Trap"))
            self.trap[name] = self.mods.pop(self.mapToTarget(name))
            return self
        if self.trap.pop(name, None):  # overlay imports PACKAGE
            # (default of None avoids a KeyError for names that were never trapped)
            # self.log.debug(self.undent("F:Wrap"))
            for meta in [meta for meta in sys.meta_path if meta is not self]:
                self.wrap[name] = self.wrap.get(name) or meta.find_module(name, path)
            return self
        # if name in self.wrap :  # overlay imports PACKAGE
        #     return self
        return None

    def load_module(self, name):
        # self.log.debug("{}: {:<40}".format(self.indent("L:" + self.name), name))
        load = sys.modules.get(name)
        if name in self.trap:
            # One should strictly use SourceFileLoader here instead.
            # self.log.debug(self.ondent("L:Trap"))
            file = self.trap.get(name)
            load = types.ModuleType(self.mapToTarget(name))
            with file.open('r') as data:
                code = data.read()
            # self.log.debug([key for key in sys.modules.keys() if name in key])
            load.__file__ = str(file)
            code = compile(code, str(file), 'exec')
            sys.modules[name] = load  # must occur before exec
            exec(code, load.__dict__)
            # self.log.debug([key for key in sys.modules.keys() if name in key])
            # self.log.debug(load.__version__)
        if name in self.wrap:
            # Note : importing PACKAGE as _PACKAGE_ fails.
            # This is due to the `builtin` importers preventing
            # name changes. To be explicit they can't find a funny
            # named module and one can't cross assign the module. One
            # can reassign it however
            # self.log.debug(self.ondent("L:Wrap"))
            spec = self.wrap.pop(name)
            load = spec.load_module()
            # self.log.debug([sys.modules[key] for key in sys.modules.keys() if name in key])
        # self.log.debug(self.undent("L:Done"))
        return load
        # temp = self.modules()
        # file = str(temp[self.mapToTarget(name)])
        # self.log.debug([key for key in sys.modules.keys() if name in key])
        # OverlayLoader(self.mapToTarget(name), file).load_module(self.mapToTarget(name))
        # self.log.debug([key for key in sys.modules.keys() if name in key])
        # OverlayLoader(name, file).load_module(self.mapToTarget(name))
        # self.log.debug([key for key in sys.modules.keys() if name in key])
        # self.log.debug(self.mapToTarget(name))
        # self.log.debug(self.modules().keys())
        # file = self.modules()[self.mapToTarget(name)]
        # # self.log.debug(file)
        # temp = machinery.SourceFileLoader(name, [str(self.root)])
        # temp.load_module()
        # temp = machinery.SourceFileLoader(name, self.modules()[self.mapToTarget(name)]).load_module()
        # be weary here, re-assigning names is a bit finnicky and has a rollover impact.
        # sys.modules[name] = temp  # Using sys.modules[module] = temp fails
        # self.log.debug([key for key in sys.modules.keys() if key in name])
        # self.trap[name].load_module()
        # temp = OverlayLoader(name, str(self.trap[name])).load_module(modsep.join([self.name,name]))
        # temp = machinery.SourceFileLoader(name, str(self.trap[name])).load_module()
        # return temp
        # self.log.debug([key for key in sys.modules.keys() if key in name])
        # # be weary here, re-assigning names is a bit finnicky and has a rollover impact.
        # sys.modules[name] = temp  # Using sys.modules[module] = temp fails
        # parent, _, module = name.partition(modsep)  # Was rpartition
        # if name in self.trap :  # This might break
        #     # Handle Source Import
        #     self.trap.pop(name)
        #     self.log.debug(self.ondent("Pass Trapped"))
        #     temp = self.temp.load_module()
        #     sys.modules[self.mapTarget(name)] = temp
        #     self.log.debug("{}< {}".format(self.undent("Imported"), self.mapTarget(name)))
        #     return temp
        # else :
        #     # Handle Overlay Import
        #     if module in sys.modules:  # Already Imported
        #         return sys.modules[module]  # Modules' absolute path
        #     # Import the module
        #     self.trap.append(module)
        #     file = self.mapToRoot(name)
        #     _name_ = self.mapToSource(name)
        #     root, stem = self.pathParts(self.mapToSource(name))
        #     self.log.debug("{}: {:18} -> {:18} {:80}".format(self.ondent("FileLoader"), root, stem, file))
        #     temp = machinery.SourceFileLoader(name, file).load_module()
        #     # be weary here, re-assigning names is a bit finnicky and has a rollover impact.
        #     sys.modules[name] = temp  # Using sys.modules[module] = temp fails
        #     self.log.debug("{}< {}".format(self.undent("Imported"), temp))
        #     return temp


if __name__ == "__main__":
    # Setup Logging
    import logging
    logging.basicConfig(format='%(message)s')
    logger = logging.getLogger()  # "__34__"
    logger.setLevel(logging.DEBUG)
    # Call Test Suites
    # import unittest
    # tests = {
    #     "all"      : 'test*.py',
    #     "overlay"  : '*Overlay.py',
    #     "uppercase": '*UpperCase.py',
    #     "tiers"    : '*Tiers.py',
    # }
    # test = 'all'
    # suite = unittest.TestLoader().discover('..', tests[test])
    # unittest.TextTestRunner(verbosity=1).run(suite)
    __root__ = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..\\tests')
    sys.path.append(__root__)

    import builtins

    def _import_(*args, importer=__import__):
        # Hooks the import statement
        logger.debug("import : {}".format(args[0]))
        temp = importer(*args)
        # logger.debug(dir(temp))
        logger.debug([temp.__name__, temp.__file__, temp.__package__, temp.__loader__])
        return temp

    # Atomic Imports
    import uppercase
    builtins.__import__ = _import_
    # logger.debug([sys.modules[key] for key in sys.modules.keys() if 'tiers' in key])
    # logger.debug("Primary")
    # import tiers
    # logger.debug([sys.modules[key] for key in sys.modules.keys() if 'tiers' in key])
    # logger.debug(tiers.__version__)
    logger.debug("Secondary")
    from tiers import module_a
    # logger.debug([sys.modules[key] for key in sys.modules.keys() if 'tiers' in key])
    # logger.debug([sys.modules[key] for key in sys.modules.keys() if 'os' in key])
    # logger.debug(module_a.__version__)
    # from tiers import package_a
    # logger.debug([sys.modules[key] for key in sys.modules.keys() if 'tiers' in key])
    # logger.debug([sys.modules[key] for key in sys.modules.keys() if 'os' in key])
    # logger.debug(package_a.__version__)  # commented out: package_a is only imported in the commented line above
    # Implicit Root Import
    # from overlay import *  # Test with/out __all__ defined
    # Explicit Root Import
    # from uppercase import tiers
    # Explicit Nested import
    # from overlay.tiers import module_a
    # Explicit Nested import
    # from overlay.tiers.module_a import Alpha
    # print(Alpha())
    # Explicit Staged import
    # from overlay import tiers
    # logger.debug("Modules : {}\n".format([key for key in sys.modules.keys() if key.startswith('overlay') or key.startswith('tiers')]))
    # from tiers import module_a
    # logger.debug("Modules : {}\n".format([key for key in sys.modules.keys() if 'overlay' in key or 'tiers' in key]))
    # logger.debug("\n".join(["{} : {}".format(key, sys.modules[key]) for key in sys.modules.keys() if key.startswith('overlay') or key.startswith('tiers')]))
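# --- Illustration (not part of the original module) ---
# mapToTarget masks every dotted part of a module name with self.mask
# ("_{}_"), which is how requests are redirected to the overlay copy:
#
#     imp = OverlayImporter(name="overlay", root=".")
#     imp.mapToTarget("overlay.tiers")   # -> "_overlay_._tiers_"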
PypiClean
/BioMine-0.9.5.zip/BioMine-0.9.5/biomine/variant/clinvarvariant.py
from biomine.variant.mafvariant import mafvariant
import re
from biomine.webapi import webapi


class clinvarvariant(mafvariant):
	pathogenic = "Pathogenic"
	likelyPathogenic = "Likely Pathogenic"
	likelyBenign = "Likely Benign"
	benign = "Benign"
	uncertain = "Uncertain Significance"

	def __init__(self , **kwargs):
		super(clinvarvariant,self).__init__(**kwargs)
		self.uid = kwargs.get('uid',None)
		self.trait = kwargs.get('trait',None)
		self.clinical = kwargs.get('clinical',{})
		aParentVariant = kwargs.get( 'parentVariant' , None )
		if aParentVariant:
			super( clinvarvariant , self ).copyInfo( aParentVariant )

	def copyInfo( self , copy ):
		super( clinvarvariant , self ).copyInfo( copy )
		self.uid = copy.uid
		self.trait = copy.trait
		self.clinical = copy.clinical

	def fillMissingInfo( self , copy ):
		#print "Variant.clinvarvariant::fillMissingInfo" ,
		super( clinvarvariant , self ).fillMissingInfo( copy )
		if not self.uid:
			try:
				self.uid = copy.uid
			except:
				pass
				#print "no uid"
		if not self.trait:
			try:
				self.trait = copy.trait
			except:
				pass
				#print "no trait"
		if not self.clinical:
			try:
				self.clinical = copy.clinical
			except:
				pass
				#print "no clinical"

	def __nonzero__( self ):
		for k , v in self.__dict__.iteritems():
			if ( self.checkIfRefAltStrand( k ) ):
				if ( self.nonzeroRefAltStrand( k ) ):
					return True
			else:
				if ( bool( v ) ):
					return True
		return False

	def printVariant( self , delim , **kwargs ):
		onlyThisVariant = kwargs.get( 'minimal' , False )
		if not onlyThisVariant:
			super(clinvarvariant,self).printVariant( delim , **kwargs )
		print "clinvarvariant: { " ,
		if self.uid:
			print "uid= " ,
			print self.uid + delim ,
		if self.trait:
			print "trait=> {" ,
			for db in self.trait:
				print str(db) + "=>" + str(self.trait[db]) + delim ,
			print "}" ,
		if self.clinical:
			print "clinical=> {" ,
			print "description=>" + self.clinical["description"] + delim ,
			print "review_status=>" + self.clinical["review_status"] ,
			print "}" ,
		print " }"

	def getClinical( self ):
		return self.clinical

	def getTraits( self , delim , **kwargs ):
		traits = []
		for trait in self.trait:
			traits.append( str( self.trait[trait] ) )
		return delim.join( traits )

	def attr(self):
		attributes = super(clinvarvariant,self).attr()
		if self.trait:
			attributes.append(self.trait)
		if self.clinical:
			attributes.append(self.clinical)
		if self.uid:
			attributes.append(self.uid)
		return attributes

	def linkPubMed( self , **kwargs ):
		skip = kwargs.get( 'skip' , True )
		try:
			return self.testLink( skip = skip )
		except:
			#print "biomine::variant::clinvarvariant Warning: no uid"
			return None

	def testLink( self , **kwargs ):
		base = "http://www.ncbi.nlm.nih.gov/"
		subset = "pubmed?"
		action = "LinkName=clinvar_pubmed&from_uid=" + str( self.uid )
		pm = webapi( base , subset )
		pm.action = action
		skip = kwargs.get( 'skip' , True )
		if pm.testURL( skip = skip ):
			return self.checkPubMedItems( pm )
		print "biomine::variant::clinvarvariant Warning: no site with uid " ,
		return None

	def checkPubMedItems( self , pm ):
		#TODO checks not yet working
		#pattern = re.compile( "(No\ items\ found)" )
		#found = pattern.match( pm.response.text )
		#if found:
		#	print "biomine::variant::clinvarvariant Warning: no items with uid " ,
		#	return None
		return pm.buildURL()
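# --- Illustration (not part of the original module) ---
# testLink() assembles an NCBI link from the variant's uid; e.g. for
# uid 12345 the address checked and returned would be
#     http://www.ncbi.nlm.nih.gov/pubmed?LinkName=clinvar_pubmed&from_uid=12345
# (assuming webapi.buildURL concatenates the base, subset and action set above).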
PypiClean
/ETLT-MySQL-0.10.1.tar.gz/ETLT-MySQL-0.10.1/test/writer/MySqlLoaderWriterTest.py
import datetime
import math
import unittest
from decimal import Decimal
from os import path
from uuid import UUID

import pytz

from etlt_mysql.writer.MySqlLoaderWriter import MySqlLoaderWriter


class MySqlLoaderWriterTest(unittest.TestCase):
    """
    Test cases for MySqlLoaderWriter.
    """

    # ------------------------------------------------------------------------------------------------------------------
    def test_types(self):
        filename_actual = path.abspath(path.dirname(__file__)) + '/MySqlLoaderWriterTest/test_types.csv'
        filename_expected = path.abspath(path.dirname(__file__)) + '/MySqlLoaderWriterTest/test_types.expected.csv'

        writer = MySqlLoaderWriter(filename_actual)
        writer.fields = ['bool', 'date', 'datetime', 'timedelta', 'decimal', 'empty', 'float', 'int', 'none', 'str',
                         'uuid']

        rows = [{'bool':      False,
                 'date':      datetime.date(1994, 1, 1),
                 'datetime':  datetime.datetime(1994, 1, 1, 23, 15, 30),
                 'timedelta': datetime.timedelta(days=1, seconds=12345, microseconds=1),
                 'decimal':   Decimal('0.1428571428571428571428571429'),
                 'empty':     '',
                 'float':     1.0 / 3.0,
                 'int':       123,
                 'none':      None,
                 'str':       'Ministry of Silly Walks',
                 'uuid':      UUID('{12345678-1234-5678-1234-567812345678}')},
                {'bool':      True,
                 'date':      None,
                 'datetime':  datetime.datetime(2016, 1, 1, 23, 15, 30, tzinfo=pytz.timezone('UTC')),
                 'timedelta': datetime.timedelta(),
                 'decimal':   Decimal(1) / Decimal(7),
                 'empty':     '',
                 'float':     math.pi,
                 'int':       123,
                 'none':      None,
                 'str':       'мỉאַîśŧґỷ өƒ Šỉŀłỷ שׂǻĺκŝ',  # https://www.tienhuis.nl/utf8-generator
                 'uuid':      UUID(int=0x12345678123456781234567812345678)}]

        with writer:
            for row in rows:
                writer.writerow(row)

        with open(filename_actual, 'rt', encoding='utf8') as file:
            actual = file.read()

        with open(filename_expected, 'rt', encoding='utf8') as file:
            expected = file.read()

        self.assertEqual(actual, expected)

    # ------------------------------------------------------------------------------------------------------------------
    # @todo test strings with tabs and EOL
    # @todo test with loading and selecting data from actual MySQL database

# ------------------------------------------------------------------------------------------------------------------
PypiClean
/Djaloha-0.4.2.tar.gz/Djaloha-0.4.2/djaloha/static/aloha.0.20/plugins/common/block/lib/editor.js
 * @name block.editor
 * @namespace Block attribute editors
 */
define(['aloha/jquery', 'aloha/observable'],
function(jQuery, Observable) {

    /**
     * This is the base class for all editors in the sidebar. You need to extend
     * this class if you need to write your own editor. In most cases, however,
     * it is sufficient to subclass the AbstractFormElementEditor.
     *
     * @name block.editor.AbstractEditor
     * @class An abstract editor
     */
    var AbstractEditor = Class.extend(Observable,
    /** @lends block.editor.AbstractEditor */
    {
        /**
         * Schema of the current element
         *
         * @param {Object}
         * @api
         */
        schema: null,

        /**
         * @constructor
         */
        _constructor: function(schema) {
            this.schema = schema;
        },

        /**
         * Template method to render the editor elements. Override it
         * in your subclass! Needs to return the jQuery element which
         * should be added to the DOM
         *
         * @return {jQuery}
         * @api
         */
        render: function() {
            // Implement in subclass!
        },

        /**
         * Template method to get the current editor value
         *
         * Override it in your subclass!
         *
         * @return {String}
         * @api
         */
        getValue: function() {
            // Implement in subclass!
        },

        /**
         * Method which is called at initialization time, to set the current value.
         *
         * Override it in your subclass!
         *
         * You should not throw any change event here, as we need to break the loop "Block" -> "Editor" -> "Block"
         *
         * @param {String} value
         * @api
         */
        setValue: function(value) {
            // Implement in subclass!
        },

        /**
         * Destroy the editor elements and unbind events
         * @api
         */
        destroy: function() {
            // Implement in subclass!
        },

        /**
         * On deactivating, we still need to trigger a change event if the value has been modified.
         *
         * @private
         */
        _deactivate: function() {
            this.trigger('change', this.getValue());
            this.destroy();
        }
    });

    /**
     * This is a more specialized FormElementEditor which should be used
     * for form-based editors.
     *
     * @name block.editor.AbstractFormElementEditor
     * @class An abstract form editor with label
     * @extends block.editor.AbstractEditor
     */
    var AbstractFormElementEditor = AbstractEditor.extend(
    /** @lends block.editor.AbstractFormElementEditor */
    {
        /**
         * Input element HTML definition
         *
         * You need to override this in your subclass.
         *
         * @type String
         *
         * @api
         */
        formInputElementDefinition: null,

        /**
         * The jQuery element of the form input element.
         *
         * @type {jQuery}
         */
        _$formInputElement: null,

        /**
         * Render the label and form element
         * @return {jQuery}
         */
        render: function() {
            var $wrapper = jQuery('<div class="aloha-block-editor" />');
            var guid = GENTICS.Utils.guid();
            $wrapper.append(this.renderLabel().attr('id', guid));
            $wrapper.append(this.renderFormElement().attr('id', guid));
            return $wrapper;
        },

        /**
         * Render the label for the editor, by using the "label" property
         * from the schema.
         *
         * @return {jQuery}
         */
        renderLabel: function() {
            var element = jQuery('<label />');
            element.html(this.schema.label);
            return element;
        },

        /**
         * Render the form input element
         * @return {jQuery}
         */
        renderFormElement: function() {
            var that = this;
            this._$formInputElement = jQuery(this.formInputElementDefinition);
            this.afterRenderFormElement(this._$formInputElement);

            this._$formInputElement.change(function() {
                that.trigger('change', that.getValue());
            });

            return this._$formInputElement;
        },

        /**
         * Callback which can be implemented by subclasses to adjust the rendered
         * form input element
         *
         * @param {jQuery} $formElement the form element being rendered
         * @api
         */
        afterRenderFormElement: function($formElement) {
        },

        /**
         * @return {String}
         */
        getValue: function() {
            return this._$formInputElement.val();
        },

        /**
         * We do not throw any change event here, as we need to break the loop "Block" -> "Editor" -> "Block"
         */
        setValue: function(value) {
            this._$formInputElement.val(value);
        },

        /**
         * Cleanup and remove the input element
         */
        destroy: function() {
            this._$formInputElement.remove();
        }
    });

    /**
     * @name block.editor.StringEditor
     * @class An editor for string input
     * @extends block.editor.AbstractFormElementEditor
     */
    var StringEditor = AbstractFormElementEditor.extend(
    /** @lends block.editor.StringEditor */
    {
        formInputElementDefinition: '<input type="text" />'
    });

    /**
     * @name block.editor.NumberEditor
     * @class An editor for numbers
     * @extends block.editor.AbstractFormElementEditor
     */
    var NumberEditor = AbstractFormElementEditor.extend(
    /** @lends block.editor.NumberEditor */
    {
        // TODO Range should be an option
        formInputElementDefinition: '<input type="range" />',

        afterRenderFormElement: function($formElement) {
            if (!this.schema.range) return;
            if (this.schema.range.min) {
                $formElement.attr('min', this.schema.range.min);
            }
            if (this.schema.range.max) {
                $formElement.attr('max', this.schema.range.max);
            }
            if (this.schema.range.step) {
                $formElement.attr('step', this.schema.range.step);
            }
        }
    });

    /**
     * @name block.editor.UrlEditor
     * @class An editor for URLs
     * @extends block.editor.AbstractFormElementEditor
     */
    var UrlEditor = AbstractFormElementEditor.extend(
    /** @lends block.editor.UrlEditor */
    {
        formInputElementDefinition: '<input type="url" />'
    });

    /**
     * @name block.editor.EmailEditor
     * @class An editor for email addresses
     * @extends block.editor.AbstractFormElementEditor
     */
    var EmailEditor = AbstractFormElementEditor.extend(
    /** @lends block.editor.EmailEditor */
    {
        formInputElementDefinition: '<input type="email" />'
    });

    /**
     * @name block.editor.SelectEditor
     * @class An editor for select fields
     * @extends block.editor.AbstractFormElementEditor
     */
    var SelectEditor = AbstractFormElementEditor.extend(
    /** @lends block.editor.SelectEditor */
    {
        formInputElementDefinition: '<select />',

        afterRenderFormElement: function($formElement) {
            jQuery.each(this.schema.values, function() {
                var el = this;
                $formElement.append(jQuery('<option />').attr('value', el.key).html(el.label));
            });
        }
    });

    /**
     * @name block.editor.ButtonEditor
     * @class An editor for buttons, executing a custom supplied callback "callback"
     * @extends block.editor.AbstractFormElementEditor
     */
    var ButtonEditor = AbstractFormElementEditor.extend(
    /** @lends block.editor.SelectEditor */
    {
        formInputElementDefinition: '<button />',

        afterRenderFormElement: function($formElement) {
            var that = this;
            $formElement.html(this.schema.buttonLabel);

            $formElement.click(function() {
                that.schema.callback();
            });
        }
    });

    return {
        AbstractEditor: AbstractEditor,
        AbstractFormElementEditor: AbstractFormElementEditor,
        StringEditor: StringEditor,
        NumberEditor: NumberEditor,
        UrlEditor: UrlEditor,
        EmailEditor: EmailEditor,
        SelectEditor: SelectEditor,
        ButtonEditor: ButtonEditor
    }
});
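// --- Usage sketch (not part of the original file) ---
// As the JSDoc above says, custom editors are written by subclassing
// AbstractFormElementEditor; a hypothetical color picker could look like:
//
//     var ColorEditor = AbstractFormElementEditor.extend({
//         formInputElementDefinition: '<input type="color" />'
//     });
//     var $el = new ColorEditor({label: 'Color'}).render();
//
// (ColorEditor is illustrative only; the module itself ships the String,
// Number, Url, Email, Select and Button editors returned above.)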
PypiClean
/Chrones-1.0.3.tar.gz/Chrones-1.0.3/README.md
<!--
Copyright 2020-2022 Laurent Cabaret
Copyright 2020-2022 Vincent Jacques
-->

*Chrones* is a software development tool to visualize runtime statistics (CPU percentage, GPU percentage, memory usage, *etc.*) about your program and correlate them with the phases of your program.

It aims at being very simple to use and provide useful information out of the box<!-- @todo(later) *and* at being customizable to your specific use cases -->.

Here is an example of graph produced by *Chrones* about a shell script launching a few executables (see exactly how this image is generated [at the end of this Readme](#code-of-the-example-image)):

![Example](integration-tests/readme-example/report.png)

*Chrones* was sponsored by [Laurent Cabaret](https://cabaretl.pages.centralesupelec.fr/en/publications/) from the [MICS](http://www.mics.centralesupelec.fr/) and written by [Vincent Jacques](https://vincent-jacques.net).

It's licensed under the [MIT license](http://choosealicense.com/licenses/mit/).
Its [documentation and source code](https://github.com/jacquev6/Chrones) are on GitHub.

Questions? Remarks? Bugs? Want to contribute? Open [an issue](https://github.com/jacquev6/Chrones/issues) or [a discussion](https://github.com/jacquev6/Chrones/discussions)!

<!-- @todo(later) Insert paragraph about Chrones' clients? -->

# Conceptual overview

*Chrones* consists of three parts: instrumentation (optional), monitoring and reporting.

The instrumentation part of *Chrones* runs inside your program after you've modified it.
It's used as a library for your programming language.
To use it, you add one-liners to the functions you want to know about.
After that, your program logs insider timing information about these functions.

The monitoring part is a wrapper around your program.
It runs your program as you instruct it to, preserving its access to the standard input and outputs, the environment, and its command-line.
While doing so, it monitors your program's whole process tree and logs resource usage metrics.

The reporting part reads the logs produced by the instrumentation and monitoring, and produces human-readable reports including graphs.

The instrumentation part is completely optional.
You can use the monitoring part on non-instrumented programs, or even on partially instrumented programs like a shell script calling an instrumented executable and a non-instrumented executable.
The graphs produced by *Chrones*' reporting will just miss information about your program's phases.

We've chosen the command-line as the main user interface for *Chrones* to allow easy integration into your automated workflows.
<!-- @todo(later) It can also be used as a Python library for advanced use-cases. -->

Please note that *Chrones* currently only works on Linux.
Furthermore, the C++ instrumentation requires g++.
We would gladly accept contributions that extend *Chrones*' usability.

*Chrones*' instrumentation libraries are available for <!-- @todo(later) Python,--> C++ and the shell language.

# Expected performance

The instrumentation part of *Chrones* accurately measures and reports durations down to the millisecond.
Its monitoring part takes samples a few times per second.
No nanoseconds in this project; *Chrones* is well suited for programs that run at least a few seconds.

Overhead introduced by *Chrones* in C++ programs is less than a second per million instrumented blocks.
Don't use it for functions called billions of times.

# Get started

## Install *Chrones*

The monitoring and reporting parts of *Chrones* are distributed as a [Python package on PyPI](https://pypi.org/project/Chrones/).
Install them with `pip install Chrones`.

<details>
<summary>And at the moment that's all you need. <small>(Click the arrow for more information)</small></summary>

The instrumentation parts are distributed in language-specific ways.

The Python version comes with the `Chrones` Python package you've just installed.

The C++ and shell languages don't really have package managers, so the C++ and shell versions happen to also be distributed within the Python package.

Versions for other languages will be distributed using the appropriate package managers.
</details>

## (Optional) Instrument your code

### Concepts

The instrumentation libraries are based on the following concepts:

#### Coordinator

The *coordinator* is a single object that centralizes measurements and writes them into a log file.

It also takes care of enabling or disabling instrumentation: the log will be created if and only if it detects it's being run inside *Chrones*' monitoring.
This lets you run your program outside *Chrones*' monitoring as if it was not instrumented.

#### Chrone

A *chrone* is the main instrumentation tool.
You can think of it as a stopwatch that logs an event when it's started and another event when it's stopped.

Multiple chrones can be nested.
This makes them particularly suitable to instrument [structured code](https://en.wikipedia.org/wiki/Structured_programming) with blocks and functions (*i.e.* the vast majority of modern programs).
From the log of the nested chrones, *Chrones*' reporting is able to reconstruct the evolution of the call stack(s) of the program.

Chrones have three identifying attributes: a *name*, an optional *label* and an optional *index*.
The three of them are used in reports to distinguish between chrones.
Here is their meaning:

- In languages that support it, the name is set automatically from the name of the enclosing function.
  In languages that don't, we strongly recommend that you use the same convention: a chrone's name comes from the closest named piece of code.
- It sometimes makes sense to instrument a block inside a function.
  The label is here to identify those blocks.
- Finally, when these blocks are iterations of a loop, you can use the index to distinguish them.

See `cpu.cpp` at the end of this Readme for a complete example.

<!-- @todo(later) Later because they don't appear on repport.png, only in summaries.
#### Mini-chrone
-->

### Language-specific instructions

The *Chrones* instrumentation library is currently available for the following languages:

#### Shell

First, import *Chrones* and initialize the coordinator with:

    source <(chrones instrument shell enable program-name)

where `program-name` is... the name of your program.

You can then use the two functions `chrones_start` and `chrones_stop` to instrument your shell functions:

    function foo {
        chrones_start foo
        # Do something
        chrones_stop
    }

`chrones_start` accepts one mandatory argument: the `name`, and two optional ones: the `label` and `index`.
See their description in the [Concepts](#concepts) section above.

#### C++

First, `#include <chrones.hpp>`.
The header is distributed within *Chrones*' Python package.
You can get its location with `chrones instrument c++ header-location`, that you can pass to the `-I` option of your compiler.
For example, ``g++ -I`chrones instrument c++ header-location` foo.cpp -o foo``.

`chrones.hpp` uses variadic macros with `__VA_OPT__`, so if you need to set your `-std` option, you can use either `gnu++11` or `c++20` or later.

Create the coordinator at global scope, before your `main` function:

    CHRONABLE("program-name")

where `program-name` is... the name of your program.

You can then instrument functions and blocks using the `CHRONE` macro:

    int main() {
      CHRONE();

      {
        CHRONE("loop");
        for (int i = 0; i != 100; ++i) {
          CHRONE("iteration", i);
          // Do something
        }
      }
    }

The `CHRONE` macro accepts zero to two arguments: the optional label and index.
See their description in the [Concepts](#concepts) section above.

In the example above, all three chrones will have the same name, `"int main()"`.
`"loop"` and `"iteration"` will be the respective labels of the last two chrones, and the last chrone will also have an index.

*Chrones*' instrumentation can be statically disabled by passing `-DCHRONES_DISABLED` to the compiler.
In that case, all macros provided by the header will be empty and your code will compile exactly as if it was not using *Chrones*.

Troubleshooting tip: if you get an `undefined reference to chrones::global_coordinator` error, double-check you're linking with the translation unit that calls `CHRONABLE`.

Known limitations:

- `CHRONE` must not be used outside `main`, *e.g.* in constructors and destructors of static variables

<!-- @todo(later)
#### Python

First, import *Chrones*' decorator: `from chrones.instumentation import chrone`.

Then, decorate your functions:

    @chrone
    def foo():
        # Do something

You can also instrument blocks that are not functions:

    with chrone("bar"):
        # Do something

@todo(later) Name, label, and index
-->

## Run using `chrones run`

Compile your executable(s) if required.
Then launch them using `chrones run -- your_program --with --its --options`, or `chrones run --monitor-gpu -- your_program` if your code uses an NVidia GPU.

Everything before the `--` is interpreted as options for `chrones run`.
Everything after is passed as-is to your program.

The standard input and output are passed unchanged to your program.
The exit code of `chrones run` is the exit code of `your_program`.

Have a look at `chrones run --help` for its detailed usage.

## Generate report

Run `chrones report` to generate a report in the current directory.

Have a look at `chrones report --help` for its detailed usage.

<!-- @todo(later)
## Use *Chrones* as a library

Out of the box, *Chrones* produces generic reports and graphs, but you can customize them by using *Chrones* as a Python library.
-->

# Code of the example image

As a complete example, here is the shell script that the image at the top of this Readme is about (named `example.sh`):

<!-- START example.sh --><!--
#!/bin/bash

set -o errexit
trap 'echo "Error on ${BASH_SOURCE[0]}:$LINENO"' ERR
--><!-- STOP -->

<!-- EXTEND example.sh -->
    source <(chrones instrument shell enable example)

    function waste_time {
      chrones_start waste_time
      sleep 0.5
      chrones_stop
    }

    waste_time

    dd status=none if=/dev/random of=in.dat bs=16M count=1

    chrones_start run-cpu
    ./cpu
    chrones_stop

    waste_time

    chrones_start run-gpu
    ./gpu
    chrones_stop

    waste_time
<!-- STOP -->
<!-- CHMOD+X example.sh -->

And the two executables called by the script:

- `cpu.cpp`:

<!-- START cpu.cpp -->
        #include <time.h>

        #include <chrones.hpp>

        CHRONABLE("cpu");

        void waste_time() {
          CHRONE();
          usleep(500'000);
        }

        void input_and_output() {
          CHRONE();
          char data[4 * 1024 * 1024];
          std::ifstream in("in.dat");
          for (int i = 0; i != 2; ++i) {
            in.read(data, sizeof(data));
            waste_time();
            std::ofstream out("out.dat");
            out.write(data, sizeof(data));
            waste_time();
          }
        }

        void use_cpu(const int repetitions) {
          CHRONE();
          for (int i = 0; i < repetitions; ++i) {
            volatile double x = 3.14;
            for (int j = 0; j != 1'000'000; ++j) {
              x = x * j;
            }
          }
        }

        void use_several_cores() {
          CHRONE();
          #pragma omp parallel for
          for (int i = 0; i != 8; ++i) {
            use_cpu(256 + i * 32);
          }
        }

        int main() {
          CHRONE();

          waste_time();

          input_and_output();

          {
            CHRONE("loop");
            for (int i = 0; i != 2; ++i) {
              CHRONE("iteration", i);
              waste_time();
              use_cpu(256);
            }
          }

          waste_time();

          use_several_cores();
        }
<!-- STOP -->

- `gpu.cu`:

<!-- START gpu.cu -->
        #include <cassert>

        #include <chrones.hpp>

        const int block_size = 1024;
        const int blocks_count = 128;
        const int data_size = blocks_count * block_size;

        CHRONABLE("gpu");

        void waste_time() {
          CHRONE();
          usleep(500'000);
        }

        void transfer_to_device(double* h, double* d) {
          CHRONE();
          for (int i = 0; i != 8'000'000; ++i) {
            cudaMemcpy(h, d, data_size * sizeof(double), cudaMemcpyHostToDevice);
          }
          cudaDeviceSynchronize();
        }

        __global__ void use_gpu_(double* data) {
          const int i = blockIdx.x * block_size + threadIdx.x;
          assert(i < data_size);
          volatile double x = 3.14;
          for (int j = 0; j != 700'000; ++j) {
            x = x * j;
          }
          data[i] *= x;
        }

        void use_gpu(double* data) {
          CHRONE();
          use_gpu_<<<blocks_count, block_size>>>(data);
          cudaDeviceSynchronize();
        }

        void transfer_to_host(double* d, double* h) {
          CHRONE();
          for (int i = 0; i != 8'000'000; ++i) {
            cudaMemcpy(d, h, data_size * sizeof(double), cudaMemcpyDeviceToHost);
          }
          cudaDeviceSynchronize();
        }

        int main() {
          CHRONE();

          waste_time();

          {
            CHRONE("Init CUDA");
            cudaFree(0);
          }

          waste_time();

          double* h = (double*)malloc(data_size * sizeof(double));
          for (int i = 0; i != data_size; ++i) {
            h[i] = i;
          }

          waste_time();

          double* d;
          cudaMalloc(&d, data_size * sizeof(double));

          waste_time();

          transfer_to_device(h, d);

          waste_time();

          use_gpu(d);

          waste_time();

          transfer_to_host(d, h);

          waste_time();

          cudaFree(d);

          waste_time();

          free(h);

          waste_time();
        }
<!-- STOP -->
<!-- @todo(later) Understand why transfers don't show in the report -->

This code is built using `make` and the following `Makefile`:

<!-- START run.sh --><!--
#!/bin/bash

set -o errexit
trap 'echo "Error on ${BASH_SOURCE[0]}:$LINENO"' ERR

if [[ -z "$CHRONES_DEV_USE_GPU" ]]
then
  exit
fi

rm -f run-results.json example.*.chrones.csv cpu.*.chrones.csv gpu.*.chrones.csv report.png in.dat out.dat

make
--><!-- STOP -->
<!-- CHMOD+X run.sh -->

<!-- START Makefile -->
    all: cpu gpu

    cpu: cpu.cpp
    	g++ -fopenmp -O3 -I`chrones instrument c++ header-location` cpu.cpp -o cpu

    gpu: gpu.cu
    	nvcc -O3 -I`chrones instrument c++ header-location` gpu.cu -o gpu
<!-- STOP -->
<!-- EXTEND Makefile --><!--
cpu: Makefile

gpu: Makefile
--><!-- STOP -->

It's executed like this:

<!-- EXTEND run.sh -->
    OMP_NUM_THREADS=4 chrones run --monitor-gpu -- ./example.sh
<!-- STOP -->

And the report is created like this:

<!-- EXTEND run.sh -->
    chrones report
<!-- STOP -->

# Known limitations

## Impacts of instrumentation

Adding instrumentation to your program will change what's observed by the monitoring:

- data is continuously output to the log file and this is visible in the "I/O" graph of the report
- the log file is also counted in the "Open files" graph
- in C++, an additional thread is launched in your process, visible in the "Threads" graph

## Non-monotonous system clock

*Chrones* does not handle leap seconds well.
But who does, really?

## Multiple GPUs

Machines with more than one GPU are not yet supported.
<!-- @todo(later) Support machines with several GPUs -->

# Developing *Chrones* itself

You'll need a Linux machine with:

- a reasonably recent version of Docker
- a reasonably recent version of Bash

<!-- @todo(later) Support developing on a machine without a GPU. -->
Oh, and for the moment, you need an NVidia GPU, with drivers installed and `nvidia-container-runtime` configured.

To build everything and run all tests:

    ./run-development-cycle.sh

To [bump the version number](semver.org) and publish on PyPI:

    ./publish.sh [patch|minor|major]
PypiClean
/K40Silence-0.0.1.tar.gz/K40Silence-0.0.1/src/gui/zmatrix.py
from wx import AffineMatrix2D, Matrix2D, Point2D


class ZMatrix(AffineMatrix2D):
    def __init__(self, matrix=None):
        AffineMatrix2D.__init__(self)
        if matrix is not None:
            self.Set(
                Matrix2D(
                    matrix.value_scale_x(),
                    matrix.value_skew_x(),
                    matrix.value_skew_y(),
                    matrix.value_scale_y(),
                ),
                Point2D(matrix.value_trans_x(), matrix.value_trans_y()),
            )

    def __str__(self):
        m = self.Get()[0]
        p = self.Get()[1]
        return "[%3f, %3f, %3f,\n %3f, %3f, %3f,\n %3f, %3f, %3f]" % (
            m.m_11,
            m.m_12,
            0,
            m.m_21,
            m.m_22,
            0,
            p.x,
            p.y,
            1,
        )

    def Reset(self):
        AffineMatrix2D.__init__(self)

    def PostScale(self, sx, sy=None, ax=0, ay=0):
        self.Invert()
        if sy is None:
            sy = sx
        if ax == 0 and ay == 0:
            self.Scale(1.0 / sx, 1.0 / sy)
        else:
            self.Translate(ax, ay)
            self.Scale(1.0 / sx, 1.0 / sy)
            self.Translate(-ax, -ay)
        self.Invert()

    def PostTranslate(self, px, py):
        self.Invert()
        self.Translate(-px, -py)
        self.Invert()

    def PostRotate(self, radians, rx=0, ry=0):
        self.Invert()
        if rx == 0 and ry == 0:
            self.Rotate(-radians)
        else:
            self.Translate(rx, ry)
            self.Rotate(-radians)
            self.Translate(-rx, -ry)
        self.Invert()

    def PreScale(self, sx, sy=None, ax=0, ay=0):
        if sy is None:
            sy = sx
        if ax == 0 and ay == 0:
            self.Scale(sx, sy)
        else:
            self.Translate(ax, ay)
            self.Scale(sx, sy)
            self.Translate(-ax, -ay)

    def PreTranslate(self, px, py):
        self.Translate(px, py)

    def PreRotate(self, radians, rx=0, ry=0):
        if rx == 0 and ry == 0:
            self.Rotate(radians)
        else:
            self.Translate(rx, ry)
            self.Rotate(radians)
            self.Translate(-rx, -ry)

    def GetScaleX(self):
        return self.Get()[0].m_11

    def GetScaleY(self):
        return self.Get()[0].m_22

    def GetSkewX(self):
        return self.Get()[0].m_12

    def GetSkewY(self):
        return self.Get()[0].m_21

    def GetTranslateX(self):
        return self.Get()[1].x

    def GetTranslateY(self):
        return self.Get()[1].y

    def InverseTransformPoint(self, position):
        self.Invert()
        converted_point = self.TransformPoint(position)
        self.Invert()
        return converted_point
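# --- Usage sketch (not part of the original module) ---
# wx's AffineMatrix2D only pre-multiplies; the Post* methods above get
# post-multiplication by inverting, applying the inverse operation, and
# inverting back. A minimal example of composing transforms:
#
#     m = ZMatrix()
#     m.PostTranslate(10, 20)  # translation applied after existing transforms
#     m.PreScale(2)            # scaling applied before them
#     p = m.TransformPoint((1, 1))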
PypiClean
/BubbleDet-1.0.0.tar.gz/BubbleDet-1.0.0/examples/thinwall.py
import numpy as np  # arrays and maths
import matplotlib.pyplot as plt  # plotting
from scipy import special  # gamma function
from cosmoTransitions.tunneling1D import SingleFieldInstanton  # bounce

from BubbleDet import BubbleConfig, ParticleConfig, BubbleDeterminant


# the dimension, dim = 2, 3, ... 7
dim = 4
print(f"{dim=}")


# Ivanov et al's potential (and Kosowsky et al's), with lambda and v scaled out
def V(phi, Delta):
    return 0.125 * (phi ** 2 - 1) ** 2 + Delta * (phi - 1)


# first derivative of potential
def dV(phi, Delta):
    return 0.5 * phi * (phi ** 2 - 1) + Delta


# second derivative of potential
def ddV(phi, Delta):
    return 0.5 * (3 * phi ** 2 - 1)


# metastable minimum of potential
def phi_true(Delta):
    if Delta > 0:
        return np.sort(np.roots([0.5, 0, -0.5, Delta]))[0]
    else:
        return np.sort(np.roots([0.5, 0, -0.5, Delta]))[-1]


# stable minimum of potential
def phi_false(Delta):
    if Delta > 0:
        return np.sort(np.roots([0.5, 0, -0.5, Delta]))[-1]
    else:
        return np.sort(np.roots([0.5, 0, -0.5, Delta]))[0]


# radius of thin-wall bubble
def thinwall_radius(Delta, dim):
    return (dim - 1) / (3 * Delta) + Delta * (
        6 * np.pi ** 2 - 40 + dim * (26 - 4 * dim - 3 * np.pi ** 2)
    ) / (3 * (dim - 1))


def solid_angles(dim):
    return 2 * np.pi ** (dim / 2) / special.gamma(dim / 2)


def thinwall_action_LO(Delta, dim):
    # at leading order
    return Delta ** (1 - dim) * solid_angles(dim) * (
        ((dim - 1) / 3) ** (dim - 1) * 2 / 3 / dim
    )


def thinwall_action(Delta, dim):
    # at next-to-leading order
    factor_NLO = (1 + dim * (25 - 8 * dim - 3 * np.pi ** 2)) / 2 / (dim - 1)
    return thinwall_action_LO(Delta, dim) * (1 + Delta ** 2 * factor_NLO)


# minus the logarithm of the one-loop determinant in thin-wall regime
def thinwall_log_det(Delta, dim):
    log_prefactor = (dim / 2) * (
        1 - dim + np.log(12 * thinwall_action(Delta, dim) / (2 * np.pi))
    )
    expo = Delta ** (1 - dim)
    if dim == 2:
        expo *= -1 + np.pi / 6 / np.sqrt(3)
        log_prefactor = np.log(Delta / np.pi)
    elif dim == 3:
        expo *= 10 / 27 + np.log(3) / 6
    elif dim == 4:
        expo *= 9 / 32 - np.pi / 16 / np.sqrt(3)
    elif dim == 5:
        expo *= -296 / 3645 - 2 * np.log(3) / 27
    elif dim == 6:
        expo *= -1376251 / 11197440 + 625 * np.pi / 20736 / np.sqrt(3)
    elif dim == 7:
        expo *= -1 / 140 + 3 * np.log(3) / 80
    else:
        print(f"thinwall_log_det error: dimension {dim=}")
        return float("nan")
    return expo - log_prefactor


if dim == 2:
    Deltas = np.array([0.07, 0.06, 0.05, 0.04, 0.03, 0.02, 0.015, 0.01])
elif dim == 3:
    Deltas = np.array([0.1, 0.09, 0.08, 0.07, 0.06, 0.05, 0.04, 0.03, 0.02])
elif dim == 4:
    Deltas = np.array(
        [0.1, 0.09, 0.08, 0.07, 0.06, 0.05, 0.04, 0.03, 0.02, 0.015]
    )
elif dim == 5:
    Deltas = np.array(
        [0.1, 0.09, 0.08, 0.07, 0.06, 0.05, 0.04, 0.03, 0.02, 0.015]
    )
elif dim == 6:
    Deltas = np.array(
        [0.1, 0.09, 0.08, 0.07, 0.06, 0.05, 0.04, 0.03, 0.02, 0.015]
    )
elif dim == 7:
    Deltas = np.array([0.1, 0.09, 0.08, 0.07, 0.06, 0.05, 0.04])
else:
    raise ValueError(f"Error, results not known for {dim=}")

diffS = []
diffDet = []

print(
    "%-8s %-16s %-16s %-16s %-16s %-16s"
    % ("Delta", "S1", "err(S1)", "S1*Delta^(d-1)", "diff(S1)/|S1|", "err(S1)/|S1|")
)
for Delta in Deltas:
    # CosmoTransitions object
    ct_obj = SingleFieldInstanton(
        phi_true(Delta),
        phi_false(Delta),
        lambda phi: V(phi, Delta),
        lambda phi: dV(phi, Delta),
        d2V=lambda phi: ddV(phi, Delta),
        alpha=(dim - 1),
    )

    # bounce calculation
    profile = ct_obj.findProfile(
        phitol=1e-12, xtol=1e-12, npoints=4000
    )

    # bounce action
    # creating bubble config instance
    bub_config = BubbleConfig.fromCosmoTransitions(ct_obj, profile)

    # creating particle instance
    higgs = ParticleConfig(
        W_Phi=lambda phi: ddV(phi, Delta),
        spin=0,
        dof_internal=1,
        zero_modes="Higgs",
    )

    # creating bubble determinant instance
    bub_det = BubbleDeterminant(bub_config, [higgs])

    # fluctuation determinant
    S1, S1_err = bub_det.findDeterminant()

    # difference to analytic thin-wall result
    diffS1 = abs(S1 - thinwall_log_det(Delta, dim))
    diffDet.append(diffS1 / abs(S1))

    print(
        "%-8.4g %-16.8g %-16.8g %-16.8g %-16.8g %-16.8g"
        % (
            Delta,
            S1,
            S1_err,
            S1 * Delta ** (dim - 1),
            diffS1 / abs(S1),
            S1_err / abs(S1),
        )
    )

DeltasS1 = Deltas ** 2
plt.plot(DeltasS1, diffDet, "o", fillstyle="none")
plt.plot([0, DeltasS1[-1]], [0, diffDet[-1]])
plt.plot([0, DeltasS1[0]], [0, diffDet[0]])
plt.ylabel("$S_1$ relative difference")
plt.xlabel(r"$\Delta^2$")
plt.title(f"${dim=}$")
plt.show()
PypiClean
/ImageScraper-2.0.7.tar.gz/ImageScraper-2.0.7/image_scraper/progressbar.py
from __future__ import division from __future__ import print_function from builtins import range from past.utils import old_div from builtins import object import sys import time from array import array try: from fcntl import ioctl import termios except ImportError: pass import signal class ProgressBarWidget(object): """This is an element of ProgressBar formatting. The ProgressBar object will call it's update value when an update is needed. It's size may change between call, but the results will not be good if the size changes drastically and repeatedly. """ def update(self, pbar): """Returns the string representing the widget. The parameter pbar is a reference to the calling ProgressBar, where one can access attributes of the class for knowing how the update must be made. At least this function must be overriden.""" pass class ProgressBarWidgetHFill(object): """This is a variable width element of ProgressBar formatting. The ProgressBar object will call it's update value, informing the width this object must the made. This is like TeX \\hfill, it will expand to fill the line. You can use more than one in the same line, and they will all have the same width, and together will fill the line. """ def update(self, pbar, width): """Returns the string representing the widget. The parameter pbar is a reference to the calling ProgressBar, where one can access attributes of the class for knowing how the update must be made. The parameter width is the total horizontal width the widget must have. At least this function must be overriden.""" pass class ETA(ProgressBarWidget): "Widget for the Estimated Time of Arrival" def format_time(self, seconds): return time.strftime('%H:%M:%S', time.gmtime(seconds)) def update(self, pbar): if pbar.currval == 0: return 'ETA: --:--:--' elif pbar.finished: return 'Time: %s' % self.format_time(pbar.seconds_elapsed) else: elapsed = pbar.seconds_elapsed eta = elapsed * pbar.maxval / pbar.currval - elapsed return 'ETA: %s' % self.format_time(eta) class FileTransferSpeed(ProgressBarWidget): "Widget for showing the transfer speed (useful for file transfers)." def __init__(self): self.fmt = '%6.2f %s' self.units = ['B', 'K', 'M', 'G', 'T', 'P'] def update(self, pbar): if pbar.seconds_elapsed < 2e-6: # == 0: bps = 0.0 else: bps = old_div(float(pbar.currval), pbar.seconds_elapsed) spd = bps for u in self.units: if spd < 1000: break spd /= 1000 return self.fmt % (spd, u + '/s') class RotatingMarker(ProgressBarWidget): "A rotating marker for filling the bar of progress." def __init__(self, markers='|/-\\'): self.markers = markers self.curmark = -1 def update(self, pbar): if pbar.finished: return self.markers[0] self.curmark = (self.curmark + 1) % len(self.markers) return self.markers[self.curmark] class Percentage(ProgressBarWidget): "Just the percentage done." def update(self, pbar): return '%3d%%' % pbar.percentage() class Bar(ProgressBarWidgetHFill): "The bar of progress. It will strech to fill the line." def __init__(self, marker='#', left='|', right='|'): self.marker = marker self.left = left self.right = right def _format_marker(self, pbar): if isinstance(self.marker, str): return self.marker else: return self.marker.update(pbar) def update(self, pbar, width): percent = pbar.percentage() cwidth = width - len(self.left) - len(self.right) marked_width = int(percent * cwidth / 100) m = self._format_marker(pbar) bar = (self.left + (m * marked_width).ljust(cwidth) + self.right) return bar class ReverseBar(Bar): "The reverse bar of progress, or bar of regress. 
:)" def update(self, pbar, width): percent = pbar.percentage() cwidth = width - len(self.left) - len(self.right) marked_width = int(percent * cwidth / 100) m = self._format_marker(pbar) bar = (self.left + (m * marked_width).rjust(cwidth) + self.right) return bar default_widgets = [Percentage(), ' ', Bar()] class ProgressBar(object): """This is the ProgressBar class, it updates and prints the bar. The term_width parameter may be an integer. Or None, in which case it will try to guess it, if it fails it will default to 80 columns. The simple use is like this: >>> pbar = ProgressBar().start() >>> for i in xrange(100): ... # do something ... pbar.update(i+1) ... >>> pbar.finish() But anything you want to do is possible (well, almost anything). You can supply different widgets of any type in any order. And you can even write your own widgets! There are many widgets already shipped and you should experiment with them. When implementing a widget update method you may access any attribute or function of the ProgressBar object calling the widget's update method. The most important attributes you would like to access are: - currval: current value of the progress, 0 <= currval <= maxval - maxval: maximum (and final) value of the progress - finished: True if the bar is have finished (reached 100%), False o/w - start_time: first time update() method of ProgressBar was called - seconds_elapsed: seconds elapsed since start_time - percentage(): percentage of the progress (this is a method) """ def __init__(self, maxval=100, widgets=default_widgets, term_width=None, fd=sys.stderr): assert maxval > 0 self.maxval = maxval self.widgets = widgets self.fd = fd self.signal_set = False if term_width is None: try: self.handle_resize(None, None) signal.signal(signal.SIGWINCH, self.handle_resize) self.signal_set = True except: self.term_width = 79 else: self.term_width = term_width self.currval = 0 self.finished = False self.prev_percentage = -1 self.start_time = None self.seconds_elapsed = 0 def handle_resize(self, signum, frame): h, w = array('h', ioctl(self.fd, termios.TIOCGWINSZ, '\0' * 8))[:2] self.term_width = w def percentage(self): "Returns the percentage of the progress." return self.currval * 100.0 / self.maxval def _format_widgets(self): r = [] hfill_inds = [] num_hfill = 0 currwidth = 0 for i, w in enumerate(self.widgets): if isinstance(w, ProgressBarWidgetHFill): r.append(w) hfill_inds.append(i) num_hfill += 1 elif isinstance(w, str): r.append(w) currwidth += len(w) else: weval = w.update(self) currwidth += len(weval) r.append(weval) for iw in hfill_inds: r[iw] = r[iw].update( self, old_div((self.term_width - currwidth), num_hfill)) return r def _format_line(self): return ''.join(self._format_widgets()).ljust(self.term_width) def _need_update(self): return int(self.percentage()) != int(self.prev_percentage) def update(self, value): "Updates the progress bar to a new value." if value <= 0.1: value = 0 assert 0 <= value <= self.maxval self.currval = value if not self._need_update() or self.finished: return if not self.start_time: self.start_time = time.time() self.seconds_elapsed = time.time() - self.start_time self.prev_percentage = self.percentage() if value != self.maxval: self.fd.write(self._format_line() + '\r') else: self.finished = True self.fd.write(self._format_line() + '\n') def start(self): """Start measuring time, and prints the bar at 0%. It returns self so you can use it like this: >>> pbar = ProgressBar().start() >>> for i in xrange(100): ... # do something ... pbar.update(i+1) ... 
>>> pbar.finish() """ self.update(0) return self def finish(self): """Used to tell the progress is finished.""" self.update(self.maxval) if self.signal_set: signal.signal(signal.SIGWINCH, signal.SIG_DFL) if __name__ == '__main__': import os def example1(): widgets = ['Test: ', Percentage(), ' ', Bar(marker=RotatingMarker()), ' ', ETA(), ' ', FileTransferSpeed()] pbar = ProgressBar(widgets=widgets, maxval=10000000).start() for i in range(1000000): # do something pbar.update(10 * i + 1) pbar.finish() print() def example2(): class CrazyFileTransferSpeed(FileTransferSpeed): "It's bigger between 45 and 80 percent" def update(self, pbar): if 45 < pbar.percentage() < 80: return 'Bigger Now ' + FileTransferSpeed.update(self, pbar) else: return FileTransferSpeed.update(self, pbar) widgets = [ CrazyFileTransferSpeed(), ' <<<', Bar(), '>>> ', Percentage(), ' ', ETA()] pbar = ProgressBar(widgets=widgets, maxval=10000000) # maybe do something pbar.start() for i in range(2000000): # do something pbar.update(5 * i + 1) pbar.finish() print() def example3(): widgets = [Bar('>'), ' ', ETA(), ' ', ReverseBar('<')] pbar = ProgressBar(widgets=widgets, maxval=10000000).start() for i in range(1000000): # do something pbar.update(10 * i + 1) pbar.finish() print() def example4(): widgets = ['Test: ', Percentage(), ' ', Bar(marker='0', left='[', right=']'), ' ', ETA(), ' ', FileTransferSpeed()] pbar = ProgressBar(widgets=widgets, maxval=500) pbar.start() for i in range(100, 500 + 1, 50): time.sleep(0.2) pbar.update(i) pbar.finish() print() example1() example2() example3() example4()
PypiClean
/DUI-1.1.0.tar.gz/DUI-1.1.0/README.md
# DUI

## Introduction

DUI is built with `print` and a few basic libraries (sys, os, etc.), so you rarely need to worry about compatibility: in theory it runs anywhere a Python interpreter runs. In practice we often meet rather strange terminal environments (a problem that is hard to solve completely), but you can still get a fairly good-looking interface with very little code.

This library is far from finished; only a small part has been written so far, and more features will be added over time. If you are interested, contact me at [email protected].

DUI can be installed with

```shell
pip install DUI
```

To check whether your DUI works, and which version it is, run

```python
import DUI
DUI.showTestWindow()
```

If everything is fine, a window like this appears:

```shell
+-主界面-------------------X-+
|DUI库的测试窗口             |
|                            |
|        版本:V0.1.0         |
|                            |
|        作者:Lettle         |
|                            |
|一起学习?加作者QQ:1071445082|
|                            |
|                ---By Lettle|
|>|测试按钮1                 |
|测试按钮2                   |
|测试按钮3                   |
|                            |
|                            |
|                            |
|                            |
|                            |
+----------------------------+
->
```

(in the Windows cmd console the borders should line up).

A better example is guess.py in the project; just run

```
python guess.py
```

to experience the fun of multi-window DUI!

## Quick Start

First, prepare a `python, pip` environment, then install the DUI library with:

```
pip install DUI
```

The following demonstrates a basic `DUI` interface:

```
from DUI import *

f = Frame("w")  # "w" means the Windows format; see the documentation for details
mainWindow = Window("主界面")  # the argument is the window title, drawn at the left of the top border
mainWindow.addWidget(2, Text("DUI库的测试窗口", 0))
f.addWindow(mainWindow, 0)
f.showWindow(0)
```

With this, a simple window containing one line of text has been created!

## DUI Documentation

### Frame

Frame ties Widgets, Windows, Listeners and the rest together. Frame is the skeleton of an interface: every `DUI` action happens through the Frame, e.g. `showWindow`.

#### Frame()

|Parameter|Type|Description|Default|
|:-:|:-:|:-:|:-:|
|system|`str`|system format ("w" = Windows)|Windows|
|showFPS|`boolean`|whether to display the frame rate|False|
|noClean|`boolean`|do not clear the screen|False|

Get a Frame object:

```python
from DUI import *
frame = Frame()
```

#### frame.addWindow()

> Add a window

|Parameter|Type|Description|Default|
|:-:|:-:|:-:|:-:|
|window|Window|the window to add| |
|index|`int`|index of the window **(added in order)**| |

#### frame.showWindow()

> Show a window that has already been loaded into the Frame

|Parameter|Type|Description|Default|
|:-:|:-:|:-:|:-:|
|index|`int`|index of the window to show| |

#### frame.updateWindow()

> Refresh a window

|Parameter|Type|Description|Default|
|:-:|:-:|:-:|:-:|
|window|Window|the window to refresh| |
|index|`int`|index of the window to refresh **(use its original position)**| |

#### frame.delWindow()

> Delete a window

|Parameter|Type|Description|Default|
|:-:|:-:|:-:|:-:|
|index|`int`|index of the window to delete| |

#### frame.setListener()

> Attach a listener

|Parameter|Type|Description|Default|
|:-:|:-:|:-:|:-:|
|listener|`Listener`|the Listener object to attach| |

#### frame.setSkin()

> Set the default skin (not recommended; it may cause alignment problems)

|Parameter|Type|Description|Default|
|:-:|:-:|:-:|:-:|
|skinList|`List`|the window skin List to use| |

### Window

Window is the most important part of a DUI interface and is designed by the developer. A DUI interface can have several Windows which can be switched between flexibly, controlled by the user through buttons.

A Window can actually be used on its own: all of its methods can be called directly, i.e. it can be displayed without a Frame.

#### Window()

> Get a window object

|Parameter|Type|Description|Default|
|:-:|:-:|:-:|:-:|
|title|`str`|window name shown at the left of the top border| |
|width|`int`|window width|30|
|height|`int`|window height|20|
|system|`int`|0 for Windows, 1 for Linux|0|
|skin|`List`|the window's skin|defaultSkin4Windows|

#### window.addWidget()

> Add a widget

|Parameter|Type|Description|Default|
|:-:|:-:|:-:|:-:|
|line|`int`|which line of the window| |
|widget|`Widget`|the widget object to add| |

#### window.updateWidget()

> Update a widget

|Parameter|Type|Description|Default|
|:-:|:-:|:-:|:-:|
|line|`int`|which line of the window| |
|widget|`Widget`|the new widget object| |

#### window.showWindow()

> Show the window **(possible, but not recommended to call directly)**

### Listener

Listener watches the user-interaction side of a DUI interface: buttons, input boxes and other widgets are driven by the Listener, which reports back to the Frame.

#### Listener()

|Parameter|Type|Description|Default|
|:-:|:-:|:-:|:-:|
|dict|`Dict`|table of keywords to listen for|{}|
|mode|`int`|listener type: 0 uses Python's built-in input|0|

dict example:

```python
dict = \
{
    "quit": quit,
    "w": mainWindow.up,
    "s": mainWindow.down,
    "y": listen.confirm
}
```

Here dict is a dictionary whose keys are the `keywords to listen for` and whose values are the `functions to run`. A sketch of how such a table is wired into a frame follows below.
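
For illustration only, here is a minimal sketch of how a keyword table might drive a frame's main loop, using only the methods documented in this README (`setDict`, `setListener`, `run`); the `f` and `mainWindow` objects are the ones from the Quick Start example:

```python
listen = Listener()        # mode 0: Python's built-in input()
listen.setDict({
    "quit": quit,          # typing "quit" exits the program
    "w": mainWindow.up,    # move the selection up
    "s": mainWindow.down,  # move the selection down
})
f.setListener(listen)
while True:                # run() reads one input and dispatches it
    listen.run()
```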
#### listener.setDict()

> Set the Listener's keyword table

|Parameter|Type|Description|Default|
|:-:|:-:|:-:|:-:|
|dict|`Dict`|table of keywords to listen for| |

#### listener.getText()

> The native input; by default the input prompt is marked with ->

#### listener.run()

> Run the listener (can be combined with while True)

### Widget

Widget serves Window; it can invoke Listener and Alert, and out of these pieces a developer composes the components they need. Widget provides the basic object; developers can extend it with new widgets to enrich their own DUI interfaces.

DUI ships with a built-in Text widget that inherits from the Widget class; developers can use the Text widget as a template when writing new widgets of their own.

#### Widget

> The basic widget parent class; every widget inherits from Widget, and every attribute has get/set methods

|Attribute|Type|Description|Default|
|:-:|:-:|:-:|:-:|
|widget_type|`str`|marks the widget type| |
|id|`str`|widget id|None|

#### Alert(Widget)

> Alert box widget, inherits from Widget

|Parameter|Type|Description|Default|
|:-:|:-:|:-:|:-:|
|text|`str`|content to display|""|
|window_width|`int`|window width|None|

#### Text(Widget)

> Text widget, inherits from Widget

|Parameter|Type|Description|Default|
|:-:|:-:|:-:|:-:|
|text|`str`|content to display|""|
|type|`int`|display mode: 0 left-aligned, 1 centered, 2 right-aligned|0|
|id|str|id|None|

##### Text.setText()

|Parameter|Type|Description|Default|
|:-:|:-:|:-:|:-:|
|text|`str`|content to display| |
|type|`int`|display mode: 0 left-aligned, 1 centered, 2 right-aligned|0|

##### Text.setType()

|Parameter|Type|Description|Default|
|:-:|:-:|:-:|:-:|
|type|`int`|display mode: 0 left-aligned, 1 centered, 2 right-aligned| |

#### Button(Text)

> Button widget, inherits from Text

|Parameter|Type|Description|Default|
|:-:|:-:|:-:|:-:|
|text|`str`|content to display|""|
|onClick|`function`|button click handler|None|
|id|str|id|None|

## Examples

The project root contains two Python files: guess.py and ball.py.

guess.py — a number-guessing game that demonstrates buttons, alert boxes and more.

ball.py — a bouncing-ball demo that shows how DUI currently draws continuous animation (this will change in a later version).
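
As a closing illustration, here is a hedged sketch of a window with a clickable button, based only on the constructor signatures documented above (exactly how `onClick` is dispatched is an assumption, not something this README specifies):

```python
from DUI import *

def on_press():
    pass  # assumed: onClick callbacks take no arguments

f = Frame("w")
win = Window("demo")
win.addWidget(2, Text("Hello DUI", 1))          # centered text on line 2
win.addWidget(4, Button("Press me", on_press))  # button on line 4
f.addWindow(win, 0)
f.showWindow(0)
```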
PypiClean
/Flask_IPInfo-0.0.5-py3-none-any.whl/flask_ipinfo.py
import re
import urllib
import json

from flask import request

__version__ = '0.0.5'
__author__ = "borisliu"


class IPInfo(object):
    """Information about the visitor, derived from the current request."""

    @property
    def browser(self):
        """Get the visitor's browser type."""
        ua = request.headers.get('User-Agent')
        if ua:
            browser = ua
            if re.compile(r'MicroMessenger', re.I).search(ua):
                browser = 'WeChat'
            elif re.compile(r'MinxingMessenger', re.I).search(ua):
                browser = 'MinxingMessenger'
            elif re.compile(r'QQ', re.I).search(ua):
                browser = 'TencentQQ'
            elif (re.compile(r'MSIE', re.I).search(ua)
                    or re.compile(r'rv:([^\)]+)\) like Gecko', re.I).search(ua)):
                browser = 'MSIE'
            elif re.compile(r'Firefox', re.I).search(ua):
                browser = 'Firefox'
            elif re.compile(r'Chrome', re.I).search(ua):
                browser = 'Chrome'
            elif re.compile(r'Safari', re.I).search(ua):
                browser = 'Safari'
            elif re.compile(r'Opera', re.I).search(ua):
                browser = 'Opera'
            return browser
        else:
            return 'Unknown'

    @property
    def lang(self):
        """Get the visitor's browser language."""
        lang = request.headers.get('Accept-Language')
        if lang:
            if re.compile(r'zh-cn', re.I).search(lang):
                lang = '简体中文'
            elif re.compile(r'zh', re.I).search(lang):
                lang = '繁体中文'
            else:
                lang = 'English'
            return lang
        else:
            return 'Unknown'

    @property
    def os(self):
        """Get the visitor's operating system."""
        ua = request.headers.get('User-Agent')
        if ua:
            if re.compile(r'win', re.I).search(ua):
                os = 'Windows'
            elif re.compile(r'iphone', re.I).search(ua):
                os = 'iPhone'
            elif re.compile(r'mac', re.I).search(ua):
                os = 'MAC'
            elif re.compile(r'android', re.I).search(ua):
                os = 'Android'
            elif re.compile(r'linux', re.I).search(ua):
                os = 'Linux'
            elif re.compile(r'unix', re.I).search(ua):
                os = 'Unix'
            elif re.compile(r'bsd', re.I).search(ua):
                os = 'BSD'
            else:
                os = 'Other'
            return os
        else:
            return 'Unknown'

    @property
    def ipaddress(self):
        """Get the visitor's IP address."""
        ip = request.remote_addr
        if ip:
            return ip
        else:
            return 'Unknown'
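
if __name__ == '__main__':
    # Illustrative usage sketch (not part of the original module): the
    # properties above read flask.request, so they only work inside a
    # request context. The app and route below are hypothetical.
    from flask import Flask

    app = Flask(__name__)
    ipinfo = IPInfo()

    @app.route('/')
    def index():
        # Each property is evaluated against the current request.
        return 'browser=%s os=%s lang=%s ip=%s' % (
            ipinfo.browser, ipinfo.os, ipinfo.lang, ipinfo.ipaddress)

    app.run(debug=True)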
PypiClean
/GTW-1.2.6.tar.gz/GTW-1.2.6/__test__/MF3.py
from __future__ import division, print_function
from __future__ import absolute_import, unicode_literals

from _GTW import GTW
from _MOM.import_MOM import Q

import _GTW._OMP._PAP.import_PAP

GTW.OMP.PAP.Phone.change_attribute_default ("cc", "+43")

from _GTW.__test__.model import *
from _GTW._MF3 import Element as MF3_E

from _TFL._Meta.Single_Dispatch import Single_Dispatch
from _TFL.Regexp import Multi_Re_Replacer, Re_Replacer, re

import _GTW._OMP._SRM.Ranking

_cleaner = Re_Replacer \
    ( r"'\$sid' : '[0-9a-f]+'"
    , r"'$sid' : <sid value>"
    )

def show_elements (f, attr) :
    getter = getattr (Q, attr)
    for e in f.elements_transitive () :
        try :
            v = getter (e)
        except AttributeError :
            v = "---"
        if isinstance (v, TFL.Undef) :
            v = "---"
        print (("%s %s" % (e, v)).strip ())
# end def show_elements

@Single_Dispatch (T = MF3_E._Base_)
def elem_x (x, getters, default = "---") :
    nick   = "_".join \
        (p [0] for p in x.__class__.__name__.strip ("_").split ("_"))
    result = ["%-2s " % (nick, )]
    for getter in getters :
        try :
            tail = getter (x)
        except AttributeError :
            tail = default
        if isinstance (tail, TFL.Undef) :
            tail = default
        if isinstance (tail, dict) :
            tail = ", ".join \
                ("%s = %s" % (k, v) for k, v in sorted (pyk.iteritems (tail)))
        if isinstance (tail, (dict, list, tuple)) :
            tail = portable_repr (tail)
        result.append (str (tail))
    return result
# end def elem_x

def show_elements_x (f, * attrs, ** kw) :
    def _format (lens, elems) :
        return " ".join \
            (("%-*s" % (l, e)) for l, e in zip (lens, elems)).rstrip ()
    default = kw.get ("default", "---")
    filter  = kw.get ("filter", lambda x : True)
    getters = tuple (getattr (Q, attr) for attr in attrs)
    elems   = tuple \
        ( elem_x (e, getters, default = default)
          for e in f.elements_transitive () if filter (e)
        )
    lens    = list ((max (len (e) for e in es)) for es in zip (* elems))
    print (_format (lens, ("Type", ) + attrs))
    print ("=" * (sum (lens) + 2 * len (lens)))
    for es in elems :
        print (_format (lens, es))
# end def show_elements_x

def show_completers (f, * attrs, ** kw) :
    kw.setdefault ("filter", Q.completer != None)
    return show_elements_x (f, * attrs, ** kw)
# end def show_completers

def show_completers_js (f) :
    show_formatted (f.as_json_cargo ["completers"])
# end def show_completers_js

def show_field_values (f) :
    show_formatted (f.as_json_cargo ["cargo"] ["field_values"])
# end def show_field_values

def show_formatted (x) :
    result = _cleaner (formatted (x))
    print (result)
# end def show_formatted

_test_element = """
>>> scope = Scaffold.scope (%(p1)s, %(n1)s) # doctest:+ELLIPSIS
Creating new scope MOMT__...
>>> scope.db_meta_data.dbid = '2d802327-5c99-49ca-9af7-2ddc6b4c648b' >>> PAP = scope.PAP >>> p = PAP.Person ("Tanzer", "Christian", lifetime = ("19590926", ), raw=True) >>> ph = PAP.Phone ("43", "1", "98765432", raw=True) >>> pph = PAP.Person_has_Phone (p, ph, desc = "example", extension = "42", raw=True) >>> p_attr_spec= { "lifetime.start" : dict (init = "2000-07-23"), "sex" : dict (init = "M")} >>> F_Person = MF3_E.Entity.Auto (scope.PAP.Person, id_prefix = "X") >>> F_Person_s = MF3_E.Entity.Auto (scope.PAP.Person, id_prefix = "Y", attr_spec = { "lifetime.finish" : dict (skip = True)}) >>> F_Person_z = MF3_E.Entity.Auto (scope.PAP.Person, id_prefix = "Z", include_rev_refs = ("phones", )) >>> f_Person = F_Person (scope) >>> f_Person_s = F_Person_s (scope, attr_spec = p_attr_spec) >>> f_Person_z = F_Person_z (scope) >>> f_p = F_Person (scope, p) >>> f_p_s = F_Person_s (scope, p, attr_spec = p_attr_spec) >>> f_p_z = F_Person_z (scope, p) >>> f_p_z2 = F_Person_z (scope, p) >>> _ = f_p_z2 ["Z-26:phones"].add () >>> _ = f_p_z2 ["Z-26:phones"].add () >>> F_Person <class Entity X-26> >>> F_Person ["X-26:lifetime.start"] <class Field X-26:lifetime.start> >>> F_Person ["lifetime.finish"] <class Field X-26:lifetime.finish> >>> f_Person ["X-26:lifetime.start"] <Field X-26:lifetime.start> >>> f_Person ["lifetime.finish"] <Field X-26:lifetime.finish> >>> show_elements (f_Person, "cooked") <Entity X-26> --- <Field X-26:last_name> <Field X-26:first_name> <Field X-26:middle_name> <Field X-26:title> <Field_Composite X-26:lifetime> () <Field X-26:lifetime.start> None <Field X-26:lifetime.finish> None <Field X-26:sex> None >>> show_elements (f_p, "cooked") <Entity X-26> --- <Field X-26:last_name> tanzer <Field X-26:first_name> christian <Field X-26:middle_name> <Field X-26:title> <Field_Composite X-26:lifetime> ('1959-09-26', ) <Field X-26:lifetime.start> 1959-09-26 <Field X-26:lifetime.finish> None <Field X-26:sex> None >>> show_elements (f_p, "q_name") <Entity X-26> None <Field X-26:last_name> last_name <Field X-26:first_name> first_name <Field X-26:middle_name> middle_name <Field X-26:title> title <Field_Composite X-26:lifetime> lifetime <Field X-26:lifetime.start> lifetime.start <Field X-26:lifetime.finish> lifetime.finish <Field X-26:sex> sex >>> for e in f_p.field_elements : ... 
print (e) <Field X-26:last_name> <Field X-26:first_name> <Field X-26:middle_name> <Field X-26:title> <Field X-26:lifetime.start> <Field X-26:lifetime.finish> <Field X-26:sex> >>> show_elements (f_Person, "edit") <Entity X-26> --- <Field X-26:last_name> <Field X-26:first_name> <Field X-26:middle_name> <Field X-26:title> <Field_Composite X-26:lifetime> <Field X-26:lifetime.start> <Field X-26:lifetime.finish> <Field X-26:sex> >>> show_elements (f_Person_s, "edit") <Entity Y-26> --- <Field Y-26:last_name> <Field Y-26:first_name> <Field Y-26:middle_name> <Field Y-26:title> <Field_Composite Y-26:lifetime> <Field Y-26:lifetime.start> 2000-07-23 <Field Y-26:sex> M >>> show_elements (f_p, "edit") <Entity X-26> --- <Field X-26:last_name> Tanzer <Field X-26:first_name> Christian <Field X-26:middle_name> <Field X-26:title> <Field_Composite X-26:lifetime> <Field X-26:lifetime.start> 1959-09-26 <Field X-26:lifetime.finish> <Field X-26:sex> >>> show_elements (f_p_s, "edit") <Entity Y-26> --- <Field Y-26:last_name> Tanzer <Field Y-26:first_name> Christian <Field Y-26:middle_name> <Field Y-26:title> <Field_Composite Y-26:lifetime> <Field Y-26:lifetime.start> 2000-07-23 <Field Y-26:sex> M >>> show_elements (f_p, "ui_display") <Entity X-26> Tanzer Christian <Field X-26:last_name> Tanzer <Field X-26:first_name> Christian <Field X-26:middle_name> <Field X-26:title> <Field_Composite X-26:lifetime> 1959-09-26 <Field X-26:lifetime.start> 1959-09-26 <Field X-26:lifetime.finish> <Field X-26:sex> >>> show_elements (f_p, "essence") <Entity X-26> ('tanzer', 'christian', '', '') <Field X-26:last_name> ('tanzer', 'christian', '', '') <Field X-26:first_name> ('tanzer', 'christian', '', '') <Field X-26:middle_name> ('tanzer', 'christian', '', '') <Field X-26:title> ('tanzer', 'christian', '', '') <Field_Composite X-26:lifetime> ('1959-09-26', ) <Field X-26:lifetime.start> ('1959-09-26', ) <Field X-26:lifetime.finish> ('1959-09-26', ) <Field X-26:sex> ('tanzer', 'christian', '', '') >>> show_elements (f_p, "Entity.essence") <Entity X-26> ('tanzer', 'christian', '', '') <Field X-26:last_name> ('tanzer', 'christian', '', '') <Field X-26:first_name> ('tanzer', 'christian', '', '') <Field X-26:middle_name> ('tanzer', 'christian', '', '') <Field X-26:title> ('tanzer', 'christian', '', '') <Field_Composite X-26:lifetime> ('tanzer', 'christian', '', '') <Field X-26:lifetime.start> ('tanzer', 'christian', '', '') <Field X-26:lifetime.finish> ('tanzer', 'christian', '', '') <Field X-26:sex> ('tanzer', 'christian', '', '') >>> show_elements (f_Person, "root") <Entity X-26> <Entity X-26> <Field X-26:last_name> <Entity X-26> <Field X-26:first_name> <Entity X-26> <Field X-26:middle_name> <Entity X-26> <Field X-26:title> <Entity X-26> <Field_Composite X-26:lifetime> <Entity X-26> <Field X-26:lifetime.start> <Entity X-26> <Field X-26:lifetime.finish> <Entity X-26> <Field X-26:sex> <Entity X-26> >>> show_elements (f_Person, "Entity") <Entity X-26> <Entity X-26> <Field X-26:last_name> <Entity X-26> <Field X-26:first_name> <Entity X-26> <Field X-26:middle_name> <Entity X-26> <Field X-26:title> <Entity X-26> <Field_Composite X-26:lifetime> <Entity X-26> <Field X-26:lifetime.start> <Entity X-26> <Field X-26:lifetime.finish> <Entity X-26> <Field X-26:sex> <Entity X-26> >>> show_elements (f_Person_z, "Entity") <Entity Z-26> <Entity Z-26> <Field Z-26:last_name> <Entity Z-26> <Field Z-26:first_name> <Entity Z-26> <Field Z-26:middle_name> <Entity Z-26> <Field Z-26:title> <Entity Z-26> <Field_Composite Z-26:lifetime> <Entity Z-26> <Field 
Z-26:lifetime.start> <Entity Z-26> <Field Z-26:lifetime.finish> <Entity Z-26> <Field Z-26:sex> <Entity Z-26> <Field_Rev_Ref Z-26:phones> <Entity Z-26> >>> show_elements (f_Person, "template_macro") <Entity X-26> Entity_Form <Field X-26:last_name> Field <Field X-26:first_name> Field <Field X-26:middle_name> Field <Field X-26:title> Field <Field_Composite X-26:lifetime> Field_Composite <Field X-26:lifetime.start> Field <Field X-26:lifetime.finish> Field <Field X-26:sex> Field >>> show_elements (f_Person, "template_module") <Entity X-26> mf3 <Field X-26:last_name> None <Field X-26:first_name> None <Field X-26:middle_name> None <Field X-26:title> None <Field_Composite X-26:lifetime> mf3_h_cols <Field X-26:lifetime.start> None <Field X-26:lifetime.finish> None <Field X-26:sex> None >>> show_elements (f_Person_z ["phones"].proto, "parent") <class Entity_Rev_Ref Z-26:phones> <class Field_Rev_Ref Z-26:phones> <class Field_Entity Z-26:phones::right> <class Entity_Rev_Ref Z-26:phones> <class Field Z-26:phones::right.cc> <class Field_Entity Z-26:phones::right> <class Field Z-26:phones::right.ndc> <class Field_Entity Z-26:phones::right> <class Field Z-26:phones::right.sn> <class Field_Entity Z-26:phones::right> <class Field Z-26:phones::extension> <class Entity_Rev_Ref Z-26:phones> <class Field Z-26:phones::desc> <class Entity_Rev_Ref Z-26:phones> <class Field_Ref_Hidden Z-26:phones::left> <class Entity_Rev_Ref Z-26:phones> <class Field Z-26:phones::left.last_name> <class Field_Ref_Hidden Z-26:phones::left> <class Field Z-26:phones::left.first_name> <class Field_Ref_Hidden Z-26:phones::left> <class Field Z-26:phones::left.middle_name> <class Field_Ref_Hidden Z-26:phones::left> <class Field Z-26:phones::left.title> <class Field_Ref_Hidden Z-26:phones::left> >>> show_elements (f_p_z, "Entity") <Entity Z-26> <Entity Z-26> <Field Z-26:last_name> <Entity Z-26> <Field Z-26:first_name> <Entity Z-26> <Field Z-26:middle_name> <Entity Z-26> <Field Z-26:title> <Entity Z-26> <Field_Composite Z-26:lifetime> <Entity Z-26> <Field Z-26:lifetime.start> <Entity Z-26> <Field Z-26:lifetime.finish> <Entity Z-26> <Field Z-26:sex> <Entity Z-26> <Field_Rev_Ref Z-26:phones> <Entity Z-26> <Entity_Rev_Ref Z-26:phones@3> <Entity_Rev_Ref Z-26:phones@3> <Field_Entity Z-26:phones::right@3> <Entity_Rev_Ref Z-26:phones@3> <Field Z-26:phones::right.cc@3> <Field_Entity Z-26:phones::right@3> <Field Z-26:phones::right.ndc@3> <Field_Entity Z-26:phones::right@3> <Field Z-26:phones::right.sn@3> <Field_Entity Z-26:phones::right@3> <Field Z-26:phones::extension@3> <Entity_Rev_Ref Z-26:phones@3> <Field Z-26:phones::desc@3> <Entity_Rev_Ref Z-26:phones@3> <Field_Ref_Hidden Z-26:phones::left@3> <Entity_Rev_Ref Z-26:phones@3> >>> show_elements (f_p_z, "essence") <Entity Z-26> ('tanzer', 'christian', '', '') <Field Z-26:last_name> ('tanzer', 'christian', '', '') <Field Z-26:first_name> ('tanzer', 'christian', '', '') <Field Z-26:middle_name> ('tanzer', 'christian', '', '') <Field Z-26:title> ('tanzer', 'christian', '', '') <Field_Composite Z-26:lifetime> ('1959-09-26', ) <Field Z-26:lifetime.start> ('1959-09-26', ) <Field Z-26:lifetime.finish> ('1959-09-26', ) <Field Z-26:sex> ('tanzer', 'christian', '', '') <Field_Rev_Ref Z-26:phones> ('tanzer', 'christian', '', '') <Entity_Rev_Ref Z-26:phones@3> (('tanzer', 'christian', '', ''), ('43', '1', '98765432'), '42') <Field_Entity Z-26:phones::right@3> ('43', '1', '98765432') <Field Z-26:phones::right.cc@3> ('43', '1', '98765432') <Field Z-26:phones::right.ndc@3> ('43', '1', '98765432') <Field 
Z-26:phones::right.sn@3> ('43', '1', '98765432') <Field Z-26:phones::extension@3> (('tanzer', 'christian', '', ''), ('43', '1', '98765432'), '42') <Field Z-26:phones::desc@3> (('tanzer', 'christian', '', ''), ('43', '1', '98765432'), '42') <Field_Ref_Hidden Z-26:phones::left@3> ('tanzer', 'christian', '', '') >>> show_elements (f_p_z, "label") <Entity Z-26> Person <Field Z-26:last_name> Last name <Field Z-26:first_name> First name <Field Z-26:middle_name> Middle name <Field Z-26:title> Academic title <Field_Composite Z-26:lifetime> Lifetime <Field Z-26:lifetime.start> Start <Field Z-26:lifetime.finish> Finish <Field Z-26:sex> Sex <Field_Rev_Ref Z-26:phones> Phones <Entity_Rev_Ref Z-26:phones@3> Person has Phone <Field_Entity Z-26:phones::right@3> Phone <Field Z-26:phones::right.cc@3> Country code <Field Z-26:phones::right.ndc@3> Network destination code <Field Z-26:phones::right.sn@3> Subscriber number <Field Z-26:phones::extension@3> Extension <Field Z-26:phones::desc@3> Description <Field_Ref_Hidden Z-26:phones::left@3> Person >>> show_elements (f_p_z, "_po_index") <Entity Z-26> None <Field Z-26:last_name> None <Field Z-26:first_name> None <Field Z-26:middle_name> None <Field Z-26:title> None <Field_Composite Z-26:lifetime> None <Field Z-26:lifetime.start> None <Field Z-26:lifetime.finish> None <Field Z-26:sex> None <Field_Rev_Ref Z-26:phones> None <Entity_Rev_Ref Z-26:phones@3> None <Field_Entity Z-26:phones::right@3> None <Field Z-26:phones::right.cc@3> None <Field Z-26:phones::right.ndc@3> None <Field Z-26:phones::right.sn@3> None <Field Z-26:phones::extension@3> None <Field Z-26:phones::desc@3> None <Field_Ref_Hidden Z-26:phones::left@3> None >>> show_elements (f_p_z, "po_index") <Entity Z-26> 0 <Field Z-26:last_name> 1 <Field Z-26:first_name> 2 <Field Z-26:middle_name> 3 <Field Z-26:title> 4 <Field_Composite Z-26:lifetime> 5 <Field Z-26:lifetime.start> 6 <Field Z-26:lifetime.finish> 7 <Field Z-26:sex> 8 <Field_Rev_Ref Z-26:phones> 9 <Entity_Rev_Ref Z-26:phones@3> 10 <Field_Entity Z-26:phones::right@3> 11 <Field Z-26:phones::right.cc@3> 12 <Field Z-26:phones::right.ndc@3> 13 <Field Z-26:phones::right.sn@3> 14 <Field Z-26:phones::extension@3> 15 <Field Z-26:phones::desc@3> 16 <Field_Ref_Hidden Z-26:phones::left@3> 17 >>> show_elements (f_p_z, "_po_index") <Entity Z-26> 0 <Field Z-26:last_name> 1 <Field Z-26:first_name> 2 <Field Z-26:middle_name> 3 <Field Z-26:title> 4 <Field_Composite Z-26:lifetime> 5 <Field Z-26:lifetime.start> 6 <Field Z-26:lifetime.finish> 7 <Field Z-26:sex> 8 <Field_Rev_Ref Z-26:phones> 9 <Entity_Rev_Ref Z-26:phones@3> 10 <Field_Entity Z-26:phones::right@3> 11 <Field Z-26:phones::right.cc@3> 12 <Field Z-26:phones::right.ndc@3> 13 <Field Z-26:phones::right.sn@3> 14 <Field Z-26:phones::extension@3> 15 <Field Z-26:phones::desc@3> 16 <Field_Ref_Hidden Z-26:phones::left@3> 17 >>> f_p_z.reset_once_properties () >>> print (f_p_z, f_p_z._po_index) <Entity Z-26> None >>> f_p_z ["Z-26:phones"] <Field_Rev_Ref Z-26:phones> >>> f_p_z ["Z-26:phones@3"] <Entity_Rev_Ref Z-26:phones@3> >>> show_elements (f_p_z, "id") <Entity Z-26> Z-26 <Field Z-26:last_name> Z-26:last_name <Field Z-26:first_name> Z-26:first_name <Field Z-26:middle_name> Z-26:middle_name <Field Z-26:title> Z-26:title <Field_Composite Z-26:lifetime> Z-26:lifetime <Field Z-26:lifetime.start> Z-26:lifetime.start <Field Z-26:lifetime.finish> Z-26:lifetime.finish <Field Z-26:sex> Z-26:sex <Field_Rev_Ref Z-26:phones> Z-26:phones <Entity_Rev_Ref Z-26:phones@3> Z-26:phones@3 <Field_Entity Z-26:phones::right@3> 
Z-26:phones::right@3 <Field Z-26:phones::right.cc@3> Z-26:phones::right.cc@3 <Field Z-26:phones::right.ndc@3> Z-26:phones::right.ndc@3 <Field Z-26:phones::right.sn@3> Z-26:phones::right.sn@3 <Field Z-26:phones::extension@3> Z-26:phones::extension@3 <Field Z-26:phones::desc@3> Z-26:phones::desc@3 <Field_Ref_Hidden Z-26:phones::left@3> Z-26:phones::left@3 >>> show_elements (f_p_z, "index") <Entity Z-26> <Field Z-26:last_name> <Field Z-26:first_name> <Field Z-26:middle_name> <Field Z-26:title> <Field_Composite Z-26:lifetime> <Field Z-26:lifetime.start> <Field Z-26:lifetime.finish> <Field Z-26:sex> <Field_Rev_Ref Z-26:phones> <Entity_Rev_Ref Z-26:phones@3> @3 <Field_Entity Z-26:phones::right@3> @3 <Field Z-26:phones::right.cc@3> @3 <Field Z-26:phones::right.ndc@3> @3 <Field Z-26:phones::right.sn@3> @3 <Field Z-26:phones::extension@3> @3 <Field Z-26:phones::desc@3> @3 <Field_Ref_Hidden Z-26:phones::left@3> @3 >>> show_elements (f_p_z, "parent") <Entity Z-26> None <Field Z-26:last_name> <Entity Z-26> <Field Z-26:first_name> <Entity Z-26> <Field Z-26:middle_name> <Entity Z-26> <Field Z-26:title> <Entity Z-26> <Field_Composite Z-26:lifetime> <Entity Z-26> <Field Z-26:lifetime.start> <Field_Composite Z-26:lifetime> <Field Z-26:lifetime.finish> <Field_Composite Z-26:lifetime> <Field Z-26:sex> <Entity Z-26> <Field_Rev_Ref Z-26:phones> <Entity Z-26> <Entity_Rev_Ref Z-26:phones@3> <Field_Rev_Ref Z-26:phones> <Field_Entity Z-26:phones::right@3> <Entity_Rev_Ref Z-26:phones@3> <Field Z-26:phones::right.cc@3> <Field_Entity Z-26:phones::right@3> <Field Z-26:phones::right.ndc@3> <Field_Entity Z-26:phones::right@3> <Field Z-26:phones::right.sn@3> <Field_Entity Z-26:phones::right@3> <Field Z-26:phones::extension@3> <Entity_Rev_Ref Z-26:phones@3> <Field Z-26:phones::desc@3> <Entity_Rev_Ref Z-26:phones@3> <Field_Ref_Hidden Z-26:phones::left@3> <Entity_Rev_Ref Z-26:phones@3> >>> show_elements (f_p_z, "q_name") <Entity Z-26> None <Field Z-26:last_name> last_name <Field Z-26:first_name> first_name <Field Z-26:middle_name> middle_name <Field Z-26:title> title <Field_Composite Z-26:lifetime> lifetime <Field Z-26:lifetime.start> lifetime.start <Field Z-26:lifetime.finish> lifetime.finish <Field Z-26:sex> sex <Field_Rev_Ref Z-26:phones> phones <Entity_Rev_Ref Z-26:phones@3> phones <Field_Entity Z-26:phones::right@3> phones.right <Field Z-26:phones::right.cc@3> phones.right.cc <Field Z-26:phones::right.ndc@3> phones.right.ndc <Field Z-26:phones::right.sn@3> phones.right.sn <Field Z-26:phones::extension@3> phones.extension <Field Z-26:phones::desc@3> phones.desc <Field_Ref_Hidden Z-26:phones::left@3> phones.left >>> show_elements (f_p_z, "r_name") <Entity Z-26> --- <Field Z-26:last_name> last_name <Field Z-26:first_name> first_name <Field Z-26:middle_name> middle_name <Field Z-26:title> title <Field_Composite Z-26:lifetime> lifetime <Field Z-26:lifetime.start> lifetime.start <Field Z-26:lifetime.finish> lifetime.finish <Field Z-26:sex> sex <Field_Rev_Ref Z-26:phones> phones <Entity_Rev_Ref Z-26:phones@3> --- <Field_Entity Z-26:phones::right@3> right <Field Z-26:phones::right.cc@3> cc <Field Z-26:phones::right.ndc@3> ndc <Field Z-26:phones::right.sn@3> sn <Field Z-26:phones::extension@3> extension <Field Z-26:phones::desc@3> desc <Field_Ref_Hidden Z-26:phones::left@3> left >>> for e in f_p.entity_elements : ... 
print (e, portable_repr (sorted (getattr (e, "_Element_Map", [])))) <Entity X-26> ['X-26:first_name', 'X-26:last_name', 'X-26:lifetime', 'X-26:lifetime.finish', 'X-26:lifetime.start', 'X-26:middle_name', 'X-26:sex', 'X-26:title', 'first_name', 'last_name', 'lifetime', 'lifetime.finish', 'lifetime.start', 'middle_name', 'sex', 'title'] >>> for e in f_p_z.entity_elements : ... print (e, portable_repr (sorted (getattr (e, "_Element_Map", [])))) <Entity Z-26> ['Z-26:first_name', 'Z-26:last_name', 'Z-26:lifetime', 'Z-26:lifetime.finish', 'Z-26:lifetime.start', 'Z-26:middle_name', 'Z-26:phones', 'Z-26:phones::desc@3', 'Z-26:phones::extension@3', 'Z-26:phones::left.first_name@3', 'Z-26:phones::left.last_name@3', 'Z-26:phones::left.middle_name@3', 'Z-26:phones::left.title@3', 'Z-26:phones::left@3', 'Z-26:phones::right.cc@3', 'Z-26:phones::right.ndc@3', 'Z-26:phones::right.sn@3', 'Z-26:phones::right@3', 'Z-26:phones@3', 'Z-26:sex', 'Z-26:title', 'first_name', 'last_name', 'lifetime', 'lifetime.finish', 'lifetime.start', 'middle_name', 'phones', 'phones.desc', 'phones.extension', 'phones.left', 'phones.left.first_name', 'phones.left.last_name', 'phones.left.middle_name', 'phones.left.title', 'phones.right', 'phones.right.cc', 'phones.right.ndc', 'phones.right.sn', 'sex', 'title'] <Entity_Rev_Ref Z-26:phones@3> ['Z-26:phones::desc@3', 'Z-26:phones::extension@3', 'Z-26:phones::left.first_name@3', 'Z-26:phones::left.last_name@3', 'Z-26:phones::left.middle_name@3', 'Z-26:phones::left.title@3', 'Z-26:phones::left@3', 'Z-26:phones::right.cc@3', 'Z-26:phones::right.ndc@3', 'Z-26:phones::right.sn@3', 'Z-26:phones::right@3', 'desc', 'extension', 'left', 'left.first_name', 'left.last_name', 'left.middle_name', 'left.title', 'phones.desc', 'phones.extension', 'phones.left', 'phones.left.first_name', 'phones.left.last_name', 'phones.left.middle_name', 'phones.left.title', 'phones.right', 'phones.right.cc', 'phones.right.ndc', 'phones.right.sn', 'right', 'right.cc', 'right.ndc', 'right.sn'] <Field_Entity Z-26:phones::right@3> [] >>> print (F_Person_z ["Z-26:phones"]) <class Field_Rev_Ref Z-26:phones> >>> F_Person_z ["Z-26:phones::left.first_name"] <class Field Z-26:phones::left.first_name> >>> F_Person_z ["Z-26:phones"] ["Z-26:phones::left.first_name"] <class Field Z-26:phones::left.first_name> >>> F_Person_z ["Z-26:phones::extension"] <class Field Z-26:phones::extension> >>> F_Person_z ["Z-26:phones"] ["extension"] <class Field Z-26:phones::extension> >>> proto = f_p_z ["Z-26:phones"].proto >>> print (portable_repr (sorted (proto._Element_Map))) ['Z-26:phones::desc', 'Z-26:phones::extension', 'Z-26:phones::left', 'Z-26:phones::left.first_name', 'Z-26:phones::left.last_name', 'Z-26:phones::left.middle_name', 'Z-26:phones::left.title', 'Z-26:phones::right', 'Z-26:phones::right.cc', 'Z-26:phones::right.ndc', 'Z-26:phones::right.sn', 'desc', 'extension', 'left', 'left.first_name', 'left.last_name', 'left.middle_name', 'left.title', 'phones.desc', 'phones.extension', 'phones.left', 'phones.left.first_name', 'phones.left.last_name', 'phones.left.middle_name', 'phones.left.title', 'phones.right', 'phones.right.cc', 'phones.right.ndc', 'phones.right.sn', 'right', 'right.cc', 'right.ndc', 'right.sn'] >>> print (proto, proto.__class__, list (proto.elements_transitive ())) <class Entity_Rev_Ref Z-26:phones> <class '_GTW._MF3.Element.M_Entity_Rev_Ref'> [<class Entity_Rev_Ref Z-26:phones>, <class Field_Entity Z-26:phones::right>, <class Field Z-26:phones::right.cc>, <class Field Z-26:phones::right.ndc>, <class Field 
Z-26:phones::right.sn>, <class Field Z-26:phones::extension>, <class Field Z-26:phones::desc>, <class Field_Ref_Hidden Z-26:phones::left>, <class Field Z-26:phones::left.last_name>, <class Field Z-26:phones::left.first_name>, <class Field Z-26:phones::left.middle_name>, <class Field Z-26:phones::left.title>] >>> print (f_p_z ["Z-26:phones"]) <Field_Rev_Ref Z-26:phones> >>> print (f_p_z ["Z-26:phones::left.first_name@3"]) <Field Z-26:phones::left.first_name@3> >>> print (f_p_z ["Z-26:phones::extension@3"]) <Field Z-26:phones::extension@3> >>> show_completers (f_p, "q_name", "attr.completer.kind") Type q_name attr.completer.kind ============================== F last_name Atom F first_name Atom F middle_name Atom F title Atom F lifetime.start Atom F lifetime.finish Atom >>> show_completers (f_p_z, "q_name", "attr.completer.kind") Type q_name attr.completer.kind =============================== F last_name Atom F first_name Atom F middle_name Atom F title Atom F lifetime.start Atom F lifetime.finish Atom F phones.right.cc Atom F phones.right.ndc Atom F phones.right.sn Atom F phones.desc Atom >>> show_field_values (f_p) { 'X-26:first_name' : {'init' : 'Christian'} , 'X-26:last_name' : {'init' : 'Tanzer'} , 'X-26:lifetime.finish' : {} , 'X-26:lifetime.start' : {'init' : '1959-09-26'} , 'X-26:middle_name' : {} , 'X-26:sex' : {} , 'X-26:title' : {} } >>> show_field_values (f_p_z) { 'Z-26:first_name' : {'init' : 'Christian'} , 'Z-26:last_name' : {'init' : 'Tanzer'} , 'Z-26:lifetime.finish' : {} , 'Z-26:lifetime.start' : {'init' : '1959-09-26'} , 'Z-26:middle_name' : {} , 'Z-26:phones::desc@3' : {'init' : 'example'} , 'Z-26:phones::extension@3' : {'init' : '42'} , 'Z-26:phones::left@3' : { 'init' : { 'cid' : 1 , 'display' : 'Tanzer Christian' , 'pid' : 1 } } , 'Z-26:phones::right.cc@3' : {'init' : '+43'} , 'Z-26:phones::right.ndc@3' : {'init' : '1'} , 'Z-26:phones::right.sn@3' : {'init' : '98765432'} , 'Z-26:phones::right@3' : { 'init' : { 'cid' : 2 , 'display' : '+43-1-987 654 32' , 'pid' : 2 } } , 'Z-26:phones@3' : { 'init' : { 'cid' : 3 , 'display' : 'Tanzer Christian, +43-1-987 654 32, 42' , 'pid' : 3 } } , 'Z-26:sex' : {} , 'Z-26:title' : {} } >>> show_completers (f_p, "q_name", "completer.buddies_id", "polisher.id") Type q_name completer.buddies_id polisher.id ============================== F last_name 1 5 F first_name 1 6 F middle_name 1 7 F title 2 2 F lifetime.start 3 8 F lifetime.finish 4 8 >>> print (formatted (f_p.as_json_cargo ["buddies"])) { 1 : [ 'X-26:first_name' , 'X-26:last_name' , 'X-26:middle_name' , 'X-26:title' ] , 2 : ['X-26:title'] , 3 : ['X-26:lifetime.start'] , 4 : ['X-26:lifetime.finish'] , 5 : ['X-26:last_name'] , 6 : ['X-26:first_name'] , 7 : ['X-26:middle_name'] , 8 : [ 'X-26:lifetime.finish' , 'X-26:lifetime.start' ] } >>> show_completers (f_p_z, "q_name", "completer.buddies_id", "polisher.id") Type q_name completer.buddies_id polisher.id ================================== F last_name 1 9 F first_name 1 10 F middle_name 1 11 F title 2 2 F lifetime.start 3 12 F lifetime.finish 4 12 F phones.right.cc 5 7 F phones.right.ndc 6 7 F phones.right.sn 7 7 F phones.desc 8 None >>> print (formatted (f_p_z.as_json_cargo ["buddies"])) { 1 : [ 'Z-26:first_name' , 'Z-26:last_name' , 'Z-26:middle_name' , 'Z-26:title' ] , 2 : ['Z-26:title'] , 3 : ['Z-26:lifetime.start'] , 4 : ['Z-26:lifetime.finish'] , 5 : ['Z-26:phones::right.cc@3'] , 6 : [ 'Z-26:phones::right.cc@3' , 'Z-26:phones::right.ndc@3' ] , 7 : [ 'Z-26:phones::right.cc@3' , 'Z-26:phones::right.ndc@3' , 'Z-26:phones::right.sn@3' 
] , 8 : ['Z-26:phones::desc@3'] , 9 : ['Z-26:last_name'] , 10 : ['Z-26:first_name'] , 11 : ['Z-26:middle_name'] , 12 : [ 'Z-26:lifetime.finish' , 'Z-26:lifetime.start' ] } >>> print (portable_repr (sorted (f_p._Element_Map))) ['X-26:first_name', 'X-26:last_name', 'X-26:lifetime', 'X-26:lifetime.finish', 'X-26:lifetime.start', 'X-26:middle_name', 'X-26:sex', 'X-26:title', 'first_name', 'last_name', 'lifetime', 'lifetime.finish', 'lifetime.start', 'middle_name', 'sex', 'title'] >>> print (portable_repr (sorted (f_p_z._Element_Map))) ['Z-26:first_name', 'Z-26:last_name', 'Z-26:lifetime', 'Z-26:lifetime.finish', 'Z-26:lifetime.start', 'Z-26:middle_name', 'Z-26:phones', 'Z-26:phones::desc@3', 'Z-26:phones::extension@3', 'Z-26:phones::left.first_name@3', 'Z-26:phones::left.last_name@3', 'Z-26:phones::left.middle_name@3', 'Z-26:phones::left.title@3', 'Z-26:phones::left@3', 'Z-26:phones::right.cc@3', 'Z-26:phones::right.ndc@3', 'Z-26:phones::right.sn@3', 'Z-26:phones::right@3', 'Z-26:phones@3', 'Z-26:sex', 'Z-26:title', 'first_name', 'last_name', 'lifetime', 'lifetime.finish', 'lifetime.start', 'middle_name', 'phones', 'phones.desc', 'phones.extension', 'phones.left', 'phones.left.first_name', 'phones.left.last_name', 'phones.left.middle_name', 'phones.left.title', 'phones.right', 'phones.right.cc', 'phones.right.ndc', 'phones.right.sn', 'sex', 'title'] >>> show_elements (f_p_z2, "Entity") <Entity Z-26> <Entity Z-26> <Field Z-26:last_name> <Entity Z-26> <Field Z-26:first_name> <Entity Z-26> <Field Z-26:middle_name> <Entity Z-26> <Field Z-26:title> <Entity Z-26> <Field_Composite Z-26:lifetime> <Entity Z-26> <Field Z-26:lifetime.start> <Entity Z-26> <Field Z-26:lifetime.finish> <Entity Z-26> <Field Z-26:sex> <Entity Z-26> <Field_Rev_Ref Z-26:phones> <Entity Z-26> <Entity_Rev_Ref Z-26:phones@3> <Entity_Rev_Ref Z-26:phones@3> <Field_Entity Z-26:phones::right@3> <Entity_Rev_Ref Z-26:phones@3> <Field Z-26:phones::right.cc@3> <Field_Entity Z-26:phones::right@3> <Field Z-26:phones::right.ndc@3> <Field_Entity Z-26:phones::right@3> <Field Z-26:phones::right.sn@3> <Field_Entity Z-26:phones::right@3> <Field Z-26:phones::extension@3> <Entity_Rev_Ref Z-26:phones@3> <Field Z-26:phones::desc@3> <Entity_Rev_Ref Z-26:phones@3> <Field_Ref_Hidden Z-26:phones::left@3> <Entity_Rev_Ref Z-26:phones@3> <Entity_Rev_Ref Z-26:phones/1> <Entity_Rev_Ref Z-26:phones/1> <Field_Entity Z-26:phones::right/1> <Entity_Rev_Ref Z-26:phones/1> <Field Z-26:phones::right.cc/1> <Field_Entity Z-26:phones::right/1> <Field Z-26:phones::right.ndc/1> <Field_Entity Z-26:phones::right/1> <Field Z-26:phones::right.sn/1> <Field_Entity Z-26:phones::right/1> <Field Z-26:phones::extension/1> <Entity_Rev_Ref Z-26:phones/1> <Field Z-26:phones::desc/1> <Entity_Rev_Ref Z-26:phones/1> <Field_Ref_Hidden Z-26:phones::left/1> <Entity_Rev_Ref Z-26:phones/1> <Entity_Rev_Ref Z-26:phones/2> <Entity_Rev_Ref Z-26:phones/2> <Field_Entity Z-26:phones::right/2> <Entity_Rev_Ref Z-26:phones/2> <Field Z-26:phones::right.cc/2> <Field_Entity Z-26:phones::right/2> <Field Z-26:phones::right.ndc/2> <Field_Entity Z-26:phones::right/2> <Field Z-26:phones::right.sn/2> <Field_Entity Z-26:phones::right/2> <Field Z-26:phones::extension/2> <Entity_Rev_Ref Z-26:phones/2> <Field Z-26:phones::desc/2> <Entity_Rev_Ref Z-26:phones/2> <Field_Ref_Hidden Z-26:phones::left/2> <Entity_Rev_Ref Z-26:phones/2> >>> show_elements (f_p_z2, "index") <Entity Z-26> <Field Z-26:last_name> <Field Z-26:first_name> <Field Z-26:middle_name> <Field Z-26:title> <Field_Composite Z-26:lifetime> <Field 
Z-26:lifetime.start> <Field Z-26:lifetime.finish> <Field Z-26:sex> <Field_Rev_Ref Z-26:phones> <Entity_Rev_Ref Z-26:phones@3> @3 <Field_Entity Z-26:phones::right@3> @3 <Field Z-26:phones::right.cc@3> @3 <Field Z-26:phones::right.ndc@3> @3 <Field Z-26:phones::right.sn@3> @3 <Field Z-26:phones::extension@3> @3 <Field Z-26:phones::desc@3> @3 <Field_Ref_Hidden Z-26:phones::left@3> @3 <Entity_Rev_Ref Z-26:phones/1> /1 <Field_Entity Z-26:phones::right/1> /1 <Field Z-26:phones::right.cc/1> /1 <Field Z-26:phones::right.ndc/1> /1 <Field Z-26:phones::right.sn/1> /1 <Field Z-26:phones::extension/1> /1 <Field Z-26:phones::desc/1> /1 <Field_Ref_Hidden Z-26:phones::left/1> /1 <Entity_Rev_Ref Z-26:phones/2> /2 <Field_Entity Z-26:phones::right/2> /2 <Field Z-26:phones::right.cc/2> /2 <Field Z-26:phones::right.ndc/2> /2 <Field Z-26:phones::right.sn/2> /2 <Field Z-26:phones::extension/2> /2 <Field Z-26:phones::desc/2> /2 <Field_Ref_Hidden Z-26:phones::left/2> /2 >>> show_elements (f_p_z2, "q_name") <Entity Z-26> None <Field Z-26:last_name> last_name <Field Z-26:first_name> first_name <Field Z-26:middle_name> middle_name <Field Z-26:title> title <Field_Composite Z-26:lifetime> lifetime <Field Z-26:lifetime.start> lifetime.start <Field Z-26:lifetime.finish> lifetime.finish <Field Z-26:sex> sex <Field_Rev_Ref Z-26:phones> phones <Entity_Rev_Ref Z-26:phones@3> phones <Field_Entity Z-26:phones::right@3> phones.right <Field Z-26:phones::right.cc@3> phones.right.cc <Field Z-26:phones::right.ndc@3> phones.right.ndc <Field Z-26:phones::right.sn@3> phones.right.sn <Field Z-26:phones::extension@3> phones.extension <Field Z-26:phones::desc@3> phones.desc <Field_Ref_Hidden Z-26:phones::left@3> phones.left <Entity_Rev_Ref Z-26:phones/1> phones <Field_Entity Z-26:phones::right/1> phones.right <Field Z-26:phones::right.cc/1> phones.right.cc <Field Z-26:phones::right.ndc/1> phones.right.ndc <Field Z-26:phones::right.sn/1> phones.right.sn <Field Z-26:phones::extension/1> phones.extension <Field Z-26:phones::desc/1> phones.desc <Field_Ref_Hidden Z-26:phones::left/1> phones.left <Entity_Rev_Ref Z-26:phones/2> phones <Field_Entity Z-26:phones::right/2> phones.right <Field Z-26:phones::right.cc/2> phones.right.cc <Field Z-26:phones::right.ndc/2> phones.right.ndc <Field Z-26:phones::right.sn/2> phones.right.sn <Field Z-26:phones::extension/2> phones.extension <Field Z-26:phones::desc/2> phones.desc <Field_Ref_Hidden Z-26:phones::left/2> phones.left >>> f_p_z2 ["Z-26:phones"] <Field_Rev_Ref Z-26:phones> >>> f_p_z2 ["Z-26:phones@3"] <Entity_Rev_Ref Z-26:phones@3> >>> f_p_z2 ["Z-26:phones/1"] <Entity_Rev_Ref Z-26:phones/1> >>> f_p_z2 ["Z-26:phones/2"] <Entity_Rev_Ref Z-26:phones/2> >>> f_p_z2_cargo = f_p_z2.as_json_cargo ["cargo"] >>> print (formatted (f_p_z2_cargo)) { 'field_values' : { 'Z-26:first_name' : {'init' : 'Christian'} , 'Z-26:last_name' : {'init' : 'Tanzer'} , 'Z-26:lifetime.finish' : {} , 'Z-26:lifetime.start' : {'init' : '1959-09-26'} , 'Z-26:middle_name' : {} , 'Z-26:phones::desc/1' : {} , 'Z-26:phones::desc/2' : {} , 'Z-26:phones::desc@3' : {'init' : 'example'} , 'Z-26:phones::extension/1' : {} , 'Z-26:phones::extension/2' : {} , 'Z-26:phones::extension@3' : {'init' : '42'} , 'Z-26:phones::left/1' : {'init' : {}} , 'Z-26:phones::left/2' : {'init' : {}} , 'Z-26:phones::left@3' : { 'init' : { 'cid' : 1 , 'display' : 'Tanzer Christian' , 'pid' : 1 } } , 'Z-26:phones::right.cc/1' : {'edit' : '+43'} , 'Z-26:phones::right.cc/2' : {'edit' : '+43'} , 'Z-26:phones::right.cc@3' : {'init' : '+43'} , 'Z-26:phones::right.ndc/1' : 
{} , 'Z-26:phones::right.ndc/2' : {} , 'Z-26:phones::right.ndc@3' : {'init' : '1'} , 'Z-26:phones::right.sn/1' : {} , 'Z-26:phones::right.sn/2' : {} , 'Z-26:phones::right.sn@3' : {'init' : '98765432'} , 'Z-26:phones::right/1' : {'init' : {}} , 'Z-26:phones::right/2' : {'init' : {}} , 'Z-26:phones::right@3' : { 'init' : { 'cid' : 2 , 'display' : '+43-1-987 654 32' , 'pid' : 2 } } , 'Z-26:phones@3' : { 'init' : { 'cid' : 3 , 'display' : 'Tanzer Christian, +43-1-987 654 32, 42' , 'pid' : 3 } } , 'Z-26:sex' : {} , 'Z-26:title' : {} } , 'pid' : 1 , 'sid' : 0 , 'sigs' : { 'Z-26' : 'dxIDJ3yZVcgB4EOzcrzloZ-PespkJUDvqZDpNQ' , 'Z-26:phones/1' : '5Emb8noyfH6y9iXocwihOVKUY7Fl87CwfK_snQ' , 'Z-26:phones/2' : 'ABEse4QgSmUV2kHs11jPb0YZoX17UQpbPsZWTg' , 'Z-26:phones::right/1' : 'kBHk1wYXSwdUar1xHmKmvYCokwZLYqPrxtc-_Q' , 'Z-26:phones::right/2' : '7BUGMsG-u3B_iQcFEVGES3DI2bFYHBGVfYXdsA' , 'Z-26:phones::right@3' : 'RI8lMuuAH4Aq2IAf3BH8tKLwvhc8ZgyvLniDyg' , 'Z-26:phones@3' : 'luUj-F9qHmVLs2-5J14hRA_jl0uSYQDtBBhuhA' } } >>> for e in f_p_z.entity_elements : ... print (e) <Entity Z-26> <Entity_Rev_Ref Z-26:phones@3> <Field_Entity Z-26:phones::right@3> >>> f_p_z.populate_new (f_p_z2_cargo) >>> for e in f_p_z.entity_elements : ... print (e) <Entity Z-26> <Entity_Rev_Ref Z-26:phones@3> <Field_Entity Z-26:phones::right@3> <Entity_Rev_Ref Z-26:phones/1> <Field_Entity Z-26:phones::right/1> <Entity_Rev_Ref Z-26:phones/2> <Field_Entity Z-26:phones::right/2> >>> show_elements (f_p_z, "template_macro") <Entity Z-26> Entity_Form <Field Z-26:last_name> Field <Field Z-26:first_name> Field <Field Z-26:middle_name> Field <Field Z-26:title> Field <Field_Composite Z-26:lifetime> Field_Composite <Field Z-26:lifetime.start> Field <Field Z-26:lifetime.finish> Field <Field Z-26:sex> Field <Field_Rev_Ref Z-26:phones> Field_Rev_Ref <Entity_Rev_Ref Z-26:phones@3> Entity_Rev_Ref <Field_Entity Z-26:phones::right@3> Field_Entity <Field Z-26:phones::right.cc@3> Field <Field Z-26:phones::right.ndc@3> Field <Field Z-26:phones::right.sn@3> Field <Field Z-26:phones::extension@3> Field <Field Z-26:phones::desc@3> Field <Field_Ref_Hidden Z-26:phones::left@3> Field_Ref_Hidden <Entity_Rev_Ref Z-26:phones/1> Entity_Rev_Ref <Field_Entity Z-26:phones::right/1> Field_Entity <Field Z-26:phones::right.cc/1> Field <Field Z-26:phones::right.ndc/1> Field <Field Z-26:phones::right.sn/1> Field <Field Z-26:phones::extension/1> Field <Field Z-26:phones::desc/1> Field <Field_Ref_Hidden Z-26:phones::left/1> Field_Ref_Hidden <Entity_Rev_Ref Z-26:phones/2> Entity_Rev_Ref <Field_Entity Z-26:phones::right/2> Field_Entity <Field Z-26:phones::right.cc/2> Field <Field Z-26:phones::right.ndc/2> Field <Field Z-26:phones::right.sn/2> Field <Field Z-26:phones::extension/2> Field <Field Z-26:phones::desc/2> Field <Field_Ref_Hidden Z-26:phones::left/2> Field_Ref_Hidden >>> show_elements (f_p_z, "input_widget") <Entity Z-26> --- <Field Z-26:last_name> mf3_input, string <Field Z-26:first_name> mf3_input, string <Field Z-26:middle_name> mf3_input, string <Field Z-26:title> mf3_input, string <Field_Composite Z-26:lifetime> mf3_input, string <Field Z-26:lifetime.start> mf3_input, date <Field Z-26:lifetime.finish> mf3_input, date <Field Z-26:sex> mf3_input, named_object <Field_Rev_Ref Z-26:phones> mf3_input, string <Entity_Rev_Ref Z-26:phones@3> mf3_input, id_entity <Field_Entity Z-26:phones::right@3> mf3_input, id_entity <Field Z-26:phones::right.cc@3> mf3_input, number <Field Z-26:phones::right.ndc@3> mf3_input, number <Field Z-26:phones::right.sn@3> mf3_input, number <Field 
Z-26:phones::extension@3> mf3_input, number <Field Z-26:phones::desc@3> mf3_input, string <Field_Ref_Hidden Z-26:phones::left@3> mf3_input, hidden <Entity_Rev_Ref Z-26:phones/1> mf3_input, id_entity <Field_Entity Z-26:phones::right/1> mf3_input, id_entity <Field Z-26:phones::right.cc/1> mf3_input, number <Field Z-26:phones::right.ndc/1> mf3_input, number <Field Z-26:phones::right.sn/1> mf3_input, number <Field Z-26:phones::extension/1> mf3_input, number <Field Z-26:phones::desc/1> mf3_input, string <Field_Ref_Hidden Z-26:phones::left/1> mf3_input, hidden <Entity_Rev_Ref Z-26:phones/2> mf3_input, id_entity <Field_Entity Z-26:phones::right/2> mf3_input, id_entity <Field Z-26:phones::right.cc/2> mf3_input, number <Field Z-26:phones::right.ndc/2> mf3_input, number <Field Z-26:phones::right.sn/2> mf3_input, number <Field Z-26:phones::extension/2> mf3_input, number <Field Z-26:phones::desc/2> mf3_input, string <Field_Ref_Hidden Z-26:phones::left/2> mf3_input, hidden >>> _ = f_p_z ["Z-26:phones"].add () >>> for e in f_p_z.entity_elements : ... print (e) <Entity Z-26> <Entity_Rev_Ref Z-26:phones@3> <Field_Entity Z-26:phones::right@3> <Entity_Rev_Ref Z-26:phones/1> <Field_Entity Z-26:phones::right/1> <Entity_Rev_Ref Z-26:phones/2> <Field_Entity Z-26:phones::right/2> <Entity_Rev_Ref Z-26:phones/3> <Field_Entity Z-26:phones::right/3> >>> F_PhP = MF3_E.Entity.Auto (scope.PAP.Person_has_Phone, id_prefix = "X") >>> f_PhP = F_PhP (scope) >>> f_pph = F_PhP (scope, pph) >>> F_PhP_s = MF3_E.Entity.Auto (scope.PAP.Person_has_Phone, id_prefix = "Y", attr_spec = { "left.middle_name" : dict (skip = 1), "right.cc" : dict (init ="49", prefilled = 1)}) >>> f_PhP_s = F_PhP_s (scope) >>> F_PhP_z = MF3_E.Entity.Auto (scope.PAP.Person_has_Phone, id_prefix = "Z", attr_spec = { "left" : dict (allow_new = True, attr_selector = MOM.Attr.Selector.editable), "right" : dict (attr_selector = MOM.Attr.Selector.editable)}) >>> f_PhP_z = F_PhP_z (scope) >>> show_elements_x (f_PhP_s, "id", "allow_new", filter = (Q.allow_new != None)) Type id allow_new ============================ F_E Y-122:left False F_E Y-122:right True >>> show_elements_x (f_PhP_z, "id", "allow_new", filter = (Q.allow_new != None)) Type id allow_new =========================== F_E Z-122:left True F_E Z-122:right True >>> show_elements (f_PhP_z, "Entity") <Entity Z-122> <Entity Z-122> <Field_Entity Z-122:left> <Entity Z-122> <Field Z-122:left.last_name> <Field_Entity Z-122:left> <Field Z-122:left.first_name> <Field_Entity Z-122:left> <Field Z-122:left.middle_name> <Field_Entity Z-122:left> <Field Z-122:left.title> <Field_Entity Z-122:left> <Field_Composite Z-122:left.lifetime> <Field_Entity Z-122:left> <Field Z-122:left.lifetime.start> <Field_Entity Z-122:left> <Field Z-122:left.lifetime.finish> <Field_Entity Z-122:left> <Field Z-122:left.sex> <Field_Entity Z-122:left> <Field_Entity Z-122:right> <Entity Z-122> <Field Z-122:right.cc> <Field_Entity Z-122:right> <Field Z-122:right.ndc> <Field_Entity Z-122:right> <Field Z-122:right.sn> <Field_Entity Z-122:right> <Field Z-122:right.desc> <Field_Entity Z-122:right> <Field Z-122:extension> <Entity Z-122> <Field Z-122:desc> <Entity Z-122> >>> show_elements (f_PhP_z, "q_name") <Entity Z-122> None <Field_Entity Z-122:left> left <Field Z-122:left.last_name> left.last_name <Field Z-122:left.first_name> left.first_name <Field Z-122:left.middle_name> left.middle_name <Field Z-122:left.title> left.title <Field_Composite Z-122:left.lifetime> left.lifetime <Field Z-122:left.lifetime.start> left.lifetime.start <Field 
Z-122:left.lifetime.finish> left.lifetime.finish <Field Z-122:left.sex> left.sex <Field_Entity Z-122:right> right <Field Z-122:right.cc> right.cc <Field Z-122:right.ndc> right.ndc <Field Z-122:right.sn> right.sn <Field Z-122:right.desc> right.desc <Field Z-122:extension> extension <Field Z-122:desc> desc >>> show_elements (f_PhP_z, "r_name") <Entity Z-122> --- <Field_Entity Z-122:left> left <Field Z-122:left.last_name> last_name <Field Z-122:left.first_name> first_name <Field Z-122:left.middle_name> middle_name <Field Z-122:left.title> title <Field_Composite Z-122:left.lifetime> lifetime <Field Z-122:left.lifetime.start> lifetime.start <Field Z-122:left.lifetime.finish> lifetime.finish <Field Z-122:left.sex> sex <Field_Entity Z-122:right> right <Field Z-122:right.cc> cc <Field Z-122:right.ndc> ndc <Field Z-122:right.sn> sn <Field Z-122:right.desc> desc <Field Z-122:extension> extension <Field Z-122:desc> desc >>> show_elements (f_PhP, "root") <Entity X-122> <Entity X-122> <Field_Entity X-122:left> <Entity X-122> <Field_Entity X-122:right> <Entity X-122> <Field X-122:right.cc> <Entity X-122> <Field X-122:right.ndc> <Entity X-122> <Field X-122:right.sn> <Entity X-122> <Field X-122:extension> <Entity X-122> <Field X-122:desc> <Entity X-122> >>> show_elements (f_PhP, "Entity") <Entity X-122> <Entity X-122> <Field_Entity X-122:left> <Entity X-122> <Field_Entity X-122:right> <Entity X-122> <Field X-122:right.cc> <Field_Entity X-122:right> <Field X-122:right.ndc> <Field_Entity X-122:right> <Field X-122:right.sn> <Field_Entity X-122:right> <Field X-122:extension> <Entity X-122> <Field X-122:desc> <Entity X-122> >>> show_elements (f_PhP, "Entity.E_Type.type_name") <Entity X-122> PAP.Person_has_Phone <Field_Entity X-122:left> PAP.Person_has_Phone <Field_Entity X-122:right> PAP.Person_has_Phone <Field X-122:right.cc> PAP.Phone <Field X-122:right.ndc> PAP.Phone <Field X-122:right.sn> PAP.Phone <Field X-122:extension> PAP.Person_has_Phone <Field X-122:desc> PAP.Person_has_Phone >>> show_elements (f_PhP, "E_Type.type_name") <Entity X-122> PAP.Person_has_Phone <Field_Entity X-122:left> PAP.Person <Field_Entity X-122:right> PAP.Phone <Field X-122:right.cc> PAP.Phone <Field X-122:right.ndc> PAP.Phone <Field X-122:right.sn> PAP.Phone <Field X-122:extension> PAP.Person_has_Phone <Field X-122:desc> PAP.Person_has_Phone >>> show_elements (f_PhP, "attr.E_Type.type_name") <Entity X-122> --- <Field_Entity X-122:left> PAP.Person <Field_Entity X-122:right> PAP.Phone <Field X-122:right.cc> --- <Field X-122:right.ndc> --- <Field X-122:right.sn> --- <Field X-122:extension> --- <Field X-122:desc> --- >>> show_elements (F_PhP, "parent") <class Entity X-122> None <class Field_Entity X-122:left> <class Entity X-122> <class Field X-122:left.last_name> <class Field_Entity X-122:left> <class Field X-122:left.first_name> <class Field_Entity X-122:left> <class Field X-122:left.middle_name> <class Field_Entity X-122:left> <class Field X-122:left.title> <class Field_Entity X-122:left> <class Field_Entity X-122:right> <class Entity X-122> <class Field X-122:right.cc> <class Field_Entity X-122:right> <class Field X-122:right.ndc> <class Field_Entity X-122:right> <class Field X-122:right.sn> <class Field_Entity X-122:right> <class Field X-122:extension> <class Entity X-122> <class Field X-122:desc> <class Entity X-122> >>> show_elements (f_PhP, "parent") <Entity X-122> None <Field_Entity X-122:left> <Entity X-122> <Field_Entity X-122:right> <Entity X-122> <Field X-122:right.cc> <Field_Entity X-122:right> <Field X-122:right.ndc> 
<Field_Entity X-122:right> <Field X-122:right.sn> <Field_Entity X-122:right> <Field X-122:extension> <Entity X-122> <Field X-122:desc> <Entity X-122> >>> for e in f_PhP.entity_elements : ... print (e) <Entity X-122> <Field_Entity X-122:right> >>> for e in f_PhP.field_elements : ... print (e) <Field_Entity X-122:left> <Field_Entity X-122:right> <Field X-122:extension> <Field X-122:desc> >>> show_elements (F_PhP, "input_widget") <class Entity X-122> --- <class Field_Entity X-122:left> mf3_input, id_entity <class Field X-122:left.last_name> mf3_input, string <class Field X-122:left.first_name> mf3_input, string <class Field X-122:left.middle_name> mf3_input, string <class Field X-122:left.title> mf3_input, string <class Field_Entity X-122:right> mf3_input, id_entity <class Field X-122:right.cc> mf3_input, number <class Field X-122:right.ndc> mf3_input, number <class Field X-122:right.sn> mf3_input, number <class Field X-122:extension> mf3_input, number <class Field X-122:desc> mf3_input, string >>> show_elements (f_PhP, "input_widget") <Entity X-122> --- <Field_Entity X-122:left> mf3_input, id_entity <Field_Entity X-122:right> mf3_input, id_entity <Field X-122:right.cc> mf3_input, number <Field X-122:right.ndc> mf3_input, number <Field X-122:right.sn> mf3_input, number <Field X-122:extension> mf3_input, number <Field X-122:desc> mf3_input, string >>> show_elements (f_PhP, "template_macro") <Entity X-122> Entity_Form <Field_Entity X-122:left> Field_Entity <Field_Entity X-122:right> Field_Entity <Field X-122:right.cc> Field <Field X-122:right.ndc> Field <Field X-122:right.sn> Field <Field X-122:extension> Field <Field X-122:desc> Field >>> show_elements (f_PhP, "cooked") <Entity X-122> --- <Field_Entity X-122:left> None <Field_Entity X-122:right> None <Field X-122:right.cc> 43 <Field X-122:right.ndc> <Field X-122:right.sn> <Field X-122:extension> <Field X-122:desc> >>> show_elements (f_PhP, "edit") <Entity X-122> --- <Field_Entity X-122:left> <Field_Entity X-122:right> <Field X-122:right.cc> +43 <Field X-122:right.ndc> <Field X-122:right.sn> <Field X-122:extension> <Field X-122:desc> >>> show_elements (f_PhP_s, "edit") <Entity Y-122> --- <Field_Entity Y-122:left> <Field_Entity Y-122:right> <Field Y-122:right.cc> 49 <Field Y-122:right.ndc> <Field Y-122:right.sn> <Field Y-122:extension> <Field Y-122:desc> >>> show_elements (f_PhP_s, "prefilled") <Entity Y-122> --- <Field_Entity Y-122:left> False <Field_Entity Y-122:right> False <Field Y-122:right.cc> 1 <Field Y-122:right.ndc> False <Field Y-122:right.sn> False <Field Y-122:extension> False <Field Y-122:desc> False >>> show_elements (f_pph, "cooked") <Entity X-122> --- <Field_Entity X-122:left> ('tanzer', 'christian', '', '') <Field_Entity X-122:right> ('43', '1', '98765432') <Field X-122:right.cc> 43 <Field X-122:right.ndc> 1 <Field X-122:right.sn> 98765432 <Field X-122:extension> 42 <Field X-122:desc> example >>> show_elements (f_pph, "edit") <Entity X-122> --- <Field_Entity X-122:left> 1 <Field_Entity X-122:right> 2 <Field X-122:right.cc> +43 <Field X-122:right.ndc> 1 <Field X-122:right.sn> 98765432 <Field X-122:extension> 42 <Field X-122:desc> example >>> show_elements (f_pph, "ui_display") <Entity X-122> Tanzer Christian, +43-1-987 654 32, 42 <Field_Entity X-122:left> Tanzer Christian <Field_Entity X-122:right> +43-1-987 654 32 <Field X-122:right.cc> +43 <Field X-122:right.ndc> 1 <Field X-122:right.sn> 98765432 <Field X-122:extension> 42 <Field X-122:desc> example >>> show_elements (f_pph, "essence") <Entity X-122> (('tanzer', 'christian', 
'', ''), ('43', '1', '98765432'), '42') <Field_Entity X-122:left> ('tanzer', 'christian', '', '') <Field_Entity X-122:right> ('43', '1', '98765432') <Field X-122:right.cc> ('43', '1', '98765432') <Field X-122:right.ndc> ('43', '1', '98765432') <Field X-122:right.sn> ('43', '1', '98765432') <Field X-122:extension> (('tanzer', 'christian', '', ''), ('43', '1', '98765432'), '42') <Field X-122:desc> (('tanzer', 'christian', '', ''), ('43', '1', '98765432'), '42') >>> show_elements (f_pph, "q_name") <Entity X-122> None <Field_Entity X-122:left> left <Field_Entity X-122:right> right <Field X-122:right.cc> right.cc <Field X-122:right.ndc> right.ndc <Field X-122:right.sn> right.sn <Field X-122:extension> extension <Field X-122:desc> desc >>> show_elements (f_pph, "prefilled") <Entity X-122> --- <Field_Entity X-122:left> False <Field_Entity X-122:right> False <Field X-122:right.cc> False <Field X-122:right.ndc> False <Field X-122:right.sn> False <Field X-122:extension> False <Field X-122:desc> False >>> show_field_values (f_pph) { 'X-122:desc' : {'init' : 'example'} , 'X-122:extension' : {'init' : '42'} , 'X-122:left' : { 'init' : { 'cid' : 1 , 'display' : 'Tanzer Christian' , 'pid' : 1 } } , 'X-122:right' : { 'init' : { 'cid' : 2 , 'display' : '+43-1-987 654 32' , 'pid' : 2 } } , 'X-122:right.cc' : {'init' : '+43'} , 'X-122:right.ndc' : {'init' : '1'} , 'X-122:right.sn' : {'init' : '98765432'} } >>> show_field_values (f_PhP_s) { 'Y-122:desc' : {} , 'Y-122:extension' : {} , 'Y-122:left' : {'init' : {}} , 'Y-122:right' : {'init' : {}} , 'Y-122:right.cc' : {'edit' : '49'} , 'Y-122:right.ndc' : {} , 'Y-122:right.sn' : {} } >>> show_field_values (f_PhP_z) { 'Z-122:desc' : {} , 'Z-122:extension' : {} , 'Z-122:left' : {'init' : {}} , 'Z-122:left.first_name' : {} , 'Z-122:left.last_name' : {} , 'Z-122:left.lifetime.finish' : {} , 'Z-122:left.lifetime.start' : {} , 'Z-122:left.middle_name' : {} , 'Z-122:left.sex' : {} , 'Z-122:left.title' : {} , 'Z-122:right' : {'init' : {}} , 'Z-122:right.cc' : {'edit' : '+43'} , 'Z-122:right.desc' : {} , 'Z-122:right.ndc' : {} , 'Z-122:right.sn' : {} } >>> set (x.id for x in F_PhP.elements_transitive ()) >= set (x.id for x in f_PhP.elements_transitive ()) True >>> list (x.id for x in f_pph.elements_transitive ()) == list (x.id for x in f_PhP.elements_transitive ()) True >>> SRM = scope.SRM >>> F_BiR = MF3_E.Entity.Auto (scope.SRM.Boat_in_Regatta, id_prefix = "R") >>> f_bir = F_BiR (scope, attr_spec = { "right" : dict (allow_new = True) }) >>> f_bir_n = F_BiR (scope) >>> show_elements_x (f_bir, "id", "allow_new", filter = (Q.allow_new != None)) Type id allow_new ====================================== F_E R-108:left True F_E R-108:left.left True F_E R-108:right True F_E R-108:right.left True F_E R-108:right.boat_class True F_E R-108:skipper True F_E R-108:skipper.left True F_E R-108:skipper.club True >>> show_elements_x (f_bir_n, "id", "allow_new", filter = (Q.allow_new != None)) Type id allow_new =================================== F_E R-108:left True F_E R-108:left.left True F_E R-108:right False F_E R-108:skipper True F_E R-108:skipper.left True F_E R-108:skipper.club True >>> show_elements_x (f_bir, "id", "Entity.id") Type id Entity.id ================================================================ E R-108 R-108 F_E R-108:left R-108 F_E R-108:left.left R-108:left F R-108:left.left.name R-108:left.left F R-108:left.sail_number R-108:left F R-108:left.nation R-108:left F R-108:left.sail_number_x R-108:left F_E R-108:right R-108 F_E R-108:right.left R-108:right F 
R-108:right.left.name R-108:right.left F_C R-108:right.left.date R-108:right.left F R-108:right.left.date.start R-108:right.left F R-108:right.left.date.finish R-108:right.left F_E R-108:right.boat_class R-108:right F R-108:right.boat_class.name R-108:right.boat_class F_E R-108:skipper R-108 F_E R-108:skipper.left R-108:skipper F R-108:skipper.left.last_name R-108:skipper.left F R-108:skipper.left.first_name R-108:skipper.left F R-108:skipper.left.middle_name R-108:skipper.left F R-108:skipper.left.title R-108:skipper.left F R-108:skipper.nation R-108:skipper F R-108:skipper.mna_number R-108:skipper F_E R-108:skipper.club R-108:skipper F R-108:skipper.club.name R-108:skipper.club F R-108:place R-108 F R-108:points R-108 F R-108:yardstick R-108 >>> show_elements_x (f_bir, "q_name", "r_name", "E_Type.type_name") Type q_name r_name E_Type.type_name ====================================================================== E None None SRM.Boat_in_Regatta F_E left left SRM.Boat F_E left.left left SRM.Boat_Class F left.left.name name SRM.Boat_Class F left.sail_number sail_number SRM.Boat F left.nation nation SRM.Boat F left.sail_number_x sail_number_x SRM.Boat F_E right right SRM.Regatta F_E right.left left SRM.Regatta_Event F right.left.name name SRM.Regatta_Event F_C right.left.date date MOM.Date_Interval_C F right.left.date.start date.start MOM.Date_Interval_C F right.left.date.finish date.finish MOM.Date_Interval_C F_E right.boat_class boat_class SRM._Boat_Class_ F right.boat_class.name name SRM._Boat_Class_ F_E skipper skipper SRM.Sailor F_E skipper.left left PAP.Person F skipper.left.last_name last_name PAP.Person F skipper.left.first_name first_name PAP.Person F skipper.left.middle_name middle_name PAP.Person F skipper.left.title title PAP.Person F skipper.nation nation SRM.Sailor F skipper.mna_number mna_number SRM.Sailor F_E skipper.club club SRM.Club F skipper.club.name name SRM.Club F place place SRM.Boat_in_Regatta F points points SRM.Boat_in_Regatta F yardstick yardstick SRM.Boat_in_Regatta >>> show_elements_x (f_bir, "attr.e_type.type_name", "parent.E_Type.type_name") Type attr.e_type.type_name parent.E_Type.type_name ================================================== E None None F_E SRM.Boat_in_Regatta SRM.Boat_in_Regatta F_E SRM.Boat SRM.Boat F SRM.Boat_Class SRM.Boat_Class F SRM.Boat SRM.Boat F SRM.Boat SRM.Boat F SRM.Boat SRM.Boat F_E SRM.Boat_in_Regatta SRM.Boat_in_Regatta F_E SRM.Regatta SRM.Regatta F SRM.Regatta_Event SRM.Regatta_Event F_C SRM.Regatta_Event SRM.Regatta_Event F MOM.Date_Interval MOM.Date_Interval_C F MOM.Date_Interval_C MOM.Date_Interval_C F_E SRM.Regatta SRM.Regatta F SRM._Boat_Class_ SRM._Boat_Class_ F_E SRM.Boat_in_Regatta SRM.Boat_in_Regatta F_E SRM.Sailor SRM.Sailor F PAP.Person PAP.Person F PAP.Person PAP.Person F PAP.Person PAP.Person F PAP.Person PAP.Person F SRM.Sailor SRM.Sailor F SRM.Sailor SRM.Sailor F_E SRM.Sailor SRM.Sailor F SRM.Club SRM.Club F SRM.Boat_in_Regatta SRM.Boat_in_Regatta F SRM.Boat_in_Regatta SRM.Boat_in_Regatta F SRM.Boat_in_Regatta SRM.Boat_in_Regatta >>> show_completers (f_bir, "q_name", "attr.completer.kind") Type q_name attr.completer.kind ============================================= F left.left.name Atom F left.sail_number Atom F left.sail_number_x Atom F right.left.name Atom F_C right.left.date Composite F right.left.date.start Atom F right.left.date.finish Atom F right.boat_class.name Atom F_E skipper.left Id_Entity F skipper.left.last_name Atom F skipper.left.first_name Atom F skipper.left.middle_name Atom F 
skipper.left.title Atom F skipper.mna_number Atom F_E skipper.club Id_Entity F skipper.club.name Atom >>> show_completers (f_bir, "q_name", "attr.completer.names") Type q_name attr.completer.names ======================================================================================= F left.left.name ('name',) F left.sail_number ('sail_number', 'left', 'nation', 'sail_number_x') F left.sail_number_x ('sail_number_x', 'left', 'sail_number', 'nation') F right.left.name ('name', 'date') F_C right.left.date ('date', 'name') F right.left.date.start ('start',) F right.left.date.finish ('finish',) F right.boat_class.name ('name',) F_E skipper.left ('left', 'nation', 'mna_number', 'club') F skipper.left.last_name ('last_name', 'first_name', 'middle_name', 'title') F skipper.left.first_name ('first_name', 'last_name', 'middle_name', 'title') F skipper.left.middle_name ('middle_name', 'last_name', 'first_name', 'title') F skipper.left.title ('title',) F skipper.mna_number ('mna_number', 'left', 'nation', 'club') F_E skipper.club ('club', 'left', 'nation', 'mna_number') F skipper.club.name ('name',) >>> show_completers (f_bir, "q_name", "completer.embedder") Type q_name completer.embedder =================================================================================================================== F left.left.name None F left.sail_number None F left.sail_number_x None F right.left.name None F_C right.left.date None F right.left.date.start None F right.left.date.finish None F right.boat_class.name None F_E skipper.left None F skipper.left.last_name <E_Completer for <Field_Entity R-108:skipper.left>, treshold = 1, entity_p = 1> F skipper.left.first_name <E_Completer for <Field_Entity R-108:skipper.left>, treshold = 1, entity_p = 1> F skipper.left.middle_name <E_Completer for <Field_Entity R-108:skipper.left>, treshold = 1, entity_p = 1> F skipper.left.title None F skipper.mna_number None F_E skipper.club None F skipper.club.name <E_Completer for <Field_Entity R-108:skipper.club>, treshold = 1, entity_p = 1> >>> show_completers (f_bir, "q_name", "completer.elems") Type q_name completer.elems ================================================================================================================================================================================================================================================================================================================================================================== F left.left.name (<Field R-108:left.left.name>,) F left.sail_number (<Field R-108:left.sail_number>, <Field_Entity R-108:left.left>, <Field R-108:left.nation>, <Field R-108:left.sail_number_x>) F left.sail_number_x (<Field R-108:left.sail_number_x>, <Field_Entity R-108:left.left>, <Field R-108:left.sail_number>, <Field R-108:left.nation>) F right.left.name (<Field R-108:right.left.name>, <Field R-108:right.left.date.start>, <Field R-108:right.left.date.finish>) F_C right.left.date (<Field R-108:right.left.date.start>, <Field R-108:right.left.date.finish>, <Field R-108:right.left.name>) F right.left.date.start (<Field R-108:right.left.date.start>,) F right.left.date.finish (<Field R-108:right.left.date.finish>,) F right.boat_class.name (<Field R-108:right.boat_class.name>,) F_E skipper.left (<Field_Entity R-108:skipper.left>, <Field R-108:skipper.left.last_name>, <Field R-108:skipper.left.first_name>, <Field R-108:skipper.left.middle_name>, <Field R-108:skipper.left.title>, <Field R-108:skipper.nation>, <Field R-108:skipper.mna_number>, <Field 
R-108:skipper.club.name>, <Field_Entity R-108:skipper.club>) F skipper.left.last_name (<Field R-108:skipper.left.last_name>, <Field_Entity R-108:skipper.left>, <Field R-108:skipper.left.first_name>, <Field R-108:skipper.left.middle_name>, <Field R-108:skipper.left.title>, <Field R-108:skipper.nation>, <Field R-108:skipper.mna_number>, <Field R-108:skipper.club.name>, <Field_Entity R-108:skipper.club>) F skipper.left.first_name (<Field R-108:skipper.left.first_name>, <Field_Entity R-108:skipper.left>, <Field R-108:skipper.left.last_name>, <Field R-108:skipper.left.middle_name>, <Field R-108:skipper.left.title>, <Field R-108:skipper.nation>, <Field R-108:skipper.mna_number>, <Field R-108:skipper.club.name>, <Field_Entity R-108:skipper.club>) F skipper.left.middle_name (<Field R-108:skipper.left.middle_name>, <Field_Entity R-108:skipper.left>, <Field R-108:skipper.left.last_name>, <Field R-108:skipper.left.first_name>, <Field R-108:skipper.left.title>, <Field R-108:skipper.nation>, <Field R-108:skipper.mna_number>, <Field R-108:skipper.club.name>, <Field_Entity R-108:skipper.club>) F skipper.left.title (<Field R-108:skipper.left.title>,) F skipper.mna_number (<Field R-108:skipper.mna_number>, <Field R-108:skipper.left.last_name>, <Field R-108:skipper.left.first_name>, <Field R-108:skipper.left.middle_name>, <Field R-108:skipper.left.title>, <Field_Entity R-108:skipper.left>, <Field R-108:skipper.nation>, <Field R-108:skipper.club.name>, <Field_Entity R-108:skipper.club>) F_E skipper.club (<Field_Entity R-108:skipper.club>, <Field R-108:skipper.club.name>, <Field R-108:skipper.left.last_name>, <Field R-108:skipper.left.first_name>, <Field R-108:skipper.left.middle_name>, <Field R-108:skipper.left.title>, <Field_Entity R-108:skipper.left>, <Field R-108:skipper.nation>, <Field R-108:skipper.mna_number>) F skipper.club.name (<Field R-108:skipper.club.name>, <Field_Entity R-108:skipper.club>, <Field R-108:skipper.left.last_name>, <Field R-108:skipper.left.first_name>, <Field R-108:skipper.left.middle_name>, <Field R-108:skipper.left.title>, <Field_Entity R-108:skipper.left>, <Field R-108:skipper.nation>, <Field R-108:skipper.mna_number>) >>> show_completers (f_bir, "q_name", "attr.completer.as_json_cargo") Type q_name attr.completer.as_json_cargo ============================================================================================================================== F left.left.name entity_p = True, names = ['name'], treshold = 1 F left.sail_number entity_p = True, names = ['sail_number', 'left', 'nation', 'sail_number_x'], treshold = 1 F left.sail_number_x entity_p = True, names = ['sail_number_x', 'left', 'sail_number', 'nation'], treshold = 1 F right.left.name entity_p = True, names = ['name', 'date'], treshold = 1 F_C right.left.date entity_p = True, names = ['date', 'name'], treshold = 1 F right.left.date.start entity_p = False, names = ['start'], treshold = 4 F right.left.date.finish entity_p = False, names = ['finish'], treshold = 4 F right.boat_class.name entity_p = True, names = ['name'], treshold = 1 F_E skipper.left entity_p = True, names = ['left', 'nation', 'mna_number', 'club'], treshold = 1 F skipper.left.last_name entity_p = True, names = ['last_name', 'first_name', 'middle_name', 'title'], treshold = 1 F skipper.left.first_name entity_p = True, names = ['first_name', 'last_name', 'middle_name', 'title'], treshold = 1 F skipper.left.middle_name entity_p = True, names = ['middle_name', 'last_name', 'first_name', 'title'], treshold = 1 F skipper.left.title entity_p = False, 
names = ['title'], treshold = 0 F skipper.mna_number entity_p = True, names = ['mna_number', 'left', 'nation', 'club'], treshold = 1 F_E skipper.club entity_p = True, names = ['club', 'left', 'nation', 'mna_number'], treshold = 1 F skipper.club.name entity_p = True, names = ['name'], treshold = 1 >>> show_completers (f_bir, "q_name", "completer.id", "completer.as_json_cargo") Type q_name completer.id completer.as_json_cargo ======================================================================================== F left.left.name 2 buddies_id = 2, entity_p = True, treshold = 1 F left.sail_number 3 buddies_id = 3, entity_p = True, treshold = 1 F left.sail_number_x 3 buddies_id = 3, entity_p = True, treshold = 1 F right.left.name 4 buddies_id = 4, entity_p = True, treshold = 1 F_C right.left.date None buddies_id = 4, entity_p = True, treshold = 1 F right.left.date.start 5 buddies_id = 5, entity_p = False, treshold = 4 F right.left.date.finish 6 buddies_id = 6, entity_p = False, treshold = 4 F right.boat_class.name 7 buddies_id = 7, entity_p = True, treshold = 1 F_E skipper.left 1 buddies_id = 1, entity_p = True, treshold = 1 F skipper.left.last_name 1 buddies_id = 1, entity_p = True, treshold = 1 F skipper.left.first_name 1 buddies_id = 1, entity_p = True, treshold = 1 F skipper.left.middle_name 1 buddies_id = 1, entity_p = True, treshold = 1 F skipper.left.title 8 buddies_id = 8, entity_p = False, treshold = 0 F skipper.mna_number 1 buddies_id = 1, entity_p = True, treshold = 1 F_E skipper.club 1 buddies_id = 1, entity_p = True, treshold = 1 F skipper.club.name 1 buddies_id = 1, entity_p = True, treshold = 1 >>> show_completers (f_bir, "q_name", "completer.id", "completer.sig") Type q_name completer.id completer.sig ======================================================= F left.left.name 2 (2, 1, True) F left.sail_number 3 (3, 1, True) F left.sail_number_x 3 (3, 1, True) F right.left.name 4 (4, 1, True) F_C right.left.date None (4, 1, True) F right.left.date.start 5 (5, 4, False) F right.left.date.finish 6 (6, 4, False) F right.boat_class.name 7 (7, 1, True) F_E skipper.left 1 (1, 1, True) F skipper.left.last_name 1 (1, 1, True) F skipper.left.first_name 1 (1, 1, True) F skipper.left.middle_name 1 (1, 1, True) F skipper.left.title 8 (8, 0, False) F skipper.mna_number 1 (1, 1, True) F_E skipper.club 1 (1, 1, True) F skipper.club.name 1 (1, 1, True) >>> show_completers (f_bir, "q_name", "completer.buddies_id", "polisher.id") Type q_name completer.buddies_id polisher.id =========================================== F left.left.name 2 None F left.sail_number 3 9 F left.sail_number_x 3 None F right.left.name 4 None F_C right.left.date 4 None F right.left.date.start 5 10 F right.left.date.finish 6 10 F right.boat_class.name 7 None F_E skipper.left 1 None F skipper.left.last_name 1 11 F skipper.left.first_name 1 12 F skipper.left.middle_name 1 13 F skipper.left.title 8 8 F skipper.mna_number 1 None F_E skipper.club 1 None F skipper.club.name 1 None >>> print (formatted (f_bir.as_json_cargo ["buddies"])) { 1 : [ 'R-108:skipper.club' , 'R-108:skipper.club.name' , 'R-108:skipper.left' , 'R-108:skipper.left.first_name' , 'R-108:skipper.left.last_name' , 'R-108:skipper.left.middle_name' , 'R-108:skipper.left.title' , 'R-108:skipper.mna_number' , 'R-108:skipper.nation' ] , 2 : ['R-108:left.left.name'] , 3 : [ 'R-108:left.left' , 'R-108:left.nation' , 'R-108:left.sail_number' , 'R-108:left.sail_number_x' ] , 4 : [ 'R-108:right.left.date.finish' , 'R-108:right.left.date.start' , 'R-108:right.left.name' ] , 5 
: ['R-108:right.left.date.start'] , 6 : ['R-108:right.left.date.finish'] , 7 : ['R-108:right.boat_class.name'] , 8 : ['R-108:skipper.left.title'] , 9 : [ 'R-108:left.nation' , 'R-108:left.sail_number' , 'R-108:left.sail_number_x' ] , 10 : [ 'R-108:right.left.date.finish' , 'R-108:right.left.date.start' ] , 11 : ['R-108:skipper.left.last_name'] , 12 : ['R-108:skipper.left.first_name'] , 13 : ['R-108:skipper.left.middle_name'] } >>> show_completers_js (f_bir) { 1 : { 'buddies_id' : 1 , 'entity_p' : True , 'treshold' : 1 } , 2 : { 'buddies_id' : 2 , 'entity_p' : True , 'treshold' : 1 } , 3 : { 'buddies_id' : 3 , 'entity_p' : True , 'treshold' : 1 } , 4 : { 'buddies_id' : 4 , 'entity_p' : True , 'treshold' : 1 } , 5 : { 'buddies_id' : 5 , 'entity_p' : False , 'treshold' : 4 } , 6 : { 'buddies_id' : 6 , 'entity_p' : False , 'treshold' : 4 } , 7 : { 'buddies_id' : 7 , 'entity_p' : True , 'treshold' : 1 } , 8 : { 'buddies_id' : 8 , 'entity_p' : False , 'treshold' : 0 } } >>> show_completers (f_bir, "q_name", "completer.entity_p") Type q_name completer.entity_p ========================================= F left.left.name True F left.sail_number True F left.sail_number_x True F right.left.name True F_C right.left.date True F right.left.date.start False F right.left.date.finish False F right.boat_class.name True F_E skipper.left True F skipper.left.last_name True F skipper.left.first_name True F skipper.left.middle_name True F skipper.left.title False F skipper.mna_number True F_E skipper.club True F skipper.club.name True >>> show_completers (f_bir, "q_name", "completer.anchor") Type q_name completer.anchor ========================================================================= F left.left.name <Field_Entity R-108:left.left> F left.sail_number <Field_Entity R-108:left> F left.sail_number_x <Field_Entity R-108:left> F right.left.name <Field_Entity R-108:right.left> F_C right.left.date <Field_Entity R-108:right.left> F right.left.date.start <Field_Entity R-108:right.left> F right.left.date.finish <Field_Entity R-108:right.left> F right.boat_class.name <Field_Entity R-108:right.boat_class> F_E skipper.left <Field_Entity R-108:skipper> F skipper.left.last_name <Field_Entity R-108:skipper> F skipper.left.first_name <Field_Entity R-108:skipper> F skipper.left.middle_name <Field_Entity R-108:skipper> F skipper.left.title <Field_Entity R-108:skipper.left> F skipper.mna_number <Field_Entity R-108:skipper> F_E skipper.club <Field_Entity R-108:skipper> F skipper.club.name <Field_Entity R-108:skipper> >>> show_completers (f_bir, "q_name", "completer.fields") Type q_name completer.fields ============================================================================================================================================================================================================================================================================================== F left.left.name ('R-108:left.left.name',) F left.sail_number ('R-108:left.left', 'R-108:left.nation', 'R-108:left.sail_number', 'R-108:left.sail_number_x') F left.sail_number_x ('R-108:left.left', 'R-108:left.nation', 'R-108:left.sail_number', 'R-108:left.sail_number_x') F right.left.name ('R-108:right.left.date.finish', 'R-108:right.left.date.start', 'R-108:right.left.name') F_C right.left.date ('R-108:right.left.date.finish', 'R-108:right.left.date.start', 'R-108:right.left.name') F right.left.date.start ('R-108:right.left.date.start',) F right.left.date.finish ('R-108:right.left.date.finish',) F right.boat_class.name 
('R-108:right.boat_class.name',) F_E skipper.left ('R-108:skipper.club', 'R-108:skipper.club.name', 'R-108:skipper.left', 'R-108:skipper.left.first_name', 'R-108:skipper.left.last_name', 'R-108:skipper.left.middle_name', 'R-108:skipper.left.title', 'R-108:skipper.mna_number', 'R-108:skipper.nation') F skipper.left.last_name ('R-108:skipper.club', 'R-108:skipper.club.name', 'R-108:skipper.left', 'R-108:skipper.left.first_name', 'R-108:skipper.left.last_name', 'R-108:skipper.left.middle_name', 'R-108:skipper.left.title', 'R-108:skipper.mna_number', 'R-108:skipper.nation') F skipper.left.first_name ('R-108:skipper.club', 'R-108:skipper.club.name', 'R-108:skipper.left', 'R-108:skipper.left.first_name', 'R-108:skipper.left.last_name', 'R-108:skipper.left.middle_name', 'R-108:skipper.left.title', 'R-108:skipper.mna_number', 'R-108:skipper.nation') F skipper.left.middle_name ('R-108:skipper.club', 'R-108:skipper.club.name', 'R-108:skipper.left', 'R-108:skipper.left.first_name', 'R-108:skipper.left.last_name', 'R-108:skipper.left.middle_name', 'R-108:skipper.left.title', 'R-108:skipper.mna_number', 'R-108:skipper.nation') F skipper.left.title ('R-108:skipper.left.title',) F skipper.mna_number ('R-108:skipper.club', 'R-108:skipper.club.name', 'R-108:skipper.left', 'R-108:skipper.left.first_name', 'R-108:skipper.left.last_name', 'R-108:skipper.left.middle_name', 'R-108:skipper.left.title', 'R-108:skipper.mna_number', 'R-108:skipper.nation') F_E skipper.club ('R-108:skipper.club', 'R-108:skipper.club.name', 'R-108:skipper.left', 'R-108:skipper.left.first_name', 'R-108:skipper.left.last_name', 'R-108:skipper.left.middle_name', 'R-108:skipper.left.title', 'R-108:skipper.mna_number', 'R-108:skipper.nation') F skipper.club.name ('R-108:skipper.club', 'R-108:skipper.club.name', 'R-108:skipper.left', 'R-108:skipper.left.first_name', 'R-108:skipper.left.last_name', 'R-108:skipper.left.middle_name', 'R-108:skipper.left.title', 'R-108:skipper.mna_number', 'R-108:skipper.nation') >>> show_completers (f_bir, "q_name", "completer.field_ids") Type q_name completer.field_ids ============================================================================================================================================================================================================================================================================================== F left.left.name ('R-108:left.left.name',) F left.sail_number ('R-108:left.sail_number', 'R-108:left.left', 'R-108:left.nation', 'R-108:left.sail_number_x') F left.sail_number_x ('R-108:left.sail_number_x', 'R-108:left.left', 'R-108:left.sail_number', 'R-108:left.nation') F right.left.name ('R-108:right.left.name', 'R-108:right.left.date.start', 'R-108:right.left.date.finish') F_C right.left.date ('R-108:right.left.date.start', 'R-108:right.left.date.finish', 'R-108:right.left.name') F right.left.date.start ('R-108:right.left.date.start',) F right.left.date.finish ('R-108:right.left.date.finish',) F right.boat_class.name ('R-108:right.boat_class.name',) F_E skipper.left ('R-108:skipper.left', 'R-108:skipper.left.last_name', 'R-108:skipper.left.first_name', 'R-108:skipper.left.middle_name', 'R-108:skipper.left.title', 'R-108:skipper.nation', 'R-108:skipper.mna_number', 'R-108:skipper.club.name', 'R-108:skipper.club') F skipper.left.last_name ('R-108:skipper.left.last_name', 'R-108:skipper.left', 'R-108:skipper.left.first_name', 'R-108:skipper.left.middle_name', 'R-108:skipper.left.title', 'R-108:skipper.nation', 'R-108:skipper.mna_number', 
'R-108:skipper.club.name', 'R-108:skipper.club') F skipper.left.first_name ('R-108:skipper.left.first_name', 'R-108:skipper.left', 'R-108:skipper.left.last_name', 'R-108:skipper.left.middle_name', 'R-108:skipper.left.title', 'R-108:skipper.nation', 'R-108:skipper.mna_number', 'R-108:skipper.club.name', 'R-108:skipper.club') F skipper.left.middle_name ('R-108:skipper.left.middle_name', 'R-108:skipper.left', 'R-108:skipper.left.last_name', 'R-108:skipper.left.first_name', 'R-108:skipper.left.title', 'R-108:skipper.nation', 'R-108:skipper.mna_number', 'R-108:skipper.club.name', 'R-108:skipper.club') F skipper.left.title ('R-108:skipper.left.title',) F skipper.mna_number ('R-108:skipper.mna_number', 'R-108:skipper.left.last_name', 'R-108:skipper.left.first_name', 'R-108:skipper.left.middle_name', 'R-108:skipper.left.title', 'R-108:skipper.left', 'R-108:skipper.nation', 'R-108:skipper.club.name', 'R-108:skipper.club') F_E skipper.club ('R-108:skipper.club', 'R-108:skipper.club.name', 'R-108:skipper.left.last_name', 'R-108:skipper.left.first_name', 'R-108:skipper.left.middle_name', 'R-108:skipper.left.title', 'R-108:skipper.left', 'R-108:skipper.nation', 'R-108:skipper.mna_number') F skipper.club.name ('R-108:skipper.club.name', 'R-108:skipper.club', 'R-108:skipper.left.last_name', 'R-108:skipper.left.first_name', 'R-108:skipper.left.middle_name', 'R-108:skipper.left.title', 'R-108:skipper.left', 'R-108:skipper.nation', 'R-108:skipper.mna_number') >>> show_completers (f_bir, "q_name", "completer.etn", "completer.attr_names") Type q_name completer.etn completer.attr_names =================================================================================================================================================================================== F left.left.name SRM.Boat_Class ('name',) F left.sail_number SRM.Boat ('sail_number', 'left', 'nation', 'sail_number_x') F left.sail_number_x SRM.Boat ('sail_number_x', 'left', 'sail_number', 'nation') F right.left.name SRM.Regatta_Event ('name', 'date.start', 'date.finish') F_C right.left.date SRM.Regatta_Event ('date.start', 'date.finish', 'name') F right.left.date.start SRM.Regatta_Event ('date.start',) F right.left.date.finish SRM.Regatta_Event ('date.finish',) F right.boat_class.name SRM._Boat_Class_ ('name',) F_E skipper.left SRM.Sailor ('left', 'left.last_name', 'left.first_name', 'left.middle_name', 'left.title', 'nation', 'mna_number', 'club.name', 'club') F skipper.left.last_name SRM.Sailor ('left.last_name', 'left', 'left.first_name', 'left.middle_name', 'left.title', 'nation', 'mna_number', 'club.name', 'club') F skipper.left.first_name SRM.Sailor ('left.first_name', 'left', 'left.last_name', 'left.middle_name', 'left.title', 'nation', 'mna_number', 'club.name', 'club') F skipper.left.middle_name SRM.Sailor ('left.middle_name', 'left', 'left.last_name', 'left.first_name', 'left.title', 'nation', 'mna_number', 'club.name', 'club') F skipper.left.title PAP.Person ('title',) F skipper.mna_number SRM.Sailor ('mna_number', 'left.last_name', 'left.first_name', 'left.middle_name', 'left.title', 'left', 'nation', 'club.name', 'club') F_E skipper.club SRM.Sailor ('club', 'club.name', 'left.last_name', 'left.first_name', 'left.middle_name', 'left.title', 'left', 'nation', 'mna_number') F skipper.club.name SRM.Sailor ('club.name', 'club', 'left.last_name', 'left.first_name', 'left.middle_name', 'left.title', 'left', 'nation', 'mna_number') >>> show_elements_x (f_p, "q_name", "completer.id", "completer.as_json_cargo") Type q_name completer.id 
completer.as_json_cargo =============================================================================== E None None None F last_name 1 buddies_id = 1, entity_p = True, treshold = 1 F first_name 1 buddies_id = 1, entity_p = True, treshold = 1 F middle_name 1 buddies_id = 1, entity_p = True, treshold = 1 F title 2 buddies_id = 2, entity_p = False, treshold = 0 F_C lifetime None None F lifetime.start 3 buddies_id = 3, entity_p = False, treshold = 4 F lifetime.finish 4 buddies_id = 4, entity_p = False, treshold = 4 F sex None None >>> show_elements_x (f_p, "q_name", "completer.id", "completer.sig") Type q_name completer.id completer.sig ============================================== E None None None F last_name 1 (1, 1, True) F first_name 1 (1, 1, True) F middle_name 1 (1, 1, True) F title 2 (2, 0, False) F_C lifetime None None F lifetime.start 3 (3, 4, False) F lifetime.finish 4 (4, 4, False) F sex None None >>> show_completers (f_p, "q_name", "completer.name", "completer.entity_p") Type q_name completer.name completer.entity_p ================================================ F last_name last_name True F first_name first_name True F middle_name middle_name True F title title False F lifetime.start lifetime.start False F lifetime.finish lifetime.finish False >>> show_completers (f_p, "q_name", "completer.anchor") Type q_name completer.anchor ======================================= F last_name <Entity X-26> F first_name <Entity X-26> F middle_name <Entity X-26> F title <Entity X-26> F lifetime.start <Entity X-26> F lifetime.finish <Entity X-26> >>> show_completers (f_p, "q_name", "completer.field_ids") Type q_name completer.field_ids ================================================================================================= F last_name ('X-26:last_name', 'X-26:first_name', 'X-26:middle_name', 'X-26:title') F first_name ('X-26:first_name', 'X-26:last_name', 'X-26:middle_name', 'X-26:title') F middle_name ('X-26:middle_name', 'X-26:last_name', 'X-26:first_name', 'X-26:title') F title ('X-26:title',) F lifetime.start ('X-26:lifetime.start',) F lifetime.finish ('X-26:lifetime.finish',) >>> show_completers (f_p, "q_name", "completer.etn", "completer.attr_names") Type q_name completer.etn completer.attr_names ========================================================================================= F last_name PAP.Person ('last_name', 'first_name', 'middle_name', 'title') F first_name PAP.Person ('first_name', 'last_name', 'middle_name', 'title') F middle_name PAP.Person ('middle_name', 'last_name', 'first_name', 'title') F title PAP.Person ('title',) F lifetime.start PAP.Person ('lifetime.start',) F lifetime.finish PAP.Person ('lifetime.finish',) >>> show_completers_js (f_p) { 1 : { 'buddies_id' : 1 , 'entity_p' : True , 'treshold' : 1 } , 2 : { 'buddies_id' : 2 , 'entity_p' : False , 'treshold' : 0 } , 3 : { 'buddies_id' : 3 , 'entity_p' : False , 'treshold' : 4 } , 4 : { 'buddies_id' : 4 , 'entity_p' : False , 'treshold' : 4 } } >>> show_elements_x (f_bir, "q_name", "field_elements") Type q_name field_elements ============================================================================================================================================================================================= E None (<Field_Entity R-108:left>, <Field_Entity R-108:right>, <Field_Entity R-108:skipper>, <Field R-108:place>, <Field R-108:points>, <Field R-108:yardstick>) F_E left (<Field_Entity R-108:left.left>, <Field R-108:left.sail_number>, <Field R-108:left.nation>, <Field R-108:left.sail_number_x>) F_E 
left.left (<Field R-108:left.left.name>,) F left.left.name () F left.sail_number () F left.nation () F left.sail_number_x () F_E right (<Field_Entity R-108:right.left>, <Field_Entity R-108:right.boat_class>) F_E right.left (<Field R-108:right.left.name>, <Field R-108:right.left.date.start>, <Field R-108:right.left.date.finish>) F right.left.name () F_C right.left.date (<Field R-108:right.left.date.start>, <Field R-108:right.left.date.finish>) F right.left.date.start () F right.left.date.finish () F_E right.boat_class (<Field R-108:right.boat_class.name>,) F right.boat_class.name () F_E skipper (<Field_Entity R-108:skipper.left>, <Field R-108:skipper.nation>, <Field R-108:skipper.mna_number>, <Field_Entity R-108:skipper.club>) F_E skipper.left (<Field R-108:skipper.left.last_name>, <Field R-108:skipper.left.first_name>, <Field R-108:skipper.left.middle_name>, <Field R-108:skipper.left.title>) F skipper.left.last_name () F skipper.left.first_name () F skipper.left.middle_name () F skipper.left.title () F skipper.nation () F skipper.mna_number () F_E skipper.club (<Field R-108:skipper.club.name>,) F skipper.club.name () F place () F points () F yardstick () >>> show_elements_x (f_p, "q_name", "field_elements") Type q_name field_elements ======================================================================================================================================================================================================= E None (<Field X-26:last_name>, <Field X-26:first_name>, <Field X-26:middle_name>, <Field X-26:title>, <Field X-26:lifetime.start>, <Field X-26:lifetime.finish>, <Field X-26:sex>) F last_name () F first_name () F middle_name () F title () F_C lifetime (<Field X-26:lifetime.start>, <Field X-26:lifetime.finish>) F lifetime.start () F lifetime.finish () F sex () >>> show_completers (f_bir, "q_name", "completer.own_elems") Type q_name completer.own_elems ================================================================================================================================================================================================================================================================================================================================================================== F left.left.name (<Field R-108:left.left.name>,) F left.sail_number (<Field R-108:left.sail_number>, <Field_Entity R-108:left.left>, <Field R-108:left.nation>, <Field R-108:left.sail_number_x>) F left.sail_number_x (<Field R-108:left.sail_number_x>, <Field_Entity R-108:left.left>, <Field R-108:left.sail_number>, <Field R-108:left.nation>) F right.left.name (<Field R-108:right.left.name>, <Field R-108:right.left.date.start>, <Field R-108:right.left.date.finish>) F_C right.left.date (<Field R-108:right.left.date.start>, <Field R-108:right.left.date.finish>) F right.left.date.start (<Field R-108:right.left.date.start>,) F right.left.date.finish (<Field R-108:right.left.date.finish>,) F right.boat_class.name (<Field R-108:right.boat_class.name>,) F_E skipper.left (<Field_Entity R-108:skipper.left>, <Field R-108:skipper.left.last_name>, <Field R-108:skipper.left.first_name>, <Field R-108:skipper.left.middle_name>, <Field R-108:skipper.left.title>, <Field R-108:skipper.nation>, <Field R-108:skipper.mna_number>, <Field R-108:skipper.club.name>, <Field_Entity R-108:skipper.club>) F skipper.left.last_name (<Field R-108:skipper.left.last_name>, <Field R-108:skipper.left.first_name>, <Field R-108:skipper.left.middle_name>, <Field R-108:skipper.left.title>) F 
skipper.left.first_name (<Field R-108:skipper.left.first_name>, <Field R-108:skipper.left.last_name>, <Field R-108:skipper.left.middle_name>, <Field R-108:skipper.left.title>) F skipper.left.middle_name (<Field R-108:skipper.left.middle_name>, <Field R-108:skipper.left.last_name>, <Field R-108:skipper.left.first_name>, <Field R-108:skipper.left.title>) F skipper.left.title (<Field R-108:skipper.left.title>,) F skipper.mna_number (<Field R-108:skipper.mna_number>, <Field R-108:skipper.left.last_name>, <Field R-108:skipper.left.first_name>, <Field R-108:skipper.left.middle_name>, <Field R-108:skipper.left.title>, <Field_Entity R-108:skipper.left>, <Field R-108:skipper.nation>, <Field R-108:skipper.club.name>, <Field_Entity R-108:skipper.club>) F_E skipper.club (<Field_Entity R-108:skipper.club>, <Field R-108:skipper.club.name>, <Field R-108:skipper.left.last_name>, <Field R-108:skipper.left.first_name>, <Field R-108:skipper.left.middle_name>, <Field R-108:skipper.left.title>, <Field_Entity R-108:skipper.left>, <Field R-108:skipper.nation>, <Field R-108:skipper.mna_number>) F skipper.club.name (<Field R-108:skipper.club.name>,) >>> show_completers (f_p, "q_name", "completer.own_elems") Type q_name completer.own_elems ========================================================================================================================= F last_name (<Field X-26:last_name>, <Field X-26:first_name>, <Field X-26:middle_name>, <Field X-26:title>) F first_name (<Field X-26:first_name>, <Field X-26:last_name>, <Field X-26:middle_name>, <Field X-26:title>) F middle_name (<Field X-26:middle_name>, <Field X-26:last_name>, <Field X-26:first_name>, <Field X-26:title>) F title (<Field X-26:title>,) F lifetime.start (<Field X-26:lifetime.start>,) F lifetime.finish (<Field X-26:lifetime.finish>,) >>> EVT = scope.EVT >>> RR = EVT.Recurrence_Rule >>> RS = EVT.Recurrence_Spec >>> ev = EVT.Event (p, p.lifetime, ("00:00", "23:59"), raw = True) >>> rs = RS (ev) >>> rr = RR (rs, desc = "Birthday", unit = "Yearly", raw = True) >>> F_E = MF3_E.Entity.Auto (EVT.Event, id_prefix = "E", attr_spec = dict (recurrence = dict (include_rev_refs = ("rules", ))), include_rev_refs = ("recurrence", )) >>> f_e = F_E (scope, ev) >>> show_elements (f_e, "ui_display") <Entity E-64> Tanzer Christian, 1959-09-26, 00:00 - 23:59 <Field_Entity E-64:left> Tanzer Christian <Field_Composite E-64:date> 1959-09-26 <Field E-64:date.start> 1959-09-26 <Field E-64:date.finish> <Field_Composite E-64:time> 00:00 - 23:59 <Field E-64:time.start> 00:00 <Field E-64:time.finish> 23:59 <Field_Entity E-64:calendar> <Field E-64:detail> <Field E-64:short_title> <Field_Rev_Ref E-64:recurrence> Birthday, 1959-09-26, 1, Yearly <Entity_Rev_Ref E-64:recurrence@6> Birthday, 1959-09-26, 1, Yearly <Field E-64:recurrence::dates@6> <Field E-64:recurrence::date_exceptions@6> <Field_Rev_Ref E-64:recurrence::rules@6> Birthday, 1959-09-26, 1, Yearly <Entity_Rev_Ref E-64:recurrence::rules@6@7> Birthday, 1959-09-26, 1, Yearly <Field E-64:recurrence::rules::is_exception@6@7> <Field E-64:recurrence::rules::desc@6@7> Birthday <Field E-64:recurrence::rules::start@6@7> 1959-09-26 <Field E-64:recurrence::rules::finish@6@7> <Field E-64:recurrence::rules::period@6@7> 1 <Field E-64:recurrence::rules::unit@6@7> Yearly <Field E-64:recurrence::rules::week_day@6@7> <Field E-64:recurrence::rules::count@6@7> <Field E-64:recurrence::rules::restrict_pos@6@7> <Field E-64:recurrence::rules::month_day@6@7> <Field E-64:recurrence::rules::month@6@7> <Field E-64:recurrence::rules::week@6@7> 
<Field E-64:recurrence::rules::year_day@6@7> <Field E-64:recurrence::rules::easter_offset@6@7> <Field_Ref_Hidden E-64:recurrence::rules::left@6@7> <Field_Ref_Hidden E-64:recurrence::left@6> """ _test_max_rev_ref = r""" >>> scope = Scaffold.scope (%(p1)s, %(n1)s) # doctest:+ELLIPSIS Creating new scope MOMT__... >>> scope.db_meta_data.dbid = '2d802327-5c99-49ca-9af7-2ddc6b4c648b' >>> EVT = scope.EVT >>> F_E = MF3_E.Entity.Auto (EVT.Event, id_prefix = "E", attr_spec = dict (recurrence = dict (include_rev_refs = ("rules", ))), include_rev_refs = ("recurrence", )) >>> f_e = F_E (scope) >>> _ = f_e ["recurrence"].add () >>> show_elements (f_e, "max_rev_ref") <Entity E-64> --- <Field_Entity E-64:left> --- <Field_Composite E-64:date> --- <Field E-64:date.start> --- <Field E-64:date.finish> --- <Field_Composite E-64:time> --- <Field E-64:time.start> --- <Field E-64:time.finish> --- <Field_Entity E-64:calendar> --- <Field E-64:detail> --- <Field E-64:short_title> --- <Field_Rev_Ref E-64:recurrence> 1 <Entity_Rev_Ref E-64:recurrence/1> --- <Field E-64:recurrence::dates/1> --- <Field E-64:recurrence::date_exceptions/1> --- <Field_Rev_Ref E-64:recurrence::rules/1> 2147483648 <Field_Ref_Hidden E-64:recurrence::left/1> --- """ _test_single_primary = r""" >>> scope = Scaffold.scope (%(p1)s, %(n1)s) # doctest:+ELLIPSIS Creating new scope MOMT__... >>> scope.db_meta_data.dbid = '2d802327-5c99-49ca-9af7-2ddc6b4c648b' >>> SRM = scope.SRM >>> attr_spec = dict (left = dict (allow_new = False)) >>> F_B = MF3_E.Entity.Auto (scope.SRM.Boat, id_prefix = "S") >>> F_B_r = MF3_E.Entity.Auto (scope.SRM.Boat, id_prefix = "S_r", attr_spec = attr_spec) >>> F_RiR = MF3_E.Entity.Auto (scope.SRM.Regatta_in_Ranking, id_prefix = "S") >>> f_b = F_B (scope) >>> f_b_r = F_B_r (scope) >>> f_rir = F_RiR (scope) >>> print (formatted (f_b.as_json_cargo)) { 'buddies' : { 1 : ['S-78:left.name'] , 2 : [ 'S-78:left' , 'S-78:nation' , 'S-78:sail_number' , 'S-78:sail_number_x' ] , 3 : [ 'S-78:nation' , 'S-78:sail_number' , 'S-78:sail_number_x' ] } , 'cargo' : { 'field_values' : { 'S-78:left' : {'init' : {}} , 'S-78:left.name' : {} , 'S-78:name' : {} , 'S-78:nation' : {} , 'S-78:sail_number' : {} , 'S-78:sail_number_x' : {} } , 'sid' : 0 , 'sigs' : { 'S-78' : '93Qk3j2q66JPVdcXUx-J9kTeOwLfyS-ky5Dcsw' , 'S-78:left' : 'O38NylwL4xRzTHppwxAnIFbWUuSZkiurFOLBrQ' } } , 'checkers' : {} , 'completers' : { 1 : { 'buddies_id' : 1 , 'entity_p' : True , 'treshold' : 1 } , 2 : { 'buddies_id' : 2 , 'entity_p' : True , 'treshold' : 1 } } } >>> print (formatted (f_b_r.as_json_cargo)) { 'buddies' : { 1 : [ 'S_r-78:left' , 'S_r-78:nation' , 'S_r-78:sail_number' , 'S_r-78:sail_number_x' ] , 2 : [ 'S_r-78:nation' , 'S_r-78:sail_number' , 'S_r-78:sail_number_x' ] } , 'cargo' : { 'field_values' : { 'S_r-78:left' : {'init' : {}} , 'S_r-78:name' : {} , 'S_r-78:nation' : {} , 'S_r-78:sail_number' : {} , 'S_r-78:sail_number_x' : {} } , 'sid' : 0 , 'sigs' : {'S_r-78' : 'ZR3qceE4XUgpNj19f1vwXC14K1nM57IlV-7_aA'} } , 'checkers' : {} , 'completers' : { 1 : { 'buddies_id' : 1 , 'entity_p' : True , 'treshold' : 1 } } } >>> print (formatted (f_rir.as_json_cargo)) { 'buddies' : { 1 : [ 'S-114:left' , 'S-114:right' ] } , 'cargo' : { 'field_values' : { 'S-114:factor' : {'edit' : '1.0'} , 'S-114:left' : {'init' : {}} , 'S-114:right' : {'init' : {}} } , 'sid' : 0 , 'sigs' : {'S-114' : 'Cx5I_f7OxY-6Jdly8D1vlMSITfjN05JUFQKIOw'} } , 'checkers' : {} , 'completers' : { 1 : { 'buddies_id' : 1 , 'entity_p' : True , 'treshold' : 1 } , 2 : { 'buddies_id' : 1 , 'entity_p' : True , 
'treshold' : 0 } } } >>> show_elements (f_b, "Entity") <Entity S-78> <Entity S-78> <Field_Entity S-78:left> <Entity S-78> <Field S-78:left.name> <Field_Entity S-78:left> <Field S-78:sail_number> <Entity S-78> <Field S-78:nation> <Entity S-78> <Field S-78:sail_number_x> <Entity S-78> <Field S-78:name> <Entity S-78> >>> show_elements (f_b_r, "Entity") <Entity S_r-78> <Entity S_r-78> <Field_Entity S_r-78:left> <Entity S_r-78> <Field S_r-78:sail_number> <Entity S_r-78> <Field S_r-78:nation> <Entity S_r-78> <Field S_r-78:sail_number_x> <Entity S_r-78> <Field S_r-78:name> <Entity S_r-78> >>> sorted (f_b_r ["S_r-78:left"].attr_map.items ()) [('name', <Field S_r-78:left.name>)] >>> f_b_r_l_n = f_b_r ["S_r-78:left.name"] >>> f_b_r_l_n.attr.completer.MF3 (f_b_r_l_n) <Completer for <Field S_r-78:left.name>, treshold = 1, entity_p = 1> >>> show_elements (f_rir, "completer") <Entity S-114> None <Field_Entity S-114:left> <E_Completer for <Field_Entity S-114:left>, treshold = 1, entity_p = 1> <Field_Entity S-114:right> <E_Completer for <Field_Entity S-114:right>, treshold = 0, entity_p = 1> <Field S-114:factor> None >>> f_rir_r = f_rir ["S-114:right"] >>> f_rir_r.completer <E_Completer for <Field_Entity S-114:right>, treshold = 0, entity_p = 1> >>> f_rir_r.completer.elems (<Field_Entity S-114:right>, <Field_Entity S-114:left>) >>> f_rir_r_n = f_rir ["S-114:right.name"] >>> f_rir_r_n.attr.completer.MF3 (f_rir_r_n) <Completer for <Field S-114:right.name>, treshold = 0, entity_p = 1> """ _test_skip = r""" >>> scope = Scaffold.scope (%(p1)s, %(n1)s) # doctest:+ELLIPSIS Creating new scope MOMT__... >>> scope.db_meta_data.dbid = '2d802327-5c99-49ca-9af7-2ddc6b4c648b' >>> def skip_snx (v) : ... return dict (attr_spec = { "left.sail_number_x" : dict (skip = v) }) >>> F_BiR_X = MF3_E.Entity.Auto (scope.SRM.Boat_in_Regatta, id_prefix = "X") >>> F_BiR_N = MF3_E.Entity.Auto (scope.SRM.Boat_in_Regatta, id_prefix = "N", ** skip_snx (1)) >>> f_bir_x_x = F_BiR_X (scope, ** skip_snx (0)) >>> f_bir_x_n = F_BiR_X (scope, ** skip_snx (1)) >>> f_bir_n_x = F_BiR_N (scope, ** skip_snx (0)) >>> f_bir_n_n = F_BiR_N (scope, ** skip_snx (1)) >>> list (F_BiR_X ["left"].elements_transitive ()) [-1] <class Field X-108:left.sail_number_x> >>> list (F_BiR_N ["left"].elements_transitive ()) [-1] <class Field N-108:left.nation> >>> list (f_bir_x_x ["left"].elements_transitive ()) [-1] <Field X-108:left.sail_number_x> >>> list (f_bir_x_n ["left"].elements_transitive ()) [-1] <Field X-108:left.nation> >>> list (f_bir_n_x ["left"].elements_transitive ()) [-1] <Field N-108:left.sail_number_x> >>> list (f_bir_n_n ["left"].elements_transitive ()) [-1] <Field N-108:left.nation> """ __test__ = Scaffold.create_test_dict \ ( dict ( element = _test_element , max_rev_ref = _test_max_rev_ref , single_primary = _test_single_primary , skip = _test_skip ) ) ### __END__ GTW.__test__.MF3
# ASAPy-1.1.2: ASAP/AutomaticSurfaceAdsorbateStructureProvider.py
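# ASAP builds trial adsorption structures: for every symmetry-unique site of an
# adsorbent, it generates candidate adsorbate positions from the site's
# coordination environment (CE) and writes one POSCAR per candidate.
#
# A minimal usage sketch (hypothetical file names; assumes the CE labels,
# coordination numbers, and unique-site indices were obtained elsewhere,
# e.g. from a ChemEnv-style analysis):
#
#     from pymatgen.core import Structure, Molecule
#     slab = Structure.from_file("POSCAR_slab")        # adsorbent
#     o2 = Molecule.from_file("O2.xyz")                # adsorbate
#     asap = ASAP(CEs=["S:1", "L:2"], CNs=[1, 2], Usites=[0, 5],
#                 Ads=o2, Struc=slab, bonding_atom=0, cutoff=2.4,
#                 bonding_dist=2.0, dope_list=[], diatomic=1.21)
#     asap.create_all_adsorbates()                     # writes POSCAR_* files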
import operator
import math
from itertools import combinations

import numpy as np
import sympy as sym
from sympy import re
from sympy.geometry import Plane, Point3D
from pymatgen.core.sites import PeriodicSite
from pymatgen.io.vasp.inputs import Poscar


class ASAP():
    # ASAP can currently handle all coordination environments >= 2.
    # Still need to test the TL:3 Trigonal Plane and S:4 Square Plane environments.
    def __init__(
            self,
            CEs: list,
            CNs,
            Usites: list,
            Ads,
            Struc,
            bonding_atom: int,
            cutoff: float,
            bonding_dist: float,
            dope_list: list,
            diatomic: float = None
    ):
        self.Usites = Usites              # list of unique sites in the adsorbing structure
        self.CNs = CNs                    # coordination numbers associated with the CEs
        self.CEs = CEs                    # list of coordination environments, one per Usite
        self.Adsorbate = Ads              # geometry of the adsorbate
        self.Adsorbent = Struc            # geometry of the adsorbing structure
        self.bonding_atom = bonding_atom  # the atom in the adsorbate that is bonding
        self.bonding_dist = bonding_dist  # the bond distance of the bonding adsorbate atom
        self.cutoff = cutoff              # cutoff threshold between adsorbate atoms and lattice atoms
        self.dope_list = dope_list        # list of substitutions to be made at each adsorbate site
                                          # (pass an empty list if no substitutions are desired)
        self.diatomic = diatomic          # bond distance for a diatomic bidentate bond, if included

    def get_neighbors(self):
        # Collect the neighbors within 5 A of every unique site, sorted by distance.
        NN = []
        for x in self.Usites:
            nn = self.Adsorbent.get_neighbors(self.Adsorbent.sites[x], 5.0)
            nn.sort(key=lambda n: n.nn_distance)
            NN.append(nn)
        return NN

    def create_all_adsorbates(self):
        # Dispatch each unique site to the site generator matching its
        # coordination environment, then decorate the site with the adsorbate.
        spot = 1
        NNs = self.get_neighbors()
        for counter, x in enumerate(self.CEs):
            if x == 'L:2' or x == 'A:2':
                inserts, non_per = self.L2_A2(self.Usites[counter], NNs[counter], self.CNs[counter])
            elif x == 'S:1':
                inserts, non_per = self.S1(self.Usites[counter], NNs[counter], self.CNs[counter])
            else:
                inserts, non_per = self.New_Sites(self.Usites[counter], NNs[counter], self.CNs[counter])
            if self.diatomic:
                spot = self.get_bidentate_adsorbate(inserts, non_per, self.Usites[counter], spot)
            else:
                spot = self.get_adsorbate(inserts, non_per, self.Usites[counter], spot)

    def dope_me(self, usite, spot, name):
        # Write a POSCAR for every requested dopant substituted at the adsorbent
        # site, restoring the original species afterwards.
        og_species = self.Adsorbent[usite].species_string
        str_name = '{}{}'.format(spot, name)
        for d in self.dope_list:
            if d != og_species:
                self.Adsorbent.replace(usite, d)
                pos = Poscar(self.Adsorbent.get_sorted_structure())
                pos.write_file('POSCAR_{}_Site_{}'.format(self.Adsorbent[usite].species_string, str_name))
        self.Adsorbent.replace(usite, og_species)

    def New_Sites(self, usite, nn, cn):
        X = self.Adsorbent[usite].x
        Y = self.Adsorbent[usite].y
        Z = self.Adsorbent[usite].z
        sort_nn = sorted(nn, key=operator.itemgetter(1))  # redundant with get_neighbors
        neighbors = sort_nn[:cn]
        vectors = []
        for f in range(cn):
            coords = neighbors[f][0].coords
            V = coords - [X, Y, Z]
            vectors.append(V)
        vectors = np.asarray(vectors)
        sites = []
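        # For every combination of three coordinating neighbors, sum their bond
        # vectors: if the sum points along the normal of the plane spanned by
        # those three neighbors (within ~20 degrees), it is a candidate open
        # direction for the adsorbate.  Candidates that nearly coincide with an
        # existing bond vector are rejected.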
        combos = np.asarray(list(combinations(range(cn), 3)))
        for c in combos:
            vec_check = []
            n_cords = []
            Vj = [0, 0, 0]
            for x in c:
                Vj += vectors[x]
                vec_check.append(vectors[x])
                n_cords.append(neighbors[x].coords)
            vec_check = np.asarray(vec_check)
            v1 = n_cords[0] - n_cords[1]
            v2 = n_cords[0] - n_cords[2]
            cp1 = np.cross(v2, v1)  # normal of the plane spanned by the three neighbors
            cp1 = cp1 * (np.linalg.norm(Vj) / np.linalg.norm(cp1))
            angle1 = np.arccos(np.dot(Vj, cp1) / (np.linalg.norm(Vj) * np.linalg.norm(cp1)))
            angle1 = np.degrees(np.abs(angle1))
            newsite = 'bad'
            if np.abs(angle1) < 20 or np.abs(angle1 - 180) < 20:
                newsite = 'good'
            Ang = np.zeros((len(vectors),))
            for count, V in enumerate(vectors):
                Ang[count] = np.degrees(np.arccos(np.round(np.dot(Vj, V) / (np.linalg.norm(Vj) * np.linalg.norm(V)), 8)))
            # For large cn CEs the difference in angles can be < 10 and still be a wrong Vj.
            if (Ang <= 8).any():
                newsite = 'bad'
            if np.linalg.norm(Vj) < vec_check.all():
                newsite = 'bad'
            if newsite == 'good':
                # adjust the length of Vj to the desired bond distance
                factor_adjust = self.bonding_dist / np.linalg.norm(Vj)
                Vj = factor_adjust * Vj
                # add back the adsorbent site XYZ for Cartesian points of the adsorbate
                sites.append(Vj + [X, Y, Z])
        combos = np.asarray(list(combinations(range(cn), 4)))
        for c in combos:
            vec_check = []
            n_cords = []
            Vj = [0, 0, 0]
            for x in c:
                Vj += vectors[x]
                vec_check.append(vectors[x])
                n_cords.append(neighbors[x].coords)
            vec_check = np.asarray(vec_check)
            v1 = n_cords[0] - n_cords[1]
            v2 = n_cords[0] - n_cords[2]
            v3 = n_cords[0] - n_cords[3]
            cp1 = np.cross(v1, v2)
            cp1 = cp1 * (np.linalg.norm(Vj) / np.linalg.norm(cp1))
            cp2 = np.cross(v1, v3)
            cp2 = cp2 * (np.linalg.norm(Vj) / np.linalg.norm(cp2))
            angle1 = np.arccos(np.dot(Vj, cp1) / (np.linalg.norm(Vj) * np.linalg.norm(cp1)))
            angle2 = np.arccos(np.dot(Vj, cp2) / (np.linalg.norm(Vj) * np.linalg.norm(cp2)))
            angle1 = np.degrees(angle1)
            angle2 = np.degrees(angle2)
            newsite = 'bad'
            if (np.abs(angle1) < 10 or np.abs(angle1 - 180) < 10) and (np.abs(angle2) < 10 or np.abs(angle2 - 180) < 10):
                newsite = 'good'
            Ang = np.zeros((len(vectors),))
            for count, V in enumerate(vectors):
                Ang[count] = np.degrees(np.round(np.arccos(np.dot(Vj, V) / (np.linalg.norm(Vj) * np.linalg.norm(V))), 8))
            # For large cn CEs the difference in angles can be < 10 and still be
            # a wrong Vj, so we check that Vj is not close to any vector in line
            # with a NN.
            if (Ang <= 8).any():
                newsite = 'bad'
            if np.linalg.norm(Vj) < vec_check.all():
                newsite = 'bad'
            if newsite == 'good':
                # adjust the length of Vj to the desired bond distance
                factor_adjust = self.bonding_dist / np.linalg.norm(Vj)
                Vj = factor_adjust * Vj
                # add back the adsorbent site XYZ for Cartesian points of the adsorbate
                sites.append(Vj + [X, Y, Z])
        # A fast way to find planes, but it does not capture all possibilities,
        # only perfectly square ones:
        # max_shape = []
        # makes_plane = []
        # for x in range(3, cn + 1):
        #     combos = list(combinations(range(cn), x))
        #     for y in combos:
        #         is_zero = np.array([0, 0, 0])
        #         for z in y:
        #             is_zero = is_zero + vectors[z]
        #         if (np.abs(is_zero) < .1).all():
        #             makes_plane.append(y)
        # Lastly, check for planar CEs of 4 or greater; trigonal planes should
        # already be captured.
        has_planes = False
        for x in range(4, cn + 1):
            combos = list(combinations(range(cn), x))
            [N, M] = np.shape(combos)
            makes_plane = []
            for y in combos:
                myplane = Plane(Point3D(vectors[y[0]]), Point3D(vectors[y[1]]), Point3D(vectors[y[2]]))
                new_dist = 0
                for z in range(3, M):
                    new_dist = new_dist + myplane.distance(Point3D(vectors[y[z]]))
                if new_dist < .001:
                    makes_plane.append(y)
            if np.size(makes_plane) > 0:
                has_planes = True
                my_planes = makes_plane
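        # If the coordinating atoms contain a planar sub-environment, one more
        # candidate lies along the plane normal; the angles to the existing
        # candidates decide which side of the plane is open.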
        if has_planes:
            myplane = Plane(Point3D(vectors[my_planes[0][0]]),
                            Point3D(vectors[my_planes[0][1]]),
                            Point3D(vectors[my_planes[0][2]]))
            # vector orthogonal to the plane of the CE
            orthovec = np.double(myplane.normal_vector)
            factor_adjust = self.bonding_dist / np.linalg.norm(orthovec)
            orthovec = factor_adjust * orthovec
            angles = []
            resite = np.asarray(sites) - [X, Y, Z]
            for x in resite:
                angles.append(np.degrees(np.arccos(np.dot(x, orthovec) / (np.linalg.norm(x) * np.linalg.norm(orthovec)))))
            if (np.asarray(angles) < 90).all():
                sites.append(np.asarray(orthovec) * -1 + [X, Y, Z])
            else:
                sites.append(np.asarray(orthovec) + [X, Y, Z])
        SITES = []
        for s in sites:
            SITES.append(PeriodicSite('O', s, self.Adsorbent.lattice, to_unit_cell=True, coords_are_cartesian=True))
        inserts = SITES
        non_per = sites
        return inserts, non_per

    def L2_A2(self, usite, nn, cn):
        # Linear (L:2) or angular (A:2) CE: we find sites around this CE using a
        # rotation around the adsorbate site in a plane that is normal to the
        # neighboring atoms that make up the CE.
        X = self.Adsorbent[usite].x
        Y = self.Adsorbent[usite].y
        Z = self.Adsorbent[usite].z
        sort_nn = sorted(nn, key=operator.itemgetter(1))  # redundant with get_neighbors
        neighbors = sort_nn[:cn]
        vectors = []
        for f in range(cn):
            coords = neighbors[f][0].coords
            V = coords - [X, Y, Z]
            vectors.append(V)
        vectors = np.asarray(vectors)
        pie = math.pi
        val = [0, pie / 4, pie / 2, pie * 3 / 4, pie, pie * 5 / 4, pie * 6 / 4, pie * 7 / 4]  # , -pie]
        if (np.abs(np.cross(vectors[0], vectors[1])) < .1).all():  # L2
            # If the cross product is close to zero for the two coordinated
            # atoms, then the CE is L2.
            t = sym.Symbol('t', real=True)
            # Create a plane through the unique site with normal vector = the new oxygen site.
            pln = Plane(Point3D(0, 0, 0), normal_vector=vectors[0])
            # Create an arbitrary point OBJECT as a function of t from the new plane.
            pln_ap = pln.arbitrary_point(t)
            sites = []
            axis = []
            for x in val:
                axis.append(pln_ap.subs(t, x))
            axis = np.asarray(axis, dtype=float)
            # for y in axis:
            factor_adjust = self.bonding_dist / np.linalg.norm(axis)
            Vj = factor_adjust * axis
            # add back the adsorbent site XYZ for Cartesian points of the adsorbate
            sites.append(Vj + [X, Y, Z])
            SITES = []
            for s in sites:
                SITES.append(PeriodicSite('O', s, self.Adsorbent.lattice, to_unit_cell=True, coords_are_cartesian=True))
            inserts = SITES
            non_per = sites
        else:  # A2
            pln1 = Plane(Point3D(0, 0, 0), normal_vector=vectors[0])
            pln2 = Plane(Point3D(0, 0, 0), normal_vector=vectors[1])
            # Create an arbitrary point OBJECT as a function of t from each plane.
            t = sym.Symbol('t', real=True)
            pln_ap1 = pln1.arbitrary_point(t)
            pln_ap2 = pln2.arbitrary_point(t)
            sites = []
            axis = []
            for x in val:
                ax1 = pln_ap1.subs(t, x)
                ax2 = pln_ap2.subs(t, x)
                xo = (ax1[0] + ax2[0]) / 2
                yo = (ax1[1] + ax2[1]) / 2
                zo = (ax1[2] + ax2[2]) / 2
                axis.append([xo, yo, zo])
            axis = np.asarray(ax2, dtype=float)
            # for y in axis:
            factor_adjust = self.bonding_dist / np.linalg.norm(axis)
            Vj = factor_adjust * axis
            # add back the adsorbent site XYZ for Cartesian points of the adsorbate
            sites.append(Vj + [X, Y, Z])
            SITES = []
            for s in sites:
                SITES.append(PeriodicSite('O', s, self.Adsorbent.lattice, to_unit_cell=True, coords_are_cartesian=True))
            inserts = SITES
            non_per = sites
        return inserts, non_per

    def S1(self, usite, nn, cn):
        # Single-neighbor bond: cn must == 1.
        X = self.Adsorbent[usite].x
        Y = self.Adsorbent[usite].y
        Z = self.Adsorbent[usite].z
        sort_nn = sorted(nn, key=operator.itemgetter(1))
        neighbors = sort_nn[:cn]
        vectors = []
        for f in range(cn):
            coords = neighbors[f][0].coords
            V = coords - [X, Y, Z]
            vectors.append(V)
        vectors = np.asarray(vectors)
        sites = []
        axis = -vectors  # the bonding vector is in line with the adsorbent neighbor vector
        for y in axis:
            factor_adjust = self.bonding_dist / np.linalg.norm(y)
            Vj = factor_adjust * y
            # add back the adsorbent site XYZ for Cartesian points of the adsorbate
            sites.append(Vj + [X, Y, Z])
        SITES = []
        for s in sites:
            SITES.append(PeriodicSite('O', s, self.Adsorbent.lattice, to_unit_cell=True, coords_are_cartesian=True))
        inserts = SITES
        non_per = sites
        return inserts, non_per
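    # The helpers below support the orientation search in get_adsorbate and
    # get_bidentate_adsorbate: make_pln builds a plane through the candidate
    # adsorbate site normal to the bond vector, and arbitraty_axis samples
    # directions in that plane to use as rotation axes.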
    def make_pln(self, usite, vec):
        a = vec[0]
        b = vec[1]
        c = vec[2]
        # Make the plane go through the new site and not the adsorbent.
        xo = self.Adsorbent[usite].x + a
        yo = self.Adsorbent[usite].y + b
        zo = self.Adsorbent[usite].z + c
        t = sym.Symbol('t', real=True)
        # Create a plane through the unique site with normal vector = the new oxygen site.
        pln = Plane(Point3D(xo, yo, zo), normal_vector=(a, b, c))
        # Create an arbitrary point OBJECT as a function of t from the new plane.
        ap = pln.arbitrary_point(t)
        return ap

    # We must loop through t = 0 to pi to find the best spot of the arbitrary
    # line rotation.
    def arbitraty_axis(self, ap, val, vec, usite):
        a = vec[0]
        b = vec[1]
        c = vec[2]
        xo = self.Adsorbent[usite].x + a
        yo = self.Adsorbent[usite].y + b
        zo = self.Adsorbent[usite].z + c
        t = sym.Symbol('t', real=True)
        axis = [re(ap.x.subs(t, val)) - xo,
                re(ap.y.subs(t, val)) - yo,
                re(ap.z.subs(t, val)) - zo]
        return axis

    def rotation_matrix(self, axis, theta):
        """
        Return the rotation matrix associated with counterclockwise rotation
        about the given axis by theta radians.
        """
        # dot = sym.Function('dot')
        axis = np.asarray(axis)
        axis = axis / math.sqrt(np.dot(axis, axis))
        # Euler-Rodrigues parameters
        a = math.cos(theta / 2)
        b, c, d = -axis * math.sin(theta / 2)
        aa, bb, cc, dd = a * a, b * b, c * c, d * d
        bc, ad, ac, ab, bd, cd = b * c, a * d, a * c, a * b, b * d, c * d
        return np.array([[aa + bb - cc - dd, 2 * (bc + ad), 2 * (bd - ac)],
                         [2 * (bc - ad), aa + cc - bb - dd, 2 * (cd + ab)],
                         [2 * (bd + ac), 2 * (cd - ab), aa + dd - bb - cc]])

    def dist_from_lattice(self, period_sites):
        # Sum, over the adsorbate atoms, of each atom's nearest-lattice-neighbor distance.
        dist = 0
        for x in period_sites:
            self.Adsorbent.append(x.species_string, x.coords, coords_are_cartesian=True)
            nn = self.Adsorbent.get_neighbors(self.Adsorbent.sites[-1], 5.0)
            sort_nn = sorted(nn, key=operator.itemgetter(1))
            try:
                dist += sort_nn[0][1]
            except IndexError:
                dist += 5.0
            self.Adsorbent.remove(self.Adsorbent.sites[-1])
        return dist

    def shortest_dist_from_lattice(self, period_sites):
        # Shortest nearest-lattice-neighbor distance over the adsorbate atoms.
        dist = 100
        for x in period_sites:
            self.Adsorbent.append(x.species_string, x.coords, coords_are_cartesian=True)
            nn = self.Adsorbent.get_neighbors(self.Adsorbent.sites[-1], 5.0)
            sort_nn = sorted(nn, key=operator.itemgetter(1))
            try:
                if sort_nn[0][1] < dist:
                    dist = sort_nn[0][1]
            except IndexError:
                dist += 5.0
            self.Adsorbent.remove(self.Adsorbent.sites[-1])
        return dist

    def get_adsorbate(self, inserts, non_per, usite, spot):
        start_spot = spot
        remove = []
        remove_non_per = []
        for counter, site in enumerate(inserts):
            for K, y in enumerate(self.Adsorbent):
                if (K != usite) and (site.distance(y) < self.cutoff):  # 2.4 is a cutoff
                    remove.append(site)
                    remove_non_per.append(non_per[counter])
                    break
        new_inserts = [x for x in inserts if x not in remove]
        new_non_per = []
        for y in non_per:
            check = True
            for z in remove_non_per:
                if (z == y).all():
                    check = False
                    break
            if check:
                new_non_per.append(y)
        # The list comprehension below does not work for 2D lists:
        # new_non_per = [x for x in non_per if x not in (remove_non_per).any()]
        sorb_vecs, adsorbate_types = self.adsorbate_vectors()
        if len(sorb_vecs) > 1:
            for counter, site in enumerate(new_non_per):  # enumerate is used so .remove can be used
                non_p_site = new_non_per[counter]
                pie = math.pi
                val = [0, pie / 4, pie / 2, pie * 3 / 4, pie, -pie / 4, -pie / 2, -pie * 3 / 4]  # , -pie]
                vec = non_p_site - self.Adsorbent[usite].coords
                pln_ap = self.make_pln(usite, vec)
                axis = []
                for x in val:
                    axis.append(self.arbitraty_axis(pln_ap, x, vec, usite))
                dist = 0
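                # Orientation search: for each of the 8 in-plane axes, rotate
                # the adsorbate by 0-315 degrees about that axis and about the
                # bond vector, and keep the orientation whose atoms are farthest
                # from the lattice.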
np.asarray(np.linspace(0,360,9,dtype='int')) np.delete(rot_angles,-1) # dont need 360 degree rotations for arb_rot in rot_angles: for angle in rot_angles: # count10 = 0 # used for vizualizing all rotations # First rotaion matrix is a rotation around the vec rotate_mx = self.rotation_matrix(vec, np.radians(angle)) for x in axis: atom_spots = [] # Second rotaion is around an arbitrary axis through # the new adsorbate site rotate_mx2 = self.rotation_matrix(x, np.radians(arb_rot)) for z in sorb_vecs: # z is the arbitrary axis (do not add vec here) first_rot = np.dot(rotate_mx2,z) atom_spots.append(np.dot(rotate_mx,first_rot+vec)) period_sites = [] for count,new in enumerate(atom_spots): atom = np.array(new, dtype=np.float64) + self.Adsorbent[usite].coords ATOM = PeriodicSite(adsorbate_types[count],atom,self.Adsorbent.lattice,to_unit_cell=True,coords_are_cartesian=True) period_sites.append(ATOM) ################################################## # # Strickly for visualizing all rotation of adsorbate tested # for g in period_sites: # self.Adsorbent.append(g.species_string,g.coords,coords_are_cartesian=True) # pos = Poscar(self.Adsorbent) # pos.write_file('POSCAR_{}_Site_{}_{}_{}_{}'.format(self.Adsorbent[usite].species_string,spot,arb_rot,angle,count10)) # count10 += 1 # for h in period_sites: # just usign x as a counter # self.Adsorbent.remove(self.Adsorbent.sites[-1]) ################################################## tot = self.dist_from_lattice(period_sites) if (tot > dist): best_sites = period_sites dist = tot # # check one more time to ensure adsrobate is not sitting too cloase to any oher atoms final_check = self.shortest_dist_from_lattice(best_sites) if final_check >= self.cutoff: for x in best_sites: self.Adsorbent.append(x.species_string,x.coords,coords_are_cartesian=True) pos = Poscar(self.Adsorbent) pos.write_file('{}_{}_Site_{}'.format('POSCAR',self.Adsorbent[usite].species_string,spot)) if len(self.dope_list) > 0: self.dope_me(usite,spot,'') # Create same site but doped for x in best_sites: # just usign x as a counter self.Adsorbent.remove(self.Adsorbent.sites[-1]) else: pass spot = spot + 1 if len(self.dope_list) > 0: self.dope_me(usite,start_spot,'_{}_no_O2'.format(int(spot)-1)) #creates doped usite with no O2 elif len(sorb_vecs) == 1: ## need to rottae single atom work in progress # for counter,site in enumerate(new_inserts): # enumerte is used so .remove can be used # non_p_site = new_non_per[counter] # pie = math.pi # val = [0, pie/4, pie/2, pie*3/4, pie, -pie/4, -pie/2, -pie*3/4]#,-pie] # vec = non_p_site - self.Adsorbent[usite].coords # pln_ap = self.make_pln(usite,vec) # axis = [] # for x in val: # axis.append(self.arbitraty_axis(pln_ap, x, vec, usite)) # dist = 0 # rot_angles = np.asarray(np.linspace(0,360,9,dtype='int')) # np.delete(rot_angles,-1) # dont need 360 degree rotations # for arb_rot in rot_angles: # for x in axis: # atom_spots = [] # # the new adsorbate site # rotate_mx2 = self.rotation_matrix(x, np.radians(arb_rot)) # first_rot = np.dot(rotate_mx2,vec) # atom_spots.append(np.dot(rotate_mx2,first_rot)) # period_sites = [] # for count,new in enumerate(atom_spots): # atom = np.array(new, dtype=np.float64) + self.Adsorbent[usite].coords # ATOM = PeriodicSite(adsorbate_types[count],atom,self.Adsorbent.lattice,to_unit_cell=True,coords_are_cartesian=True) # period_sites.append(ATOM) # ################################################## # # # Strickly for visualizing all rotation of adsorbate tested # # for g in period_sites: # # 
self.Adsorbent.append(g.species_string,g.coords,coords_are_cartesian=True) # # pos = Poscar(self.Adsorbent) # # pos.write_file('POSCAR_{}_Site_{}_{}_{}_{}'.format(self.Adsorbent[usite].species_string,spot,arb_rot,angle,count10)) # # count10 += 1 # # for h in period_sites: # just usign x as a counter # # self.Adsorbent.remove(self.Adsorbent.sites[-1]) # ################################################## # tot = self.dist_from_lattice(period_sites) # if (tot > dist): # best_sites = period_sites # dist = tot # # # check one more time to ensure adsrobate is not sitting too cloase to any oher atoms # final_check = self.shortest_dist_from_lattice(best_sites) # if final_check > 0.5: # for x in best_sites: # self.Adsorbent.append(x.species_string,x.coords,coords_are_cartesian=True) # pos = Poscar(self.Adsorbent) # pos.write_file('{}_{}_Site_{}'.format('POSCAR',self.Adsorbent[usite].species_string,spot)) # if len(self.dope_list) > 0: # self.dope_me(usite,spot,'') # Create same site but doped # for x in best_sites: # just usign x as a counter # self.Adsorbent.remove(self.Adsorbent.sites[-1]) # else: # pass # spot = spot + 1 # if len(self.dope_list) > 0: # self.dope_me(usite,start_spot,'_{}_no_O2'.format(int(spot)-1)) #creates doped usite with no O2 for x in new_inserts: self.Adsorbent.append(adsorbate_types[0],x.coords,coords_are_cartesian=True) pos = Poscar(self.Adsorbent) pos.write_file('{}_{}_Site_{}'.format('POSCAR',self.Adsorbent[usite].species_string,spot)) if len(self.dope_list) > 0: self.dope_me(usite,spot,'') # Create same site but doped spot = spot + 1 self.Adsorbent.remove(self.Adsorbent.sites[-1]) if len(self.dope_list) > 0: self.dope_me(usite,start_spot,'_{}_no_O2'.format(int(spot)-1)) #creates doped usite with no adsorbate return spot def get_bidentate_adsorbate(self,inserts,non_per,usite,spot): start_spot = spot remove = [] remove_non_per = [] for counter,site in enumerate(inserts): for K,y in enumerate(self.Adsorbent): if (K != usite) and (site.distance(y) < self.cutoff): # remove.append(site) remove_non_per.append(non_per[counter]) break new_inserts = [x for x in inserts if x not in remove] new_non_per = [x for x in non_per if x not in np.asarray(remove_non_per)] sorb_vecs,adsorbate_types = self.adsorbate_vectors() if len(sorb_vecs) > 1: for counter,site in enumerate(new_inserts): #enumerte is used so .remove can be used non_p_site = new_non_per[counter] pie = math.pi val = [0, pie/4, pie/2, pie*3/4, pie, -pie/4, -pie/2, -pie*3/4]#,-pie] vec = non_p_site - self.Adsorbent[usite].coords pln_ap = self.make_pln(usite,vec) axis = [] for x in val: axis.append(self.arbitraty_axis(pln_ap, x, vec, usite)) dist = 0 rot_angles = np.asarray(np.linspace(0,360,9,dtype='int')) np.delete(rot_angles,-1) # dont need 360 degree rotations # count10 = 0 # used for vizualizing all rotations for x in axis: atom_spots = [] theta_up = np.arcsin(0.5*(self.diatomic/self.bonding_dist)) theta_down = np.arcsin(-0.5*(self.diatomic/self.bonding_dist)) rotate_up = self.rotation_matrix(x, theta_up) rotate_down = self.rotation_matrix(x, theta_down) atom_spots.append(np.dot(rotate_up,vec)) atom_spots.append(np.dot(rotate_down,vec)) period_sites = [] for count,new in enumerate(atom_spots): atom = np.array(new, dtype=np.float64) + self.Adsorbent[usite].coords ATOM = PeriodicSite(adsorbate_types[count],atom,self.Adsorbent.lattice,to_unit_cell=True,coords_are_cartesian=True) period_sites.append(ATOM) ########################################################## # # Strickly for visualizing all rotation of adsorbate tested # 
for g in period_sites: # self.Adsorbent.append(g.species_string,g.coords,coords_are_cartesian=True) # pos = Poscar(self.Adsorbent) # pos.write_file('POSCAR_{}_Site_{}_{}'.format(self.Adsorbent[usite].species_string,spot,count10)) # count10 += 1 # for h in period_sites: # just usign x as a counter # self.Adsorbent.remove(self.Adsorbent.sites[-1]) ########################################################## tot = self.dist_from_lattice(period_sites) if (tot > dist): best_sites = period_sites dist = tot ## check one more time to ensure adsrobate is not sitting too cloase to any oher atoms final_check = self.shortest_dist_from_lattice(best_sites) if final_check > self.cutoff: for x in best_sites: self.Adsorbent.append(x.species_string,x.coords,coords_are_cartesian=True) pos = Poscar(self.Adsorbent) pos.write_file('{}_{}_Site_{}'.format('POSCAR',self.Adsorbent[usite].species_string,spot)) if len(self.dope_list) > 0: self.dope_me(usite,spot,'') # Create same site but doped for x in best_sites: # just usign x as a counter self.Adsorbent.remove(self.Adsorbent.sites[-1]) else: pass spot = spot + 1 if len(self.dope_list) > 0: self.dope_me(usite,start_spot,'_{}_no_O2'.format(int(spot)-1)) #creates doped usite with no O2 return spot def adsorbate_vectors(self): origin_site = self.Adsorbate[self.bonding_atom-1] # this is assiging the pymatgen PeriodicSite origin = origin_site.coords # this is getting the cart coords for the site sorb_vecs = np.zeros((len(self.Adsorbate),3)) # intilize vector # loop through adsorbate and create a list of vectros from the origin site # which will be the site that bonds to the absorbent adsorbate_types = [] for count,x in enumerate(self.Adsorbate): sorb_vecs[count,:] = x.coords - origin adsorbate_types.append(x.species_string) return sorb_vecs , adsorbate_types
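# A minimal numerical sketch of the quaternion-style rotation performed by
# rotation_matrix above; it is illustrative only and not part of the class API:
#
#   R = self.rotation_matrix(axis=[0, 0, 1], theta=np.pi / 2)   # 90 deg about z
#   np.dot(R, [1, 0, 0])                     # ~ [0, 1, 0]: x-axis rotates onto y-axis
#   np.allclose(np.dot(R, R.T), np.eye(3))   # True: rotation matrices are orthogonal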
PypiClean
/DLTA-AI-1.1.tar.gz/DLTA-AI-1.1/DLTA_AI_app/mmdetection/mmdet/models/necks/dyhead.py
import torch.nn as nn
import torch.nn.functional as F
from mmcv.cnn import (build_activation_layer, build_norm_layer, constant_init,
                      normal_init)
from mmcv.ops.modulated_deform_conv import ModulatedDeformConv2d
from mmcv.runner import BaseModule

from ..builder import NECKS
from ..utils import DyReLU

# Reference:
# https://github.com/microsoft/DynamicHead
# https://github.com/jshilong/SEPC


class DyDCNv2(nn.Module):
    """ModulatedDeformConv2d with normalization layer used in DyHead.

    This module cannot be configured with `conv_cfg=dict(type='DCNv2')`
    because DyHead calculates offset and mask from middle-level feature.

    Args:
        in_channels (int): Number of input channels.
        out_channels (int): Number of output channels.
        stride (int | tuple[int], optional): Stride of the convolution.
            Default: 1.
        norm_cfg (dict, optional): Config dict for normalization layer.
            Default: dict(type='GN', num_groups=16, requires_grad=True).
    """

    def __init__(self,
                 in_channels,
                 out_channels,
                 stride=1,
                 norm_cfg=dict(type='GN', num_groups=16, requires_grad=True)):
        super().__init__()
        self.with_norm = norm_cfg is not None
        bias = not self.with_norm
        self.conv = ModulatedDeformConv2d(
            in_channels, out_channels, 3, stride=stride, padding=1, bias=bias)
        if self.with_norm:
            self.norm = build_norm_layer(norm_cfg, out_channels)[1]

    def forward(self, x, offset, mask):
        """Forward function."""
        x = self.conv(x.contiguous(), offset.contiguous(), mask)
        if self.with_norm:
            x = self.norm(x)
        return x


class DyHeadBlock(nn.Module):
    """DyHead Block with three types of attention.

    HSigmoid arguments in default act_cfg follow official code, not paper.
    https://github.com/microsoft/DynamicHead/blob/master/dyhead/dyrelu.py

    Args:
        in_channels (int): Number of input channels.
        out_channels (int): Number of output channels.
        zero_init_offset (bool, optional): Whether to use zero init for
            `spatial_conv_offset`. Default: True.
        act_cfg (dict, optional): Config dict for the last activation layer of
            scale-aware attention. Default: dict(type='HSigmoid', bias=3.0,
            divisor=6.0).
    """

    def __init__(self,
                 in_channels,
                 out_channels,
                 zero_init_offset=True,
                 act_cfg=dict(type='HSigmoid', bias=3.0, divisor=6.0)):
        super().__init__()
        self.zero_init_offset = zero_init_offset
        # (offset_x, offset_y, mask) * kernel_size_y * kernel_size_x
        self.offset_and_mask_dim = 3 * 3 * 3
        self.offset_dim = 2 * 3 * 3

        self.spatial_conv_high = DyDCNv2(in_channels, out_channels)
        self.spatial_conv_mid = DyDCNv2(in_channels, out_channels)
        self.spatial_conv_low = DyDCNv2(in_channels, out_channels, stride=2)
        self.spatial_conv_offset = nn.Conv2d(
            in_channels, self.offset_and_mask_dim, 3, padding=1)
        self.scale_attn_module = nn.Sequential(
            nn.AdaptiveAvgPool2d(1), nn.Conv2d(out_channels, 1, 1),
            nn.ReLU(inplace=True), build_activation_layer(act_cfg))
        self.task_attn_module = DyReLU(out_channels)
        self._init_weights()

    def _init_weights(self):
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                normal_init(m, 0, 0.01)
        if self.zero_init_offset:
            constant_init(self.spatial_conv_offset, 0)

    def forward(self, x):
        """Forward function."""
        outs = []
        for level in range(len(x)):
            # calculate offset and mask of DCNv2 from middle-level feature
            offset_and_mask = self.spatial_conv_offset(x[level])
            offset = offset_and_mask[:, :self.offset_dim, :, :]
            mask = offset_and_mask[:, self.offset_dim:, :, :].sigmoid()

            mid_feat = self.spatial_conv_mid(x[level], offset, mask)
            sum_feat = mid_feat * self.scale_attn_module(mid_feat)
            summed_levels = 1
            if level > 0:
                low_feat = self.spatial_conv_low(x[level - 1], offset, mask)
                sum_feat = sum_feat + \
                    low_feat * self.scale_attn_module(low_feat)
                summed_levels += 1
            if level < len(x) - 1:
                # this upsample order is weird, but faster than natural order
                # https://github.com/microsoft/DynamicHead/issues/25
                high_feat = F.interpolate(
                    self.spatial_conv_high(x[level + 1], offset, mask),
                    size=x[level].shape[-2:],
                    mode='bilinear',
                    align_corners=True)
                sum_feat = sum_feat + high_feat * \
                    self.scale_attn_module(high_feat)
                summed_levels += 1
            outs.append(self.task_attn_module(sum_feat / summed_levels))

        return outs


@NECKS.register_module()
class DyHead(BaseModule):
    """DyHead neck consisting of multiple DyHead Blocks.

    See `Dynamic Head: Unifying Object Detection Heads with Attentions
    <https://arxiv.org/abs/2106.08322>`_ for details.

    Args:
        in_channels (int): Number of input channels.
        out_channels (int): Number of output channels.
        num_blocks (int, optional): Number of DyHead Blocks. Default: 6.
        zero_init_offset (bool, optional): Whether to use zero init for
            `spatial_conv_offset`. Default: True.
        init_cfg (dict or list[dict], optional): Initialization config dict.
            Default: None.
    """

    def __init__(self,
                 in_channels,
                 out_channels,
                 num_blocks=6,
                 zero_init_offset=True,
                 init_cfg=None):
        assert init_cfg is None, 'To prevent abnormal initialization ' \
            'behavior, init_cfg is not allowed to be set'
        super().__init__(init_cfg=init_cfg)
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.num_blocks = num_blocks
        self.zero_init_offset = zero_init_offset

        dyhead_blocks = []
        for i in range(num_blocks):
            in_channels = self.in_channels if i == 0 else self.out_channels
            dyhead_blocks.append(
                DyHeadBlock(
                    in_channels,
                    self.out_channels,
                    zero_init_offset=zero_init_offset))
        self.dyhead_blocks = nn.Sequential(*dyhead_blocks)

    def forward(self, inputs):
        """Forward function."""
        assert isinstance(inputs, (tuple, list))
        outs = self.dyhead_blocks(inputs)
        return tuple(outs)
PypiClean
/Dooders-0.0.3.tar.gz/Dooders-0.0.3/dooders/sdk/core/step.py
from abc import ABC, abstractmethod

from dooders.sdk.core.core import Core


class Step(Core):
    """
    A step is a point in the simulation to execute logic for a
    specific object or model during its turn.

    Attributes
    ----------
    registry : dict
        A dictionary of all the registered step flows,
        by the type of python object

    Methods
    -------
    forward(name: str, object: str) -> None
        Execute a step flow
    """

    @classmethod
    def forward(cls, name: str, object: object) -> None:
        """
        Execute a step flow

        Parameters
        ----------
        name : str, (move, consume, etc.)
            The name of the step flow to execute
        object : object
            The object to execute the step flow on

        Examples
        --------
        >>> from sdk.core.step import Step
        >>>
        >>> Step.forward('move', agent)
        """
        steps = Core.get_component(
            'step', object.__class__.__name__.lower())
        step_flow = steps[name].function
        step_flow.step(object)


class StepLogic(ABC):
    """
    Abstract Class for every step flow

    Methods
    -------
    react(object: object) -> None
        Optional method to react to the environment before acting
    act(object: object) -> None
        Required method to execute the action phase of the step flow
    sleep(object: object) -> None
        Optional method to sleep after acting
    step(object: object) -> None
        Execute the step flow
    """

    def react(self, *args, **kwargs) -> None:
        """
        Optional method to react to the environment before acting

        Parameters
        ----------
        object : object
            The object that needs to react before acting
        """
        pass

    @abstractmethod
    def act(self, *args, **kwargs) -> None:
        """
        Required method to execute the action phase of the step flow

        Parameters
        ----------
        object : object
            The object that needs to act. Passed during the object's turn

        Raises
        ------
        NotImplementedError
            If the act method is not implemented
        """
        raise NotImplementedError('act method not implemented')

    def sleep(self, *args, **kwargs) -> None:
        """
        Optional method to sleep after acting

        Parameters
        ----------
        object : object
            The object that needs to sleep after acting
        """
        pass

    @classmethod
    def step(cls, object: object) -> None:
        """
        Execute the step flow

        Parameters
        ----------
        object : object
            The object to execute the step flow on

        Examples
        --------
        >>> from sdk.core.step import StepLogic
        >>>
        >>> class Move(StepLogic):
        >>>     def act(self, object):
        >>>         print('Moving')
        >>>
        >>> Move.step(agent)
        Moving
        """
        cls.react(object)
        cls.act(object)
        cls.sleep(object)
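if __name__ == '__main__':
    # A minimal sketch of the StepLogic contract; the Move flow and the plain
    # string standing in for a simulation object are illustrative only.
    class Move(StepLogic):
        def act(self, *args, **kwargs) -> None:
            # step() calls this as cls.act(object), so the object binds to self
            print('acting on', self)

    Move.step('dooder-1')  # runs react -> act -> sleep in order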
PypiClean
/NeodroidVision-0.3.0-py36-none-any.whl/neodroidvision/data/neodroid_environments/classification/data.py
from typing import Tuple, Union

import neodroid
from PIL import Image
from draugr.multiprocessing_utilities import PooledQueueProcessor, PooledQueueTask
from draugr.torch_utilities import global_torch_device
from torch.utils.data import Dataset
from torchvision import transforms

__author__ = "Christian Heider Nielsen"

import torch

default_torch_transform = transforms.Compose(
    [
        transforms.RandomHorizontalFlip(),
        transforms.RandomRotation(180),
        # transforms.Resize(224),
        transforms.CenterCrop(224),
        transforms.ToTensor(),
    ]
)

default_torch_retransform = transforms.Compose([transforms.ToPILImage("RGB")])

__all__ = [
    "neodroid_env_classification_generator",
    "pooled_neodroid_env_classification_generator",
]


def neodroid_env_classification_generator(env, batch_size=64) -> Tuple:
    """

    :param env:
    :param batch_size:
    """
    while True:
        predictors = []
        class_responses = []
        while len(predictors) < batch_size:
            state = env.update()
            rgb_arr = state.sensor("RGB").value
            rgb_arr = Image.open(rgb_arr).convert("RGB")
            a_class = state.sensor("Class").value

            predictors.append(default_torch_transform(rgb_arr))
            class_responses.append(int(a_class))

        a = torch.stack(predictors).to(global_torch_device())
        b = torch.LongTensor(class_responses).to(global_torch_device())
        yield a, b


def pooled_neodroid_env_classification_generator(env, device, batch_size=64) -> Tuple:
    """

    :param env:
    :param device:
    :param batch_size:
    :return:
    """

    class FetchConvert(PooledQueueTask):
        """description"""

        def __init__(
            self,
            env,
            device: Union[str, torch.device] = "cpu",
            batch_size: int = 64,
            *args,
            **kwargs
        ):
            """

            :param env:
            :param device:
            :param batch_size:
            :param args:
            :param kwargs:"""
            super().__init__(*args, **kwargs)

            self.env = env
            self.batch_size = batch_size
            self.device = device

        def call(self, *args, **kwargs) -> Tuple:
            """

            Args:
              *args:
              **kwargs:

            Returns:

            """
            predictors = []
            class_responses = []

            while len(predictors) < self.batch_size:
                state = self.env.update()
                rgb_arr = state.sensor("RGB").value
                rgb_arr = Image.open(rgb_arr).convert("RGB")
                a_class = state.sensor("Class").value

                predictors.append(default_torch_transform(rgb_arr))
                class_responses.append(int(a_class))

            return (
                torch.stack(predictors).to(self.device),
                torch.LongTensor(class_responses).to(self.device),
            )

    task = FetchConvert(env, device=device, batch_size=batch_size)
    processor = PooledQueueProcessor(
        task, fill_at_construction=True, max_queue_size=16, n_proc=None
    )
    for a in zip(processor):
        yield a


if __name__ == "__main__":

    def asdadsad():
        """description"""
        neodroid_generator = neodroid_env_classification_generator(neodroid.connect())
        train_loader = torch.utils.data.DataLoader(
            dataset=neodroid_generator, batch_size=12, shuffle=True
        )

        for p, r in train_loader:
            print(r)

    asdadsad()
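# A minimal manual-consumption sketch, assuming a Neodroid simulator is running
# and reachable through neodroid.connect(); the batch size is illustrative:
#
#   env = neodroid.connect()
#   gen = neodroid_env_classification_generator(env, batch_size=8)
#   images, labels = next(gen)   # images: (8, 3, 224, 224) on the global torch device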
PypiClean
/EasyLEED-2.5.2.tar.gz/EasyLEED-2.5.2/easyleed/base.py
import numpy as np
from scipy import optimize
import math

from . import config
from . import kalman
from . import logger


class SpotModel:
    """ Data model for a Spot that stores all the information in various lists. """

    def __init__(self):
        self.x = []
        self.y = []
        self.intensity = []
        self.energy = []
        self.radius = []

    def update(self, x, y, intensity, energy, radius):
        self.x.append(x)
        self.y.append(y)
        self.intensity.append(intensity)
        self.energy.append(energy)
        self.radius.append(radius)


class Tracker:
    """ Tracks spots through intensity information and velocity prediction. """

    def __init__(self, x_in, y_in, radius, energy, x_c=None, y_c=None,
                 input_precision=1, window_scaling=False):
        """ x_in, y_in: start position of spot """
        self.radius = radius
        self.init_tracker(x_in, y_in, radius, energy, x_c, y_c,
                          input_precision, window_scaling)

    def init_tracker(self, x_in, y_in, radius, energy, x_c, y_c,
                     input_precision, window_scaling):
        if x_c and y_c:
            self.x, self.y = x_in - x_c, y_in - y_c
            self.r = (self.x**2 + self.y**2)**.5
            self.v = - 0.5 * self.r / energy
            # calculate std. dev. of velocity guess
            # by propagation of uncertainty from the input precision
            v_precision = 2**.5 * 0.5 * input_precision / energy
            self.phi = np.arctan2(self.y, self.x)
            cov_input = np.diag([input_precision, input_precision,
                                 v_precision, v_precision])**2
            self.kalman = kalman.PVKalmanFilter2(x_in, y_in, cov_input, energy,
                                                 vx_in=self.v * np.cos(self.phi),
                                                 vy_in=self.v * np.sin(self.phi))
        else:
            cov_input = np.diag([input_precision, input_precision, 1000, 1000])
            self.kalman = kalman.PVKalmanFilter2(x_in, y_in, cov_input, energy)
        self.window_scaling = window_scaling
        if self.window_scaling:
            self.c_size = energy**0.5 * self.radius

    def feed_image(self, image):
        npimage, energy = image
        if (not config.GraphicsScene_intensTimeOn) and self.window_scaling:
            self.radius = self.c_size / energy**0.5
            if self.radius < config.Tracking_minWindowSize:
                self.radius = config.Tracking_minWindowSize
        if not config.GraphicsScene_intensTimeOn:
            processNoise = np.diag([config.Tracking_processNoisePosition,
                                    config.Tracking_processNoisePosition,
                                    config.Tracking_processNoiseVelocity,
                                    config.Tracking_processNoiseVelocity])
            self.kalman.predict(energy, processNoise)
            x_p, y_p = self.kalman.get_position()
            guess = guesser(npimage, x_p, y_p, self.radius)
            if guess is not None:
                x_th, y_th, guess_cov = guess
                # spot in validation region? (based on residual covariance)
                if self.kalman.measurement_distance((x_th, y_th), guess_cov) > config.Tracking_gamma:
                    print(" No spot in validation gate")
                else:
                    self.kalman.update([x_th, y_th], guess_cov)
        x, y = self.kalman.get_position()
        intensity = calc_intensity(npimage, x, y, self.radius,
                                   background_substraction=config.Processing_backgroundSubstractionOn)
        return x, y, intensity, energy, self.radius


def guess_from_Gaussian(image, *args, **kwargs):
    """ Guess position of spot from a Gaussian fit. """
    # construct circle where data is fit
    radius = 0.5 * min(image.shape)
    distances = calc_distances(image.shape, radius-0.5, radius-0.5, radius)
    circle = distances <= radius**2
    # generate good guesses for the Gaussian distribution
    background = np.min(image)
    params = moments(image-background)
    params.append(background)
    errfunc = lambda p: np.ravel(gaussian2d(*p)(*np.indices(image.shape))[circle]
                                 - image[circle])
    # fit Gaussian
    maxfev = 200
    try:
        output = optimize.leastsq(errfunc, params, full_output=True, maxfev=maxfev)
    except:
        return None
    p_opt = output[0]
    p_cov = output[1]
    infodict = output[2]
    if infodict["nfev"] >= maxfev or p_cov is None:
        print(" Fit failed")
        return None
    # residual sum of squares sum (x_i - f_i)^2
    sum_of_squares_regression = (errfunc(p_opt)**2).sum()
    # variance of the data sum (x_i - <x>)^2
    sum_of_squares_total = ((image[circle]-np.mean(image[circle]))**2).sum()
    # calculate R^2
    Rsq = 1 - sum_of_squares_regression / sum_of_squares_total
    if Rsq < config.Tracking_minRsq:
        print(" R^2 too low")
        return None
    # estimate sigma^2 from a chi^2 equivalent
    s_sq = sum_of_squares_regression/(len(image[circle].flatten())-len(params))
    p_cov *= s_sq
    p_cov = p_cov[1:3, 1:3]
    x_res = p_opt[1]
    y_res = p_opt[2]
    return (x_res, y_res), p_cov


guesser_routines = {'Gaussian fit': guess_from_Gaussian}

try:
    import skimage.feature
    logger.info('imported scikit image package')

    def guess_from_blob_dog(image, *args, **kwargs):
        A = skimage.feature.blob_dog(image)
        if not A.shape[0]:
            print(" No blob found")
            return None
        print(' Blobs found', A)
        return (A[0, 1], A[0, 0]), np.diag([2, 2])

    def guess_from_blob_log(image, *args, **kwargs):
        A = skimage.feature.blob_log(image, threshold=0.1)
        if not A.shape[0]:
            print(" No blob found")
            return None
        print(' Blobs found', A)
        return (A[0, 1], A[0, 0]), np.diag([2, 2])

    guesser_routines['Blob dog'] = guess_from_blob_dog
    guesser_routines['Blob log'] = guess_from_blob_log
except ImportError:
    pass


def guesser(npimage, x_in, y_in, radius):
    def failure(reason):
        logger.info(" No guess, because " + reason)
        print(reason)
        return None
    # try to get patch from image around estimated position
    try:
        func = guesser_routines[config.Tracking_guessFunc]
        fit_region_factor = config.Tracking_fitRegionFactor
        x_min, x_max, y_min, y_max = adjust_slice(npimage,
                                                  x_in-fit_region_factor*radius,
                                                  x_in+fit_region_factor*radius+1,
                                                  y_in-fit_region_factor*radius,
                                                  y_in+fit_region_factor*radius+1)
    except IndexError:
        return failure(" Position outside image")
    image = npimage[y_min:y_max, x_min:x_max]
    result = func(image, x_mid=x_in-x_min, y_mid=y_in-y_min, size=radius)
    if result is None:
        return failure(" Fit failed")
    pos, cov = result
    y_res, x_res = pos
    x_res += x_min
    y_res += y_min
    return x_res, y_res, cov


def gaussian2d(height, center_x, center_y, width_x, width_y=None, offset=0):
    """Returns a two dimensional gaussian function with the given parameters"""
    if width_y is None:
        width_y = width_x
    return lambda x, y: np.asarray(height * np.exp(-(((center_x - x) / width_x)**2
                                                     + ((center_y - y) / width_y)**2) / 2)) + \
        offset


def moments(data):
    """ Calculates the moments of 2d data.

        Returns [height, x, y, width_x, width_y],
        the gaussian parameters of a 2D distribution, by calculating its moments.
    """
    total = data.sum()
    X, Y = np.indices(data.shape)
    x = (X*data).sum()/total
    y = (Y*data).sum()/total
    if math.isnan(x):
        x = 0
    if math.isnan(y):
        y = 0
    col = data[:, int(y)]
    width_x = np.sqrt(abs((np.arange(col.size)-y)**2*col).sum()/col.sum())
    row = data[int(x), :]
    width_y = np.sqrt(abs((np.arange(row.size)-x)**2*row).sum()/row.sum())
    height = data.max()
    return [height, x, y, width_x, width_y]


def adjust_slice(image, x_sl_min, x_sl_max, y_sl_min, y_sl_max):
    """ Adjusts slice if it is trying to get pieces outside the image.

        >>> image = np.ones((2, 2))
        >>> adjust_slice(image, 0, 1.5, 0, 2)
        (0, 1, 0, 2)
        >>> adjust_slice(image, -5.5, 2, -0.5, 10)
        (0, 2, 0, 2)
    """
    ymax, xmax = image.shape
    adjusted = False
    indices = [int(x_sl_min), int(x_sl_max), int(y_sl_min), int(y_sl_max)]
    for i, value in enumerate(indices):
        if value < 0:
            indices[i] = 0
            adjusted = True
    for i, value in enumerate(indices):
        if i < 2:
            if value > xmax:
                indices[i] = xmax
                adjusted = True
        else:
            if value > ymax:
                indices[i] = ymax
                adjusted = True
    if adjusted:
        logger.warning(" slice had to be adjusted to fit image.")
    if not int(indices[0] - indices[1]) or not int(indices[2] - indices[3]):
        raise IndexError()
    return tuple(indices)


def calc_distances(shape, x, y, squared=True):
    """ Helper function that returns an array of distances to x, y.

        This array can be useful for fancy indexing of numpy arrays.
        squared: return the squared distance (default: True)
    """
    yind, xind = np.indices(shape)
    distSquare = ((yind - y)**2 + (xind - x)**2)
    if not squared:
        return distSquare**.5
    return distSquare


def signal_to_background(npimage, x, y, radius):
    distSquare = calc_distances(npimage.shape, x, y)
    signal = np.mean(npimage[distSquare <= radius**2])
    # average background intensity over annulus with equal area
    background = np.mean(npimage[np.logical_and(distSquare >= radius**2,
                                                distSquare <= 2 * radius**2)])
    return signal/background


def calc_intensity(npimage, x, y, radius,
                   background_substraction=config.Processing_backgroundSubstractionOn):
    """ Calculates the intensity of a spot.

        npimage: numpy array of intensity values
        x, y: position of the spot
        radius: radius of the spot
        background_substraction: boolean to turn subtraction on/off
    """
    distSquare = calc_distances(npimage.shape, x, y, squared=True)
    intensities = npimage[distSquare <= radius**2]
    intensity = np.sum(intensities)
    if background_substraction:
        # average background intensity over annulus with approximately equal area
        background_intensities = npimage[np.logical_and(distSquare >= radius**2,
                                                        distSquare <= 2 * radius**2)]
        area = len(intensities)
        intensity -= np.mean(background_intensities) * area
    return intensity
PypiClean
/Flask-MAuth-1.1.tar.gz/Flask-MAuth-1.1/docs/classes.rst
Class Reference
***************

flask_mauth
===========

.. automodule:: flask_mauth.auth
    :members:
    :undoc-members:

flask_mauth.cacher
==================

.. automodule:: flask_mauth.cacher.security_token_cacher
    :members:
    :undoc-members:

flask_mauth.mauth
=================

.. automodule:: flask_mauth.mauth.authenticators
    :members:
    :undoc-members:

.. automodule:: flask_mauth.mauth.signature
    :members:
    :undoc-members:

flask_mauth.rsa_public_decrypt
==============================

.. automodule:: flask_mauth.rsa_public_decrypt.rsa_decrypt
    :members:
    :undoc-members:
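.. note::

   These pages are generated with ``sphinx.ext.autodoc``; a minimal sketch of
   the required ``conf.py`` entry (the project's actual configuration may
   differ)::

       extensions = ['sphinx.ext.autodoc']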
PypiClean
/Flask-Edits-0.8.tar.gz/Flask-Edits-0.8/README
Flask-Edits
~~~~~~~~~~~

    "Can't you just, rewrite it to sound more *edgy?*"

Clients blowing up your phone to change some copy on the ``/about`` page?
Enter **Flask-Edits**. Mark sections of your templates with
``{% editable %}`` and their content is exposed in a slick admin panel.
Never worry about tweaking copy again.

.. figure:: http://i.imgur.com/kgAlUAV.png
   :alt: Screenshot

   Screenshot

Installation
^^^^^^^^^^^^

::

    $ pip install flask-edits

Usage
^^^^^

::

    from flask.ext.edits import Edits

    app = Flask(__name__)
    edits = Edits(app)

All edits are saved to the disk as JSON, so configure a path to save the
edits. Edits can be committed to version control along with the rest of the
app.

::

    import os.path as op
    app.config['EDITS_PATH'] = op.join(op.dirname(op.abspath(__file__)), 'edits.json')

Mark sections of your Jinja templates as editable. The section name is
required; it's used as the section label when editing and as the key that
the edits are stored under.

::

    {% editable 'Section name' %}
        Python is a programming language that lets you work quickly
        and integrate systems more effectively.
    {% endeditable %}

**Important:** There is no automatic detection of editable sections (yet).
You have to visit the URL that renders the template to register it as
editable. It will not show up in the admin panel until it has been rendered
with ``render_template``.

Admin
^^^^^

The Flask-Edits admin view is exposed by default at ``/edits``. The base URL
can be changed in the configuration:

::

    app.config['EDITS_URL'] = '/edits'

**Note on security:** Like Flask-Admin, Flask-Edits does not make any
assumptions about the authentication system you might be using. So, by
default, the admin interface is completely open. Protect it behind basic
auth or another form of authentication.

Editing
^^^^^^^

All pages that have registered editable sections are available to edit in
the interface. At this time, only static HTML is supported. Support for
Jinja2 is on the roadmap.

The `Summernote <http://hackerwins.github.io/summernote/>`__ HTML editor is
included but not used by default. To enable it:

::

    app.config['EDITS_SUMMERNOTE'] = True

When a page is saved it instantly updates the Jinja context and writes to
the JSON file on the server. Within a page, multiple sections with the same
name will only show up once in the admin panel, but the edits will be
applied to each section. Clearing a section will revert it to the original
content in the template.

Previews
^^^^^^^^

Preview mode is on by default. Edits will not show up on pages unless
``?preview=true`` is passed in the URL. This allows for easy editing before
content is live. Toggle the preview mode in the admin panel.

If previews are turned off, edits become live as they are saved. Preview
mode can be toggled in the configuration. To turn off previews by default:

::

    app.config['EDITS_PREVIEW'] = False

Turning off previews is recommended for use in production.

Roadmap
^^^^^^^

-  Automatically register editable sections
-  Jinja2 content with context evaluation
-  Multiple database backends
-  In-place page editing
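Minimal example
^^^^^^^^^^^^^^^

A minimal end-to-end sketch (the template name and copy are illustrative,
not from the project)::

    import os.path as op
    from flask import Flask, render_template
    from flask.ext.edits import Edits

    app = Flask(__name__)
    app.config['EDITS_PATH'] = op.join(op.dirname(op.abspath(__file__)), 'edits.json')
    edits = Edits(app)

    @app.route('/')
    def index():
        # index.html contains an {% editable 'Intro' %}...{% endeditable %} block
        return render_template('index.html')

    if __name__ == '__main__':
        app.run(debug=True)

Visit ``/`` once to register the section, then edit it at ``/edits``.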
PypiClean
/AltAnalyze-2.1.3.15.tar.gz/AltAnalyze-2.1.3.15/altanalyze/misopy/misc_utils.py
import os
import sys
import time
import shelve

COMPRESS_PREFIX = "misocomp"


def inv_dict(mydict):
    """
    Reverse key -> val into val -> key.
    """
    new_dict = {}
    for k in mydict:
        new_dict[mydict[k]] = k
    return new_dict


def load_compressed_ids_to_genes(compressed_filename):
    """
    Load mapping from compressed IDs to genes.
    """
    if not os.path.exists(compressed_filename):
        print "Error: %s compressed file does not exist." \
            %(compressed_filename)
        sys.exit(1)
    compressed_ids_to_genes = {}
    # Load mapping from gene IDs to their hashes
    compressed_ids_to_genes = shelve.open(compressed_filename)
    return compressed_ids_to_genes


def is_compressed_name(event_name):
    return str(event_name).startswith(COMPRESS_PREFIX)


def is_compressed_index(index_filename):
    """
    Check if the given index filename uses a compressed (hash)
    ID or not.
    """
    basename = os.path.basename(index_filename)
    if is_compressed_name(basename):
        return True
    return False


def make_dir(dirpath):
    if os.path.isfile(dirpath):
        print "Error: %s is a file!" %(dirpath)
        sys.exit(1)
    # Try to make the directory
    try:
        os.makedirs(dirpath)
    except OSError:
        pass


def pathify(f):
    return os.path.abspath(os.path.expanduser(f))


def which(program):
    """
    Check if program exists on path.
    """
    def is_exe(fpath):
        if not os.path.isfile(fpath):
            return False
        elif not os.access(fpath, os.X_OK):
            # If the file exists but is not executable, warn
            # the user
            print "WARNING: Found %s but it is not executable." %(fpath)
            print "Please ensure %s is executable." %(fpath)
            print "On Unix, use something like: "
            print "  chmod +x %s" %(fpath)
            time.sleep(10)
            return False
        return True

    fpath, fname = os.path.split(program)
    if fpath:
        if is_exe(program):
            return program
    else:
        for path in os.environ["PATH"].split(os.pathsep):
            exe_file = os.path.join(path, program)
            if is_exe(exe_file):
                return exe_file
    return None


def main():
    pass


if __name__ == "__main__":
    main()
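# A small usage sketch (the IDs and paths below are illustrative):
#
#   >>> is_compressed_name("misocomp_ab12")    # has the COMPRESS_PREFIX
#   True
#   >>> is_compressed_name("ENSG00000139618")  # a plain gene ID
#   False
#   >>> pathify("~/miso_output")               # absolute, user-expanded path
#   >>> which("python")                        # full path if on PATH, else None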
PypiClean
/Muntjac-1.1.2.tar.gz/Muntjac-1.1.2/muntjac/ui/panel.py
from warnings import warn

from muntjac.terminal.scrollable import IScrollable

from muntjac.event.action_manager import ActionManager
from muntjac.event import action

from muntjac.ui.vertical_layout import VerticalLayout
from muntjac.ui.abstract_component_container import AbstractComponentContainer
from muntjac.ui import component_container
from muntjac.ui.component import IFocusable
from muntjac.ui.layout import ILayout

from muntjac.event.mouse_events import ClickEvent, IClickListener

from muntjac.terminal.gwt.client.mouse_event_details import MouseEventDetails
from muntjac.terminal.gwt.client.ui.v_panel import VPanel


class Panel(AbstractComponentContainer, IScrollable,
            component_container.IComponentAttachListener,
            component_container.IComponentDetachListener,
            action.INotifier, IFocusable):
    """Panel - a simple single component container.

    @author: Vaadin Ltd.
    @author: Richard Lincoln
    @version: 1.1.2
    """

    CLIENT_WIDGET = None  # ClientWidget(VPanel, LoadStyle.EAGER)

    _CLICK_EVENT = VPanel.CLICK_EVENT_IDENTIFIER

    # Removes extra decorations from the Panel.
    #
    # @deprecated: this style is no longer part of the core framework and this
    #              component, even though most built-in themes implement this
    #              style. Use the constant specified in the theme class file
    #              that you're using, if it provides one, e.g.
    #              L{Reindeer#PANEL_LIGHT} or L{Runo#PANEL_LIGHT}.
    STYLE_LIGHT = 'light'

    def __init__(self, *args):
        """Creates a new panel with caption and/or content. A VerticalLayout
        is used as content by default.

        @param args: tuple of the form
            - (content)
              1. the content for the panel (HTML/XHTML).
            - (caption)
              1. the caption used in the panel (HTML/XHTML).
            - (caption, content)
              1. the caption of the panel.
              2. the content used in the panel (HTML/XHTML).
        """
        super(Panel, self).__init__()

        #: Content of the panel.
        self._content = None

        #: Scroll X position.
        self._scrollOffsetX = 0

        #: Scroll Y position.
        self._scrollOffsetY = 0

        #: Scrolling mode.
        self._scrollable = False

        #: Keeps track of the Actions added to this component, and manages
        #  the painting and handling as well.
        self.actionManager = None

        #: By default the Panel is not in the normal document focus flow and
        #  can only be focused by using the focus()-method. Change this to 0
        #  if you want to have the Panel in the normal focus flow.
        self._tabIndex = -1

        nargs = len(args)
        if nargs == 0:
            Panel.__init__(self, None)
        elif nargs == 1:
            if isinstance(args[0], basestring):
                caption, = args
                Panel.__init__(self, caption, None)
                self.setCaption(caption)
            else:
                content, = args
                self.setContent(content)
                self.setWidth(100, self.UNITS_PERCENTAGE)
        elif nargs == 2:
            caption, content = args
            Panel.__init__(self, content)
            self.setCaption(caption)
        else:
            raise ValueError, 'too many arguments'

    def setCaption(self, caption):
        """Sets the caption of the panel.

        Note that the caption is interpreted as HTML/XHTML and therefore care
        should be taken not to enable HTML injection and XSS attacks using
        panel captions. This behavior may change in future versions.

        @see L{AbstractComponent.setCaption}
        """
        super(Panel, self).setCaption(caption)

    def getLayout(self):
        """Gets the current layout of the panel.

        @return: the current layout of the panel.
        @deprecated: A Panel can now contain a IComponentContainer which is
                    not necessarily a ILayout. Use L{getContent} instead.
        """
        warn('Use getContent() instead', DeprecationWarning)
        if isinstance(self._content, ILayout):
            return self._content
        elif self._content is None:
            return None

        raise ValueError, ('Panel does not contain a ILayout. '
                'Use getContent() instead of getLayout().')

    def setLayout(self, newLayout):
        """Sets the layout of the panel.

        If the given layout is null, a VerticalLayout with margins set is
        used as a default.

        Components from the old layout are not moved to the new layout by
        default. Use functions in the ILayout interface manually.

        @param newLayout:
                   the new layout of the panel.
        @deprecated: A Panel can now contain a IComponentContainer which is
                    not necessarily a ILayout. Use L{setContent} instead.
        """
        self.setContent(newLayout)

    def getContent(self):
        """Returns the content of the Panel."""
        return self._content

    def setContent(self, newContent):
        """Set the content of the Panel. If null is given as the new content
        then a layout is automatically created and set as the content.

        @param newContent: The new content
        """
        # If the content is null we create the default content
        if newContent is None:
            newContent = self.createDefaultContent()

        # if newContent is None:
        #     raise ValueError, "Content cannot be null"

        if newContent == self._content:
            return  # don't set the same content twice

        # detach old content if present
        if self._content is not None:
            self._content.setParent(None)
            self._content.removeListener(self,
                    component_container.IComponentAttachListener)
            self._content.removeListener(self,
                    component_container.IComponentDetachListener)

        # Sets the panel to be parent for the content
        newContent.setParent(self)

        # Sets the new content
        self._content = newContent

        # Adds the event listeners for new content
        newContent.addListener(self,
                component_container.IComponentAttachListener)
        newContent.addListener(self,
                component_container.IComponentDetachListener)

        self._content = newContent

    def createDefaultContent(self):
        """Create a IComponentContainer which is added by default to
        the Panel if the user does not specify any content.
        """
        layout = VerticalLayout()
        # Force margins by default
        layout.setMargin(True)
        return layout

    def paintContent(self, target):
        self._content.paint(target)

        target.addVariable(self, 'tabindex', self.getTabIndex())

        if self.isScrollable():
            target.addVariable(self, 'scrollLeft', self.getScrollLeft())
            target.addVariable(self, 'scrollTop', self.getScrollTop())

        if self.actionManager is not None:
            self.actionManager.paintActions(None, target)

    def requestRepaintAll(self):
        # Panel has odd structure, delegate to layout
        self.requestRepaint()
        if self.getContent() is not None:
            self.getContent().requestRepaintAll()

    def addComponent(self, c):
        """Adds the component into this container.

        @param c: the component to be added.
        @see: L{AbstractComponentContainer.addComponent}
        """
        self._content.addComponent(c)
        # No repaint request is made as we expect the underlying
        # container to request repaints

    def removeComponent(self, c):
        """Removes the component from this container.

        @param c: The component to be removed.
        @see: L{AbstractComponentContainer.removeComponent}
        """
        self._content.removeComponent(c)
        # No repaint request is made as we expect the underlying
        # container to request repaints

    def getComponentIterator(self):
        """Gets the component container iterator for going through all the
        components in the container.

        @return: the iterator of the components inside the container.
        @see: L{IComponentContainer.getComponentIterator}
        """
        return self._content.getComponentIterator()

    def changeVariables(self, source, variables):
        """Called when one or more variables handled by the implementing
        class are changed.

        @see: L{muntjac.terminal.VariableOwner.changeVariables}
        """
        super(Panel, self).changeVariables(source, variables)

        if self._CLICK_EVENT in variables:
            self.fireClick(variables[self._CLICK_EVENT])

        # Get new size
        newWidth = variables.get('width')
        newHeight = variables.get('height')

        if newWidth is not None and newWidth != self.getWidth():
            self.setWidth(newWidth, self.UNITS_PIXELS)

        if newHeight is not None and newHeight != self.getHeight():
            self.setHeight(newHeight, self.UNITS_PIXELS)

        # Scrolling
        newScrollX = variables.get('scrollLeft')
        newScrollY = variables.get('scrollTop')

        if newScrollX is not None and newScrollX != self.getScrollLeft():
            # set internally, not to fire request repaint
            self._scrollOffsetX = newScrollX

        if newScrollY is not None and newScrollY != self.getScrollTop():
            # set internally, not to fire request repaint
            self._scrollOffsetY = newScrollY

        # Actions
        if self.actionManager is not None:
            self.actionManager.handleActions(variables, self)

    def getScrollLeft(self):
        return self._scrollOffsetX

    def getScrollOffsetX(self):
        """@deprecated: use L{getScrollLeft} instead"""
        warn('use getScrollLeft() instead', DeprecationWarning)
        return self.getScrollLeft()

    def getScrollTop(self):
        return self._scrollOffsetY

    def getScrollOffsetY(self):
        """@deprecated: use L{getScrollTop} instead"""
        warn('use getScrollTop() instead', DeprecationWarning)
        return self.getScrollTop()

    def isScrollable(self):
        return self._scrollable

    def setScrollable(self, isScrollingEnabled):
        """Sets the panel as programmatically scrollable.

        Panel is by default not scrollable programmatically with
        L{setScrollLeft} and L{setScrollTop}, so if you use those methods,
        you need to enable scrolling with this method. Components that
        extend Panel may have a different default for the programmatic
        scrollability.

        @see: L{IScrollable.setScrollable}
        """
        if self._scrollable != isScrollingEnabled:
            self._scrollable = isScrollingEnabled
            self.requestRepaint()

    def setScrollLeft(self, pixelsScrolled):
        """Sets the horizontal scroll position.

        Setting the horizontal scroll position with this method requires
        that programmatic scrolling of the component has been enabled. For
        Panel it is disabled by default, so you have to call
        L{setScrollable}. Components extending Panel may have a different
        default for programmatic scrollability.

        @see: L{IScrollable.setScrollable}
        @see: L{setScrollable}
        """
        if pixelsScrolled < 0:
            raise ValueError, 'Scroll offset must be at least 0'

        if self._scrollOffsetX != pixelsScrolled:
            self._scrollOffsetX = pixelsScrolled
            self.requestRepaint()

    def setScrollOffsetX(self, pixels):
        """@deprecated: use setScrollLeft() method instead"""
        warn('use setScrollLeft() method instead', DeprecationWarning)
        self.setScrollLeft(pixels)

    def setScrollTop(self, pixelsScrolledDown):
        """Sets the vertical scroll position.

        Setting the vertical scroll position with this method requires that
        programmatic scrolling of the component has been enabled. For Panel
        it is disabled by default, so you have to call L{setScrollable}.
        Components extending Panel may have a different default for
        programmatic scrollability.

        @see: L{IScrollable.setScrollTop}
        @see: L{setScrollable}
        """
        if pixelsScrolledDown < 0:
            raise ValueError, 'Scroll offset must be at least 0'

        if self._scrollOffsetY != pixelsScrolledDown:
            self._scrollOffsetY = pixelsScrolledDown
            self.requestRepaint()

    def setScrollOffsetY(self, pixels):
        """@deprecated: use setScrollTop() method instead"""
        warn('use setScrollTop() method instead', DeprecationWarning)
        self.setScrollTop(pixels)

    def replaceComponent(self, oldComponent, newComponent):
        self._content.replaceComponent(oldComponent, newComponent)

    def componentAttachedToContainer(self, event):
        """A new component is attached to container.

        @see: L{IComponentAttachListener.componentAttachedToContainer}
        """
        if event.getContainer() == self._content:
            self.fireComponentAttachEvent(event.getAttachedComponent())

    def componentDetachedFromContainer(self, event):
        """A component has been detached from container.

        @see: L{IComponentDetachListener.componentDetachedFromContainer}
        """
        if event.getContainer() == self._content:
            self.fireComponentDetachEvent(event.getDetachedComponent())

    def attach(self):
        """Notifies the component that it is connected to an application.

        @see: L{IComponent.attach}
        """
        # can't call parent here as Panel's hierarchy is a hack
        self.requestRepaint()

        if self._content is not None:
            self._content.attach()

    def detach(self):
        """Notifies the component that it is detached from the application.

        @see: L{IComponent.detach}
        """
        # can't call parent here as Panel's hierarchy is a hack
        if self._content is not None:
            self._content.detach()

    def removeAllComponents(self):
        """Removes all components from this container.

        @see: L{IComponentContainer.removeAllComponents}
        """
        self._content.removeAllComponents()

    def getActionManager(self):
        if self.actionManager is None:
            self.actionManager = ActionManager(self)
        return self.actionManager

    def addAction(self, action):
        self.getActionManager().addAction(action)

    def removeAction(self, action):
        if self.actionManager is not None:
            self.actionManager.removeAction(action)

    def addActionHandler(self, actionHandler):
        self.getActionManager().addActionHandler(actionHandler)

    def removeActionHandler(self, actionHandler):
        if self.actionManager is not None:
            self.actionManager.removeActionHandler(actionHandler)

    def removeAllActionHandlers(self):
        """Removes all action handlers"""
        if self.actionManager is not None:
            self.actionManager.removeAllActionHandlers()

    def addListener(self, listener, iface=None):
        """Add a click listener to the Panel. The listener is called whenever
        the user clicks inside the Panel. Also when the click targets a
        component inside the Panel, provided the targeted component does not
        prevent the click event from propagating.

        Use L{removeListener} to remove the listener.

        @param listener: The listener to add
        """
        if (isinstance(listener, IClickListener) and
                (iface is None or issubclass(iface, IClickListener))):
            self.registerListener(self._CLICK_EVENT, ClickEvent,
                    listener, IClickListener.clickMethod)

        super(Panel, self).addListener(listener, iface)

    def addCallback(self, callback, eventType=None, *args):
        if eventType is None:
            eventType = callback._eventType

        if issubclass(eventType, ClickEvent):
            self.registerCallback(ClickEvent, callback,
                    self._CLICK_EVENT, *args)
        else:
            super(Panel, self).addCallback(callback, eventType, *args)

    def removeListener(self, listener, iface=None):
        """Remove a click listener from the Panel. The listener should
        earlier have been added using L{addListener}.

        @param listener: The listener to remove
        """
        if (isinstance(listener, IClickListener) and
                (iface is None or issubclass(iface, IClickListener))):
            self.withdrawListener(self._CLICK_EVENT, ClickEvent, listener)

        super(Panel, self).removeListener(listener, iface)

    def removeCallback(self, callback, eventType=None):
        if eventType is None:
            eventType = callback._eventType

        if issubclass(eventType, ClickEvent):
            self.withdrawCallback(ClickEvent, callback, self._CLICK_EVENT)
        else:
            super(Panel, self).removeCallback(callback, eventType)

    def fireClick(self, parameters):
        """Fire a click event to all click listeners.

        @param parameters: The raw "value" of the variable change from the
                    client side
        """
        params = parameters.get('mouseDetails')
        mouseDetails = MouseEventDetails.deSerialize(params)
        self.fireEvent(ClickEvent(self, mouseDetails))

    def getTabIndex(self):
        return self._tabIndex

    def setTabIndex(self, tabIndex):
        self._tabIndex = tabIndex
        self.requestRepaint()

    def focus(self):
        """Moves keyboard focus to the component.

        @see: L{IFocusable.focus}
        """
        super(Panel, self).focus()
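# A minimal usage sketch (the Label import path follows Muntjac's public API;
# the widget text is illustrative and a running Muntjac application is assumed):
#
#   from muntjac.api import Label
#   panel = Panel('Status')
#   panel.setScrollable(True)
#   panel.addComponent(Label('Hello'))
#   panel.setScrollTop(20)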
PypiClean
/FEADRE_AI-1.0.7.tar.gz/FEADRE_AI-1.0.7/FEADRE_AI/ai/fmodels/model_modules.py
import torch from torch import nn import torch.nn.functional as F import numpy as np from FEADRE_AI.ai.fmodels.model_subassembly import FConv2d class Bottleneck(nn.Module): # Standard bottleneck def __init__(self, in_ch, dilation=1, e=0.5): super(Bottleneck, self).__init__() inter_ch = int(in_ch * e) self.branch = nn.Sequential( FConv2d(in_ch, inter_ch, k=1, act='relu', is_bias=False), FConv2d(inter_ch, inter_ch, k=3, act='relu', p=dilation, d=dilation, is_bias=False), FConv2d(inter_ch, in_ch, k=1, act='relu', is_bias=False) ) def forward(self, x): return x + self.branch(x) class DilateEncoder(nn.Module): """ DilateEncoder 与SPP类似加在C5后面 """ def __init__(self, in_ch, out_ch, dilation_list=[2, 4, 6, 8]): super(DilateEncoder, self).__init__() self.projector = nn.Sequential( FConv2d(in_ch, out_ch, k=1, act='identity', is_bias=False), FConv2d(out_ch, out_ch, k=3, p=1, act='identity', is_bias=False) ) encoders = [] for d in dilation_list: encoders.append(Bottleneck(in_ch=out_ch, dilation=d)) self.encoders = nn.Sequential(*encoders) def forward(self, x): x = self.projector(x) x = self.encoders(x) return x class UpSample(nn.Module): def __init__(self, size=None, scale_factor=None, mode='nearest', align_corner=None): super(UpSample, self).__init__() self.size = size self.scale_factor = scale_factor self.mode = mode self.align_corner = align_corner def forward(self, x): return torch.nn.functional.interpolate(x, size=self.size, scale_factor=self.scale_factor, mode=self.mode, align_corners=self.align_corner) class ResizeConv(nn.Module): def __init__(self, in_ch, out_ch, size=None, scale_factor=None, mode='nearest', align_corner=None): super(ResizeConv, self).__init__() self.upsample = UpSample(size=size, scale_factor=scale_factor, mode=mode, align_corner=align_corner) self.conv = FConv2d(in_ch, out_ch, k=1, act='relu', is_bias=False) def forward(self, x): x = self.conv(self.upsample(x)) return x class SPPv2(torch.nn.Module): ''' 同尺寸同维 多池化混合 通道增加4倍 尺寸不变 一个输入 由多个尺寸的核 池化后 再进行堆叠 增加感受野,分离出最显著的上下文特征 ''' def __init__(self, seq='asc'): super(SPPv2, self).__init__() assert seq in ['desc', 'asc'] self.seq = seq def __call__(self, x): x_1 = x x_2 = F.max_pool2d(input=x, kernel_size=5, stride=1, padding=2) # x_3 = F.max_pool2d(x, pool_size, 1, pool_size//2) x_3 = F.max_pool2d(x, 9, 1, 4) x_4 = F.max_pool2d(x, 13, 1, 6) if self.seq == 'desc': out = torch.cat([x_4, x_3, x_2, x_1], dim=1) else: out = torch.cat([x_1, x_2, x_3, x_4], dim=1) return out class SPPv3(torch.nn.Module): ''' SPPv2 加入通道 压缩转换 参数量更少 ''' def __init__(self, in_channels, seq='asc'): super(SPPv3, self).__init__() assert seq in ['desc', 'asc'] self.seq = seq _t = in_channels // 2 self.cbs1 = FConv2d(in_channels, _t, k=1, act='silu') self.cbs2 = FConv2d(_t * (3 + 1), in_channels, k=1, act='silu') def __call__(self, x): x = self.cbs1(x) x_1 = x x_2 = F.max_pool2d(input=x, kernel_size=5, stride=1, padding=2) x_3 = F.max_pool2d(x, 9, 1, 4) x_4 = F.max_pool2d(x, 13, 1, 6) if self.seq == 'desc': out = torch.cat([x_4, x_3, x_2, x_1], dim=1) else: out = torch.cat([x_1, x_2, x_3, x_4], dim=1) out = self.cbs2(out) return out class SELayer(nn.Module): def __init__(self, channel, reduction=16): super(SELayer, self).__init__() self.avg_pool = nn.AdaptiveAvgPool2d(1) self.fc = nn.Sequential( nn.Linear(channel, channel // reduction, bias=False), nn.ReLU(inplace=True), nn.Linear(channel // reduction, channel, bias=False), nn.Sigmoid() ) def forward(self, x): b, c, _, _ = x.shape_hwc() y = self.avg_pool(x).view(b, c) y = self.fc(y).view(b, c, 1, 1) return x * 
y.expand_as(x) class ChannelAttention(nn.Module): ''' CBAM 通道注意力机制 ''' def __init__(self, in_planes, rotio=16): super(ChannelAttention, self).__init__() self.avg_pool = nn.AdaptiveAvgPool2d(1) self.max_pool = nn.AdaptiveMaxPool2d(1) self.sharedMLP = nn.Sequential( nn.Conv2d(in_planes, in_planes // rotio, 1, bias=False), nn.ReLU(), nn.Conv2d(in_planes // rotio, in_planes, 1, bias=False)) self.sigmoid = nn.Sigmoid() def forward(self, x): avgout = self.sharedMLP(self.avg_pool(x)) maxout = self.sharedMLP(self.max_pool(x)) return self.sigmoid(avgout + maxout) class SpatialAttention(nn.Module): ''' CBAM 空间注意力机制 ''' def __init__(self, kernel_size=7): super(SpatialAttention, self).__init__() assert kernel_size in (3, 7), "kernel size must be 3 or 7" padding = 3 if kernel_size == 7 else 1 self.conv = nn.Conv2d(2, 1, kernel_size, padding=padding, bias=False) self.sigmoid = nn.Sigmoid() def forward(self, x): avgout = torch.mean(x, dim=1, keepdim=True) maxout, _ = torch.max(x, dim=1, keepdim=True) x = torch.cat([avgout, maxout], dim=1) x = self.conv(x) return self.sigmoid(x) class SAM(nn.Module): """ conv -> sigmoid * x -> 对原值进行缩放 Parallel CBAM from yolov4""" def __init__(self, in_ch): super(SAM, self).__init__() self.conv = nn.Sequential( nn.Conv2d(in_ch, in_ch, 1), nn.Sigmoid() ) def forward(self, x): """ Spatial Attention Module """ x_attention = self.conv(x) return x * x_attention class _Bottleneck(nn.Module): ''' Standard bottleneck 结构见 Bottleneck.jpg 减小通道后 恢复 ''' def __init__(self, c1, c2, shortcut=True, g=1, e=0.5): # ch_in, ch_out, shortcut, groups, expansion super(_Bottleneck, self).__init__() c_ = int(c2 * e) # hidden channels self.cv1 = FConv2d(c1, c_, k=1, is_bias=False) self.cv2 = FConv2d(c_, c2, k=3, p=1, g=g, is_bias=False) self.add = shortcut and c1 == c2 def forward(self, x): return x + self.cv2(self.cv1(x)) if self.add else self.cv2(self.cv1(x)) class BottleneckCSP(nn.Module): ''' 结构见 BottleneckCSP.jpg BottleneckCSP-CSP瓶颈层 CSP Bottleneck https://github.com/WongKinYiu/CrossStagePartialNetworks ''' def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5): # ch_in, ch_out, number, shortcut, groups, expansion super(BottleneckCSP, self).__init__() c_ = int(c2 * e) # hidden channels self.cv1 = FConv2d(c1, c_, k=1, is_bias=False) self.cv2 = nn.Conv2d(c1, c_, kernel_size=1, bias=False) self.cv3 = nn.Conv2d(c_, c_, kernel_size=1, bias=False) self.cv4 = FConv2d(2 * c_, c2, k=1, is_bias=False) self.bn = nn.BatchNorm2d(2 * c_) # applied to cat(cv2, cv3) self.act = nn.LeakyReLU(0.1, inplace=True) self.m = nn.Sequential(*[_Bottleneck(c_, c_, shortcut, g, e=1.0) for _ in range(n)]) def forward(self, x): y1 = self.cv3(self.m(self.cv1(x))) y2 = self.cv2(x) return self.cv4(self.act(self.bn(torch.cat((y1, y2), dim=1)))) class DCNv2(torch.nn.Module): ''' 咩酱自实现的DCNv2,咩酱的得意之作,Pytorch的纯python接口实现,效率极高。 ''' def __init__(self, input_dim, filters, filter_size, stride=1, padding=0, bias_attr=False, distribution='normal', gain=1): super(DCNv2, self).__init__() assert distribution in ['uniform', 'normal'] self.input_dim = input_dim self.filters = filters self.filter_size = filter_size self.stride = stride self.padding = padding self.conv_offset = torch.nn.Conv2d(input_dim, filter_size * filter_size * 3, kernel_size=filter_size, stride=stride, padding=padding, bias=True) # 初始化代码摘抄自SOLOv2 mmcv/cnn/weight_init.py 里的代码 torch.nn.init.constant_(self.conv_offset.weight, 0.0) torch.nn.init.constant_(self.conv_offset.bias, 0.0) self.sigmoid = torch.nn.Sigmoid() self.dcn_weight = torch.nn.Parameter(torch.randn(filters, 
                                                  input_dim, filter_size, filter_size))
        self.dcn_bias = None
        if bias_attr:
            self.dcn_bias = torch.nn.Parameter(torch.randn(filters, ))
            torch.nn.init.constant_(self.dcn_bias, 0.0)
        if distribution == 'uniform':
            torch.nn.init.xavier_uniform_(self.dcn_weight, gain=gain)
        else:
            torch.nn.init.xavier_normal_(self.dcn_weight, gain=gain)

    def gather_nd(self, input, index):
        # trailing dims that are kept (not merged)
        keep_dims = []
        # dims that are merged
        first_dims = []
        dim_idx = []
        dims = index.shape[1]
        for i, number in enumerate(input.shape):
            if i < dims:
                dim_ = index[:, i]
                dim_idx.append(dim_)
                first_dims.append(number)
            else:
                keep_dims.append(number)

        # copy, so the last dim of the input index is left untouched (avoids side effects)
        target_dix = torch.zeros((index.shape[0],), dtype=torch.long, device=input.device) + dim_idx[-1]
        new_shape = (-1,) + tuple(keep_dims)
        input2 = torch.reshape(input, new_shape)
        mul2 = 1
        for i in range(dims - 1, 0, -1):
            mul2 *= first_dims[i]
            target_dix += mul2 * dim_idx[i - 1]
        o = input2[target_dix]
        return o

    def forward(self, x):
        filter_size = self.filter_size
        stride = self.stride
        padding = self.padding
        dcn_weight = self.dcn_weight
        dcn_bias = self.dcn_bias

        offset_mask = self.conv_offset(x)
        offset = offset_mask[:, :filter_size ** 2 * 2, :, :]
        mask = offset_mask[:, filter_size ** 2 * 2:, :, :]
        mask = self.sigmoid(mask)

        # ===================================
        N, in_C, H, W = x.shape
        out_C, in_C, kH, kW = dcn_weight.shape
        out_W = (W + 2 * padding - (kW - 1)) // stride
        out_H = (H + 2 * padding - (kH - 1)) // stride

        # 1. Pad the image x to obtain the padded image pad_x.
        pad_x_H = H + padding * 2 + 1
        pad_x_W = W + padding * 2 + 1
        pad_x = torch.zeros((N, in_C, pad_x_H, pad_x_W), dtype=torch.float32, device=x.device)
        pad_x[:, :, padding:padding + H, padding:padding + W] = x

        # positions of the kernel center points inside pad_x
        rows = torch.arange(0, out_W, dtype=torch.float32, device=dcn_weight.device) * stride + padding
        cols = torch.arange(0, out_H, dtype=torch.float32, device=dcn_weight.device) * stride + padding
        rows = rows[np.newaxis, np.newaxis, :, np.newaxis, np.newaxis].repeat((1, out_H, 1, 1, 1))
        cols = cols[np.newaxis, :, np.newaxis, np.newaxis, np.newaxis].repeat((1, 1, out_W, 1, 1))
        start_pos_yx = torch.cat([cols, rows], dim=-1)  # [1, out_H, out_W, 1, 2]   kernel-center positions in pad_x
        start_pos_yx = start_pos_yx.repeat((N, 1, 1, kH * kW, 1))  # [N, out_H, out_W, kH*kW, 2]   kernel-center positions in pad_x
        start_pos_y = start_pos_yx[:, :, :, :, :1]  # [N, out_H, out_W, kH*kW, 1]   kernel-center y positions in pad_x
        start_pos_x = start_pos_yx[:, :, :, :, 1:]  # [N, out_H, out_W, kH*kW, 1]   kernel-center x positions in pad_x

        # offsets inside the kernel
        half_W = (kW - 1) // 2
        half_H = (kH - 1) // 2
        rows2 = torch.arange(0, kW, dtype=torch.float32, device=dcn_weight.device) - half_W
        cols2 = torch.arange(0, kH, dtype=torch.float32, device=dcn_weight.device) - half_H
        rows2 = rows2[np.newaxis, :, np.newaxis].repeat((kH, 1, 1))
        cols2 = cols2[:, np.newaxis, np.newaxis].repeat((1, kW, 1))
        filter_inner_offset_yx = torch.cat([cols2, rows2], dim=-1)  # [kH, kW, 2]   offsets inside the kernel
        filter_inner_offset_yx = torch.reshape(filter_inner_offset_yx, (1, 1, 1, kH * kW, 2))  # [1, 1, 1, kH*kW, 2]
        filter_inner_offset_yx = filter_inner_offset_yx.repeat(
            (N, out_H, out_W, 1, 1))  # [N, out_H, out_W, kH*kW, 2]   offsets inside the kernel
        filter_inner_offset_y = filter_inner_offset_yx[:, :, :, :, :1]  # [N, out_H, out_W, kH*kW, 1]
        filter_inner_offset_x = filter_inner_offset_yx[:, :, :, :, 1:]  # [N, out_H, out_W, kH*kW, 1]

        mask = mask.permute(0, 2, 3, 1)  # [N, out_H, out_W, kH*kW*1]
        offset = offset.permute(0, 2, 3, 1)  # [N, out_H, out_W, kH*kW*2]
        offset_yx = torch.reshape(offset, (N, out_H, out_W, kH * kW, 2))  # [N, out_H, out_W, kH*kW, 2]
        offset_y = offset_yx[:, :, :, :, :1]  # [N, out_H, out_W, kH*kW, 1]
        offset_x = offset_yx[:, :, :, :, 1:]  # [N, out_H, out_W, kH*kW, 1]

        # Final sampling positions. Not quite final yet: to implement DCNv2 faster, a per-image
        # offset is later added to the y coordinate (the row index) so that everything can be
        # gathered in one shot instead of looping over each image in the batch.
        start_pos_y.requires_grad = False
        start_pos_x.requires_grad = False
        filter_inner_offset_y.requires_grad = False
        filter_inner_offset_x.requires_grad = False
        pos_y = start_pos_y + filter_inner_offset_y + offset_y  # [N, out_H, out_W, kH*kW, 1]
        pos_x = start_pos_x + filter_inner_offset_x + offset_x  # [N, out_H, out_W, kH*kW, 1]
        pos_y = torch.clamp(pos_y, 0.0, H + padding * 2 - 1.0)
        pos_x = torch.clamp(pos_x, 0.0, W + padding * 2 - 1.0)
        ytxt = torch.cat([pos_y, pos_x], -1)  # [N, out_H, out_W, kH*kW, 2]

        pad_x = pad_x.permute(0, 2, 3, 1)  # [N, pad_x_H, pad_x_W, C]
        pad_x = torch.reshape(pad_x, (N * pad_x_H, pad_x_W, in_C))  # [N*pad_x_H, pad_x_W, C]

        ytxt = torch.reshape(ytxt, (N * out_H * out_W * kH * kW, 2))  # [N*out_H*out_W*kH*kW, 2]
        _yt = ytxt[:, :1]  # [N*out_H*out_W*kH*kW, 1]
        _xt = ytxt[:, 1:]  # [N*out_H*out_W*kH*kW, 1]

        # To avoid a for-loop over each image in the batch, add a per-image offset to the
        # y coordinate (the row index) so the pixels of interest can be extracted in one shot.
        row_offset = torch.arange(0, N, dtype=torch.float32, device=dcn_weight.device) * pad_x_H  # [N, ]
        row_offset = row_offset[:, np.newaxis, np.newaxis].repeat(
            (1, out_H * out_W * kH * kW, 1))  # [N, out_H*out_W*kH*kW, 1]
        row_offset = torch.reshape(row_offset, (N * out_H * out_W * kH * kW, 1))  # [N*out_H*out_W*kH*kW, 1]
        row_offset.requires_grad = False
        _yt += row_offset

        _y1 = torch.floor(_yt)
        _x1 = torch.floor(_xt)
        _y2 = _y1 + 1.0
        _x2 = _x1 + 1.0
        _y1x1 = torch.cat([_y1, _x1], -1)
        _y1x2 = torch.cat([_y1, _x2], -1)
        _y2x1 = torch.cat([_y2, _x1], -1)
        _y2x2 = torch.cat([_y2, _x2], -1)

        _y1x1_int = _y1x1.long()  # [N*out_H*out_W*kH*kW, 2]
        v1 = self.gather_nd(pad_x, _y1x1_int)  # [N*out_H*out_W*kH*kW, in_C]
        _y1x2_int = _y1x2.long()  # [N*out_H*out_W*kH*kW, 2]
        v2 = self.gather_nd(pad_x, _y1x2_int)  # [N*out_H*out_W*kH*kW, in_C]
        _y2x1_int = _y2x1.long()  # [N*out_H*out_W*kH*kW, 2]
        v3 = self.gather_nd(pad_x, _y2x1_int)  # [N*out_H*out_W*kH*kW, in_C]
        _y2x2_int = _y2x2.long()  # [N*out_H*out_W*kH*kW, 2]
        v4 = self.gather_nd(pad_x, _y2x2_int)  # [N*out_H*out_W*kH*kW, in_C]

        # bilinear interpolation weights
        lh = _yt - _y1  # [N*out_H*out_W*kH*kW, 1]
        lw = _xt - _x1
        hh = 1 - lh
        hw = 1 - lw
        w1 = hh * hw
        w2 = hh * lw
        w3 = lh * hw
        w4 = lh * lw
        value = w1 * v1 + w2 * v2 + w3 * v3 + w4 * v4  # [N*out_H*out_W*kH*kW, in_C]
        mask = torch.reshape(mask, (N * out_H * out_W * kH * kW, 1))
        value = value * mask
        value = torch.reshape(value, (N, out_H, out_W, kH, kW, in_C))
        new_x = value.permute(0, 1, 2, 5, 3, 4)  # [N, out_H, out_W, in_C, kH, kW]

        # Old approach: element-wise multiplication -- slow!
        # new_x = torch.reshape(new_x, (N, out_H, out_W, in_C * kH * kW))   # [N, out_H, out_W, in_C * kH * kW]
        # new_x = new_x.permute(0, 3, 1, 2)   # [N, in_C*kH*kW, out_H, out_W]
        # exp_new_x = new_x.unsqueeze(1)   # add one dim, [N, 1, in_C*kH*kW, out_H, out_W]
        # reshape_w = torch.reshape(dcn_weight, (1, out_C, in_C * kH * kW, 1, 1))   # [1, out_C, in_C*kH*kW, 1, 1]
        # out = exp_new_x * reshape_w   # element-wise multiply, [N, out_C, in_C*kH*kW, out_H, out_W]
        # out = out.sum((2,))   # sum over dim 2, [N, out_C, out_H, out_W]

        # New approach: replace the element-wise multiplication with an equivalent 1x1 convolution -- fast!
        new_x = torch.reshape(new_x, (N, out_H, out_W, in_C * kH * kW))  # [N, out_H, out_W, in_C * kH * kW]
        new_x = new_x.permute(0, 3, 1, 2)  # [N, in_C*kH*kW, out_H, out_W]
        rw = torch.reshape(dcn_weight, (
            out_C, in_C * kH * kW, 1, 1))  # [out_C, in_C, kH, kW] -> [out_C, in_C*kH*kW, 1, 1], i.e. a 1x1 kernel
        out = F.conv2d(new_x, rw, bias=dcn_bias, stride=1)  # [N, out_C, out_H, out_W]; apply the optional bias (None when bias_attr is False)
        return out


class Focus(nn.Module):
    '''
    Focus wh information into c-space, from yolo4.
    The inverse of sub-pixel convolution: it expands the channels 4x while
    halving the spatial size, after which a CBS block converts the channel count.
    Taking every other pixel horizontally and vertically yields four copies
    (2x downsampling), which are stacked along the channel dimension (4x channels).
    Each 2x2 patch forms a group numbered 1-4; all the 1s are flattened together,
    all the 2s, and so on -- folding the width w and height h information into c-space.
    '''

    def __init__(self, c1, c2, k=1):  # ch_in, ch_out, kernel, stride, padding, groups
        super(Focus, self).__init__()
        self.conv = FConv2d(c1 * 4, c2, k=1, act='silu')
        # self.contract = Contract(gain=2)

    def forward(self, x):  # x(b,c,w,h) -> y(b,4c,w/2,h/2)
        # sample with a step of 2
        return self.conv(torch.cat([x[..., ::2, ::2], x[..., 1::2, ::2],
                                    x[..., ::2, 1::2], x[..., 1::2, 1::2]], 1))
        # return self.conv(self.contract(x))


class DeConv(nn.Module):
    '''
    Transposed convolution; increases the spatial size.
    '''

    def __init__(self, in_channels, out_channels, ksize, stride=2, leaky=False):
        super(DeConv, self).__init__()
        # deconv basic config
        if ksize == 4:
            padding = 1
            output_padding = 0
        elif ksize == 3:
            padding = 1
            output_padding = 1
        elif ksize == 2:
            padding = 0
            output_padding = 0
        else:
            raise ValueError('ksize must be 2, 3 or 4')
        self.convs = nn.Sequential(
            nn.ConvTranspose2d(in_channels, out_channels, ksize, stride=stride,
                               padding=padding, output_padding=output_padding),
            nn.BatchNorm2d(out_channels),
            nn.LeakyReLU(0.1, inplace=True) if leaky else nn.ReLU(inplace=True)
        )

    def forward(self, x):
        return self.convs(x)
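
# A minimal smoke test for DeConv, added as an illustration and not part of
# the original package. With ksize=4 and stride=2 the transposed convolution
# should exactly double the spatial resolution: (20-1)*2 - 2*1 + 4 = 40.
if __name__ == '__main__':
    deconv = DeConv(in_channels=64, out_channels=32, ksize=4, stride=2)
    feat = torch.randn(1, 64, 20, 20)
    print(deconv(feat).shape)  # expected: torch.Size([1, 32, 40, 40])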
PypiClean
/LibRecommender-1.3.0-cp38-cp38-macosx_10_9_x86_64.whl/libreco/algorithms/ncf.py
from ..bases import ModelMeta, TfBase
from ..layers import dense_nn, embedding_lookup, tf_dense
from ..tfops import dropout_config, reg_config, tf
from ..torchops import hidden_units_config


class NCF(TfBase, metaclass=ModelMeta):
    """*Neural Collaborative Filtering* algorithm.

    Parameters
    ----------
    task : {'rating', 'ranking'}
        Recommendation task. See :ref:`Task`.
    data_info : :class:`~libreco.data.DataInfo` object
        Object that contains useful information for training and inference.
    loss_type : {'cross_entropy', 'focal'}, default: 'cross_entropy'
        Loss for model training.
    embed_size : int, default: 16
        Vector size of embeddings.
    n_epochs : int, default: 20
        Number of epochs for training.
    lr : float, default: 0.01
        Learning rate for training.
    lr_decay : bool, default: False
        Whether to use learning rate decay.
    epsilon : float, default: 1e-5
        A small constant added to the denominator to improve numerical stability
        in the Adam optimizer.
        According to the `official comment
        <https://github.com/tensorflow/tensorflow/blob/v1.15.0/tensorflow/python/training/adam.py#L64>`_,
        the default value of `1e-8` for `epsilon` is generally not good, so here
        we choose `1e-5`. Users can try tuning this hyperparameter if the
        training is unstable.
    reg : float or None, default: None
        Regularization parameter, must be non-negative or None.
    batch_size : int, default: 256
        Batch size for training.
    sampler : {'random', 'unconsumed', 'popular'}, default: 'random'
        Negative sampling strategy.

        - ``'random'`` means random sampling.
        - ``'unconsumed'`` samples items that the target user did not consume before.
        - ``'popular'`` has a higher probability to sample popular items as negative samples.

        .. versionadded:: 1.1.0

    num_neg : int, default: 1
        Number of negative samples for each positive sample, only used in `ranking` task.
    use_bn : bool, default: True
        Whether to use batch normalization.
    dropout_rate : float or None, default: None
        Probability of an element to be zeroed. If it is None, dropout is not used.
    hidden_units : int, list of int or tuple of (int,), default: (128, 64, 32)
        Number of layers and corresponding layer size in MLP.

        .. versionchanged:: 1.0.0
           Accept type of ``int``, ``list`` or ``tuple``, instead of ``str``.

    seed : int, default: 42
        Random seed.
    lower_upper_bound : tuple or None, default: None
        Lower and upper score bound for `rating` task.
    tf_sess_config : dict or None, default: None
        Optional TensorFlow session config, see `ConfigProto options
        <https://github.com/tensorflow/tensorflow/blob/v2.10.0/tensorflow/core/protobuf/config.proto#L431>`_.

    References
    ----------
    *Xiangnan He et al.* `Neural Collaborative Filtering
    <https://arxiv.org/pdf/1708.05031.pdf>`_.
""" user_variables = ("embedding/user_embeds_var",) item_variables = ("embedding/item_embeds_var",) def __init__( self, task, data_info, loss_type="cross_entropy", embed_size=16, n_epochs=20, lr=0.01, lr_decay=False, epsilon=1e-5, reg=None, batch_size=256, sampler="random", num_neg=1, use_bn=True, dropout_rate=None, hidden_units=(128, 64, 32), seed=42, lower_upper_bound=None, tf_sess_config=None, ): super().__init__(task, data_info, lower_upper_bound, tf_sess_config) self.all_args = locals() self.loss_type = loss_type self.embed_size = embed_size self.n_epochs = n_epochs self.lr = lr self.lr_decay = lr_decay self.epsilon = epsilon self.reg = reg_config(reg) self.batch_size = batch_size self.sampler = sampler self.num_neg = num_neg self.use_bn = use_bn self.dropout_rate = dropout_config(dropout_rate) self.hidden_units = hidden_units_config(hidden_units) self.seed = seed def build_model(self): tf.set_random_seed(self.seed) self.user_indices = tf.placeholder(tf.int32, shape=[None]) self.item_indices = tf.placeholder(tf.int32, shape=[None]) self.labels = tf.placeholder(tf.float32, shape=[None]) self.is_training = tf.placeholder_with_default(False, shape=[]) user_embeds = embedding_lookup( indices=self.user_indices, var_name="user_embeds_var", var_shape=(self.n_users + 1, self.embed_size), initializer=tf.glorot_uniform_initializer(), regularizer=self.reg, ) item_embeds = embedding_lookup( indices=self.item_indices, var_name="item_embeds_var", var_shape=(self.n_items + 1, self.embed_size), initializer=tf.glorot_uniform_initializer(), regularizer=self.reg, ) gmf_layer = tf.multiply(user_embeds, item_embeds) mlp_input = tf.concat([user_embeds, item_embeds], axis=1) mlp_layer = dense_nn( mlp_input, self.hidden_units, use_bn=self.use_bn, dropout_rate=self.dropout_rate, is_training=self.is_training, ) concat_layer = tf.concat([gmf_layer, mlp_layer], axis=1) self.output = tf.reshape(tf_dense(units=1)(concat_layer), [-1]) self.serving_topk = self.build_topk(self.output)
PypiClean
/FreeClimb-4.5.0-py3-none-any.whl/freeclimb/rest.py
import io import json import logging import re import ssl from urllib.parse import urlencode from urllib.parse import urlparse from urllib.request import proxy_bypass_environment import urllib3 import ipaddress from freeclimb.exceptions import ApiException, UnauthorizedException, ForbiddenException, NotFoundException, ServiceException, ApiValueError logger = logging.getLogger(__name__) class RESTResponse(io.IOBase): def __init__(self, resp): self.urllib3_response = resp self.status = resp.status self.reason = resp.reason self.data = resp.data def getheaders(self): """Returns a dictionary of the response headers.""" return self.urllib3_response.getheaders() def getheader(self, name, default=None): """Returns a given response header.""" return self.urllib3_response.getheader(name, default) class RESTClientObject(object): def __init__(self, configuration, pools_size=4, maxsize=None): # urllib3.PoolManager will pass all kw parameters to connectionpool # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501 # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501 # maxsize is the number of requests to host that are allowed in parallel # noqa: E501 # Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501 # cert_reqs if configuration.verify_ssl: cert_reqs = ssl.CERT_REQUIRED else: cert_reqs = ssl.CERT_NONE addition_pool_args = {} if configuration.assert_hostname is not None: addition_pool_args['assert_hostname'] = configuration.assert_hostname # noqa: E501 if configuration.retries is not None: addition_pool_args['retries'] = configuration.retries if configuration.socket_options is not None: addition_pool_args['socket_options'] = configuration.socket_options if maxsize is None: if configuration.connection_pool_maxsize is not None: maxsize = configuration.connection_pool_maxsize else: maxsize = 4 # https pool manager if configuration.proxy and not should_bypass_proxies(configuration.host, no_proxy=configuration.no_proxy or ''): self.pool_manager = urllib3.ProxyManager( num_pools=pools_size, maxsize=maxsize, cert_reqs=cert_reqs, ca_certs=configuration.ssl_ca_cert, cert_file=configuration.cert_file, key_file=configuration.key_file, proxy_url=configuration.proxy, proxy_headers=configuration.proxy_headers, **addition_pool_args ) else: self.pool_manager = urllib3.PoolManager( num_pools=pools_size, maxsize=maxsize, cert_reqs=cert_reqs, ca_certs=configuration.ssl_ca_cert, cert_file=configuration.cert_file, key_file=configuration.key_file, **addition_pool_args ) def request(self, method, url, query_params=None, headers=None, body=None, post_params=None, _preload_content=True, _request_timeout=None): """Perform requests. :param method: http request method :param url: http request url :param query_params: query parameters in the url :param headers: http request headers :param body: request json body, for `application/json` :param post_params: request post parameters, `application/x-www-form-urlencoded` and `multipart/form-data` :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
""" method = method.upper() assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT', 'PATCH', 'OPTIONS'] if post_params and body: raise ApiValueError( "body parameter cannot be used with post_params parameter." ) post_params = post_params or {} headers = headers or {} timeout = None if _request_timeout: if isinstance(_request_timeout, (int, float)): # noqa: E501,F821 timeout = urllib3.Timeout(total=_request_timeout) elif (isinstance(_request_timeout, tuple) and len(_request_timeout) == 2): timeout = urllib3.Timeout( connect=_request_timeout[0], read=_request_timeout[1]) try: # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE` if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']: # Only set a default Content-Type for POST, PUT, PATCH and OPTIONS requests if (method != 'DELETE') and ('Content-Type' not in headers): headers['Content-Type'] = 'application/json' if query_params: url += '?' + urlencode(query_params) if ('Content-Type' not in headers) or (re.search('json', headers['Content-Type'], re.IGNORECASE)): request_body = None if body is not None: request_body = json.dumps(body) r = self.pool_manager.request( method, url, body=request_body, preload_content=_preload_content, timeout=timeout, headers=headers) elif headers['Content-Type'] == 'application/x-www-form-urlencoded': # noqa: E501 r = self.pool_manager.request( method, url, fields=post_params, encode_multipart=False, preload_content=_preload_content, timeout=timeout, headers=headers) elif headers['Content-Type'] == 'multipart/form-data': # must del headers['Content-Type'], or the correct # Content-Type which generated by urllib3 will be # overwritten. del headers['Content-Type'] r = self.pool_manager.request( method, url, fields=post_params, encode_multipart=True, preload_content=_preload_content, timeout=timeout, headers=headers) # Pass a `string` parameter directly in the body to support # other content types than Json when `body` argument is # provided in serialized form elif isinstance(body, str) or isinstance(body, bytes): request_body = body r = self.pool_manager.request( method, url, body=request_body, preload_content=_preload_content, timeout=timeout, headers=headers) else: # Cannot generate the request from given parameters msg = """Cannot prepare a request message for provided arguments. 
Please check that your arguments match declared content type.""" raise ApiException(status=0, reason=msg) # For `GET`, `HEAD` else: r = self.pool_manager.request(method, url, fields=query_params, preload_content=_preload_content, timeout=timeout, headers=headers) except urllib3.exceptions.SSLError as e: msg = "{0}\n{1}".format(type(e).__name__, str(e)) raise ApiException(status=0, reason=msg) if _preload_content: r = RESTResponse(r) # log response body logger.debug("response body: %s", r.data) if not 200 <= r.status <= 299: if r.status == 401: raise UnauthorizedException(http_resp=r) if r.status == 403: raise ForbiddenException(http_resp=r) if r.status == 404: raise NotFoundException(http_resp=r) if 500 <= r.status <= 599: raise ServiceException(http_resp=r) raise ApiException(http_resp=r) return r def GET(self, url, headers=None, query_params=None, _preload_content=True, _request_timeout=None): return self.request("GET", url, headers=headers, _preload_content=_preload_content, _request_timeout=_request_timeout, query_params=query_params) def HEAD(self, url, headers=None, query_params=None, _preload_content=True, _request_timeout=None): return self.request("HEAD", url, headers=headers, _preload_content=_preload_content, _request_timeout=_request_timeout, query_params=query_params) def OPTIONS(self, url, headers=None, query_params=None, post_params=None, body=None, _preload_content=True, _request_timeout=None): return self.request("OPTIONS", url, headers=headers, query_params=query_params, post_params=post_params, _preload_content=_preload_content, _request_timeout=_request_timeout, body=body) def DELETE(self, url, headers=None, query_params=None, body=None, _preload_content=True, _request_timeout=None): return self.request("DELETE", url, headers=headers, query_params=query_params, _preload_content=_preload_content, _request_timeout=_request_timeout, body=body) def POST(self, url, headers=None, query_params=None, post_params=None, body=None, _preload_content=True, _request_timeout=None): return self.request("POST", url, headers=headers, query_params=query_params, post_params=post_params, _preload_content=_preload_content, _request_timeout=_request_timeout, body=body) def PUT(self, url, headers=None, query_params=None, post_params=None, body=None, _preload_content=True, _request_timeout=None): return self.request("PUT", url, headers=headers, query_params=query_params, post_params=post_params, _preload_content=_preload_content, _request_timeout=_request_timeout, body=body) def PATCH(self, url, headers=None, query_params=None, post_params=None, body=None, _preload_content=True, _request_timeout=None): return self.request("PATCH", url, headers=headers, query_params=query_params, post_params=post_params, _preload_content=_preload_content, _request_timeout=_request_timeout, body=body) # end of class RESTClientObject def is_ipv4(target): """ Test if IPv4 address or not """ try: chk = ipaddress.IPv4Address(target) return True except ipaddress.AddressValueError: return False def in_ipv4net(target, net): """ Test if target belongs to given IPv4 network """ try: nw = ipaddress.IPv4Network(net) ip = ipaddress.IPv4Address(target) if ip in nw: return True return False except ipaddress.AddressValueError: return False except ipaddress.NetmaskValueError: return False def should_bypass_proxies(url, no_proxy=None): """ Yet another requests.should_bypass_proxies Test if proxies should not be used for a particular url. 
""" parsed = urlparse(url) # special cases if parsed.hostname in [None, '']: return True # special cases if no_proxy in [None , '']: return False if no_proxy == '*': return True no_proxy = no_proxy.lower().replace(' ',''); entries = ( host for host in no_proxy.split(',') if host ) if is_ipv4(parsed.hostname): for item in entries: if in_ipv4net(parsed.hostname, item): return True return proxy_bypass_environment(parsed.hostname, {'no': no_proxy} )
PypiClean
/cubrid-python-9.3.0.0002.tar.gz/RB-9.3.0/CUBRIDdb/cursors.py
import sys import types from CUBRIDdb import FIELD_TYPE class BaseCursor(object): """ A base for Cursor classes. Useful attributes: description:: A tuple of DB API 7-tuples describing the columns in the last executed query; see PEP-249 for details. arraysize:: default number of rows fetchmany() will fetch """ def __init__(self, conn): self.con = conn.connection self._cs = conn.connection.cursor() self.arraysize = 1 self.rowcount = -1 self.description = None self.charset = conn.charset self._cs._set_charset_name(conn.charset) def __del__(self): if self._cs == None: pass self.close() def __check_state(self): if self._cs == None: raise Exception("The cursor has been closed. No operation is allowed any more.") def close(self): """Close the cursor, and no further queries will be possible.""" self.__check_state() self._cs.close() self._cs = None def _bind_params(self, args,set_type=None): self.__check_state() if type(args) not in (tuple, list): args = [args,] args = list(args) for i in range(len(args)): if args[i] == None: pass elif isinstance(args[i], bool): if args[i] == True: args[i] = '1' else: args[i] = '0' elif isinstance(args[i], tuple): args[i] = args[i] else: # Python3.X dosen't support unicode keyword. try: mytest = unicode except NameError: if isinstance(args[i], str): pass elif isinstance(args[i], bytes): args[i] = args[i].decode(self.charset) else: args[i] = str(args[i]) else: if isinstance(args[i], unicode): args[i] = args[i].encode(self.charset) else: args[i] = str(args[i]) if type(args[i]) != tuple: self._cs.bind_param(i+1, args[i]) else: if set_type == None: data_type = int(FIELD_TYPE.CHAR) else: if type(set_type) != tuple: set_type = [set_type,] data_type = set_type[i] s = self.con.set() s.imports(args[i], data_type) self._cs.bind_set(i+1, s) def execute(self, query, args=None,set_type=None): """ Execute a query. query -- string, query to execute on server args -- optional sequence or mapping, parameters to use with query. Returns long integer rows affected, if any """ self.__check_state() self._cs.prepare(query) if args != None: self._bind_params(args,set_type) r = self._cs.execute() self.rowcount = self._cs.rowcount self.description = self._cs.description return r def executemany(self, query, args): """ Execute a multi-row query. query -- string, query to execute on server args -- Sequence of sequences or mappings, parameters to use with query Returns long integer rows affected, if any. This method improves performance on multiple-row INSERT and REPLACE. Otherwise it is equivalent to looping over args with execute(). """ self.__check_state() for p in args: self.execute(query, *(p,)) def _fetch_row(self): self.__check_state() return self._cs.fetch_row(self._fetch_type) def fetchone(self): """ Fetch the next row of a query result set, returning a single sequence, or None when no more data is available. """ self.__check_state() return self._fetch_row() def _fetch_many(self, size): self.__check_state() rlist = [] i = 0 while size < 0 or i < size: r = self.fetchone() if not r: break rlist.append(r) i = i+1 return rlist def fetchmany(self, size=None): """ Fetch the next set of rows of a query result, returning a sequence of sequences (e.g. a list of tuples). An empty sequence is returned when no more rows are available. The number of rows to fetch per call is specified by the parameter. If it is not given, the cursor's arraysize determines the number of rows to be fetched. The method should try to fetch as many rows as indicated by the size parameter. 
If this is not possible due to the specified number of rows not being available, fewer rows may be returned. """ self.__check_state() if size == None: size = self.arraysize if size <= 0: return [] return self._fetch_many(size) def fetchall(self): """ Fetch all (remaining) rows of a query result, returning them as a sequence of sequences (e.g. a list of tuples). Note that the cursor's arraysize attribute can affect the performance of this operation. """ self.__check_state() return self._fetch_many(-1) def setinputsizes(self, *args): """Does nothing, required by DB API.""" pass def setoutputsizes(self, *args): """Does nothing, required by DB API.""" pass def nextset(self): """Advance to the next result set. Returns None if there are no more result sets.""" pass def callproc(self, procname, args=()): """ Execute stored procedure procname with args procname -- string, name of procedure to execute on server args -- Sequence of parameters to use with procedure Returns the original args. """ pass def __iter__(self): """ Iteration over the result set which calls self.fetchone() and returns the next row. """ self.__check_state() return self # iter(self.fetchone, None) def next(self): """ Return the next row from the currently executing SQL statement using the same semantics as fetchone(). A StopIteration exception is raised when the result set is exhausted for Python versions 2.2 and later. """ self.__check_state() return self.__next__() def __next__(self): self.__check_state() row = self.fetchone() if row is None: raise StopIteration return row class CursorTupleRowsMixIn(object): _fetch_type = 0 class CursorDictTupleMixIn(object): _fetch_type = 1 class Cursor(CursorTupleRowsMixIn, BaseCursor): ''' This is the standard Cursor class that returns rows as tuples and stores the result set in the client. ''' class DictCursor(CursorDictTupleMixIn, BaseCursor): ''' This is a Cursor class that returns rows as dictionaries and stores the result set in the client. '''
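
# Usage sketch (not part of the original file): a hypothetical DB-API flow.
# The connection URL, credentials and table are placeholders, and the
# CUBRIDdb.connect() call is assumed from the package this module ships with.
#
#   import CUBRIDdb
#   conn = CUBRIDdb.connect('CUBRID:localhost:33000:demodb:::', 'public', '')
#   cur = conn.cursor()
#   cur.execute('SELECT id, name FROM athlete WHERE gender = ?', ('M',))
#   for row in cur.fetchmany(5):
#       print(row)
#   cur.close()
#   conn.close()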
PypiClean
/BitGlitter-2.0.0.tar.gz/BitGlitter-2.0.0/bitglitter/config/presetfunctions.py
from bitglitter.config.config import session
from bitglitter.config.presetmodels import Preset
from bitglitter.validation.validatewrite import write_preset_validate


def _convert_preset_to_dict(preset):
    return {
        'nickname': preset.nickname,
        'datetime_created': preset.datetime_created,
        'output_mode': preset.output_mode,
        'compression_enabled': preset.compression_enabled,
        'scrypt_n': preset.scrypt_n,
        'scrypt_r': preset.scrypt_r,
        'scrypt_p': preset.scrypt_p,
        'cpu_cores': preset.cpu_cores,
        'stream_palette_id': preset.stream_palette_id,
        'pixel_width': preset.pixel_width,
        'block_height': preset.block_height,
        'block_width': preset.block_width,
        'frames_per_second': preset.frames_per_second
    }


def add_new_preset(nickname, output_mode='video', compression_enabled=True, scrypt_n=14, scrypt_r=8, scrypt_p=1,
                   max_cpu_cores=0, stream_palette_id='6', pixel_width=24, block_height=45, block_width=80,
                   frames_per_second=30):
    write_preset_validate(nickname, output_mode, compression_enabled, scrypt_n, scrypt_r, scrypt_p, max_cpu_cores,
                          stream_palette_id, pixel_width, block_height, block_width, frames_per_second)
    if session.query(Preset).filter(Preset.nickname == nickname).first():
        raise ValueError(f'\'{nickname}\' already exists as a preset nickname. Please choose a new name or '
                         f'delete the existing preset.')
    new_preset = Preset.create(nickname=nickname, output_mode=output_mode, compression_enabled=compression_enabled,
                               scrypt_n=scrypt_n, scrypt_r=scrypt_r, scrypt_p=scrypt_p, max_cpu_cores=max_cpu_cores,
                               stream_palette_id=stream_palette_id, pixel_width=pixel_width,
                               block_height=block_height, block_width=block_width,
                               frames_per_second=frames_per_second)
    return new_preset


def return_all_preset_data():
    returned_list = []
    presets = session.query(Preset).all()
    for preset in presets:
        returned_list.append(_convert_preset_to_dict(preset))
    return returned_list


def return_preset(nickname):
    preset = session.query(Preset).filter(Preset.nickname == nickname).first()
    if preset:
        return _convert_preset_to_dict(preset)
    else:
        return {}


def remove_preset(nickname):
    preset = session.query(Preset).filter(Preset.nickname == nickname).first()
    if preset:
        preset.delete()


def remove_all_presets():
    session.query(Preset).delete()
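
# Usage sketch (not part of the original file): a round trip through the
# helpers above; the preset nickname and argument values are examples only.
if __name__ == '__main__':
    add_new_preset('hd_video', output_mode='video', pixel_width=24)
    print(return_preset('hd_video'))
    print(return_all_preset_data())
    remove_preset('hd_video')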
PypiClean
/FeaSel-Net-0.0.9.tar.gz/FeaSel-Net-0.0.9/src/feasel/linear/lda.py
import numpy as np from .base import ModelContainer from .pca import PCA from .svd import SVD from ..data import classification from ..plot import LinearTransformationVisualizer as Visualizer from ..utils.time import get_timestamp class LDA(ModelContainer): def __init__(self, X, y, features=None, name=None, **kwargs): """ A class environment for analysing X using the theory of principal component analysis. This is an unsupervised technique for dimensionality reduction. Parameters ---------- X : float 2D-data array. y : float 1D-class array. The default is 'None'. features : float 1D-Array with features. If 'None', the features are incremented from 0 to N-1. The default is 'None'. name : str, optional The model name. The default is 'None'. Returns ------- None. """ self.X = X self.y = y self.features = features self.set_name(name) self.timestamp = get_timestamp() self._get_params(**kwargs) #data container self._data = classification.Linear(X, y, features, sample_axis=self.params.data.sample_axis, normalization=self.params.data.normalization) # embed plot class self._plot = Visualizer(self) self.p = self.data.n_features self.q = self.params.build.n_components def __str__(self): return 'ModelContainer for LDAs' def __repr__(self): return (f"LDA(n_features: {self.data.n_features}, " f"n_samples: {self.data.n_samples}, " "n_components: {self.params.build.n_components}, " "normalization: {self.params.data.normalization})") @property def data(self): return self._data @property def plot(self): return self._plot def get_dependencies(self, X, type='scatter'): """ Calculation of either covariance or correlation matrix. This is a measure of the linear dependencies between the features. Returns ------- Sigma or R : ndarray The covariance or correlation matrix of the input dataset. """ TYPE = {'scatter': self.data.scatter_matrix } try: self.data.mean_centered(self.data.X_train) # sets 'data.mu' self.data.scatter_w = np.zeros([self.data.n_features, self.data.n_features]) self.data.scatter_b = np.zeros([self.data.n_features, self.data.n_features]) for c in self.data.classes: # handbook of statistics (Cheriet 2013) idx = self.data.y_train == c data = self.data.X_train[idx] # add data to each class: self.data.class_props['n'].append(len(data)) self.data.class_props['prior'].append(data.shape[0] / self.data.n_samples) self.data.class_props['mu'].append(np.mean(data, axis=0, keepdims=True)) self.data.class_props['sigma'].append(np.std(data, axis=0, ddof=1, keepdims=True)) self.data.class_props['scatter'].append(TYPE[type](data)) S = self.data.class_props['scatter'][-1] n = self.data.class_props['n'][-1] self.data.class_props['covariance'].append(S / (n - 1)) self.data.scatter_w += S d_mu = self.data.class_props['mu'][-1] - self.data.mu self.data.scatter_b += n * d_mu.T @ d_mu self.data.common_covariance = (self.data.scatter_b / (self.data.n_samples -self.data.n_classes)) sw_sb = np.linalg.inv(self.data.scatter_w) @ self.data.scatter_b return sw_sb except: raise NameError(f'{type} is an invalid parameter for type.') # internal getters def get_eigs(self): """ Solves the eigenvalue problem of the quadratic covariance or correlation matrix respectively. The order of evecs and evals is sorted by the magnitude of the evals from highest to lowest. Returns ------- None. 
""" if isinstance(self.data.evals, np.ndarray): return D = self.get_dependencies(self.data.X_train, 'scatter') if self.params.build.solver == 'eigs': evals, evecs = np.linalg.eig(D) # complex eigenvalues # this ensures the evals to be real valued: evals = np.abs(evals) idx = np.flip(np.argsort(evals)) evals = evals[idx] evecs = evecs[:, idx].real elif self.params.build.solver == 'svd': svd = SVD(D) U, Sigma, Vh = svd() evals, evecs = np.diagonal(Sigma), Vh.T self.data.score_variance = evals**2 self.data.evals, self.data.evecs = np.array(evals, ndmin=2), evecs self.get_overall_explained_variance() def get_pca(self, n_components=None): """ Gets a PCA for the pre-processing of the LDA's data. This is necessary, if the number of features is bigger than the number of samples. Parameters ---------- n_components : int, optional The number of components for the PCA. The default is None. Returns ------- None. """ if n_components is None: n_components = int(self.n_samples * 0.1) # 10 % of sample data self.pca = PCA(self.X, self.y, n_components=self.params.build.n_components, normalization=self.params.build.normalization, solver=self.params.build.solver) self.pca.get_eigs() def decision_rule(self, score, components=None): loadings = self.data.loadings[:self.params.build.n_components] common_covariance = loadings @ self.data.common_covariance @ loadings.T common_covariance_ = np.linalg.inv(common_covariance) if not isinstance(components, type(None)): common_covariance_ = common_covariance_[components][:,components] # actual decision rule delta = np.zeros([len(score), self.data.n_classes]) for i, c in enumerate(self.data.classes): mu = (loadings @ self.data.class_props["mu"][i].T)[components] delta[:, i] = (np.log10(self.data.class_props["prior"][i]) - 1/2 * mu.T @ common_covariance_ @ mu + score @ common_covariance_ @ mu).flatten() return delta def predict(self, X_test, components=None): scores = self.get_scores(X_test) delta = self.decision_rule(scores, components, transformed=True) idx = np.argmax(delta, axis=1) y_pred = self.data.classes[idx] return y_pred
PypiClean
/Incantation-0.4.1.1-py3-none-any.whl/incantation/Component/Cards.py
from ..abst import traits_class, Attribute, Tag, ITraitsAttribute, ITraitsTag from ..utils import default_initializer, doc_printer from .Buttons import Horizontal if False: import incantation as inc class CardModule: @staticmethod @doc_printer def help(self): """ >>> import incantation as inc >>> assert str(inc.C()) == str(inc.Tag('div')) >>> card = inc.C(inc.IsCard(), >>> >>> inc.Color('blue-grey').darken, >>> >>> inc.CardContent( >>> inc.CardTitle('Card Title'), >>> inc.Paragraph("I am a very simple card."), >>> text_color='white'), >>> >>> inc.CardAction(inc.Tag('a', inc.Attribute('href', '#!'), 'this is a link'), >>> inc.Tag('a', inc.Attribute('href', '#!'), 'this is another link'))) >>> print(card) <div class="card blue-grey"> <div class="card-content white-text"> <span class="card-title"> Card Title </span> <p> I am a very simple card. </p> </div> <div class="card-action"> <a href="#!"> this is a link </a> <a href="#!"> this is another link </a> </div> </div> """ @traits_class('class', 'card', inherit_from=Attribute) class IsCard(ITraitsAttribute): pass @traits_class('span', Attribute('class', 'card-title'), inherit_from=Tag) class CardTitle(ITraitsTag): pass @traits_class('div', Attribute('class', 'card-action'), inherit_from=Tag) class CardAction(ITraitsTag): pass class CardImg(Tag): @default_initializer def __init__(self, *components: 'requires an image and a str'): Tag.__init__(self, 'div', Attribute('class', 'card-image'), *components) @doc_printer def help(self): """ >>>with open('xxx.html', 'w') as file: >>> inc.C(inc.IsCard(), >>> inc.Color('blue-grey').lighten, >>> inc.CardImage(img=inc.Img(src='xxx.png')), >>> inc.CardContent(inc.CardTitle('Title'), >>> inc.Paragraph('some text here'), >>> text_color='whilte')).set_indent(0) >> file.write """ @traits_class('div', Attribute('class', 'card-image'), inherit_from=Tag) class CardImage(ITraitsTag): pass class CardContent(Tag): @default_initializer def __init__(self, *components: 'requres a TextColor'): Tag.__init__(self, 'div', Attribute('class', 'card-content'), *components)
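
# Usage sketch (not part of the original file), mirroring the doctest in
# CardModule.help above; it assumes the package exposes these classes at the
# top level as incantation.C, incantation.IsCard, and so on.
#
#   import incantation as inc
#   card = inc.C(inc.IsCard(),
#                inc.CardContent(inc.CardTitle('Card Title'),
#                                inc.Paragraph('I am a very simple card.')))
#   print(card)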
PypiClean
/FLaP-0.6.0.tar.gz/FLaP-0.6.0/flap/latex/macros/graphics.py
# # This file is part of Flap. # # Flap is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Flap is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Flap. If not, see <http://www.gnu.org/licenses/>. # from flap.latex.macros.commons import Macro, UpdateLink, Environment class GraphicsPath(Macro): """ Intercept the `\graphicspath` directive """ def __init__(self, flap): super().__init__(flap, r"\graphicspath", None, None) def _capture_arguments(self, parser, invocation): invocation.append_argument( "paths", parser.capture_group()) def _execute(self, parser, invocation): argument = parser.evaluate_as_text(invocation.argument("paths")) paths = list(map(str.strip, argument.split(","))) self._flap.record_graphic_path(paths, invocation) return invocation.as_tokens class IncludeGraphics(UpdateLink): """ Intercept the `\includegraphics` directive """ def __init__(self, flap): super().__init__(flap, r"\includegraphics") def update_link(self, parser, link, invocation): return self._flap.update_link(link, invocation) class IncludeSVG(UpdateLink): """ Intercept the r`\includegraphics` directive """ def __init__(self, flap): super().__init__(flap, r"\includesvg") def update_link(self, parser, link, invocation): return self._flap.update_link(link, invocation) class Overpic(Environment): """ Intercept the \begin{overpic} environment """ def __init__(self, flap): super().__init__(flap, "overpic") def execute(self, parser, invocation): invocation.append_argument("options", parser.capture_options()) invocation.append_argument("link", parser.capture_one()) link = parser.evaluate_as_text(invocation.argument("link")) new_link = self._flap.update_link(link, invocation) return invocation.substitute("link", parser._create.as_list( "{" + new_link + "}")).as_tokens
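
# Illustrative sketch (not part of the original file): the comma-splitting
# rule GraphicsPath._execute applies to the captured \graphicspath argument,
# shown in isolation with a made-up argument string.
if __name__ == '__main__':
    argument = ' img/ , figures/appendix '
    paths = list(map(str.strip, argument.split(',')))
    print(paths)  # ['img/', 'figures/appendix']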
PypiClean
/ESMValTool-2.9.0-py3-none-any.whl/esmvaltool/diag_scripts/thermodyn_diagtool/fluxogram.py
from matplotlib import pyplot as plt class Fluxogram(): """The diagram flux module. A class to draw and maintain all fluxes and storages from a model or some similiar kind of thing to be drawn as a sequence of storages and fluxes. """ def __init__(self, max_flux, max_storage, grid_size=20): """Initialize a fluxogram. must be called with. The arguments are: - max_flux: aximum flux of all fluxes; needed for scaling - max_storage: maximum storages of all storages; needed for scaling - grid_size:grid_size for drawing the fluxogram, determines how big everything is. Fluxes and storages scaled accordingly - storages: all the storages the fluxogram has (usually empy to begin with) - fluxes: all the fluxes the fluxogram has (usually empty to begin with). """ self.storages = [] self.fluxes = [] self.max_flux = max_flux self.max_storage = max_storage self.grid_size = grid_size def add_storage(self, name, amount, order, offset): """Add a storage to the storages of the fluxogram.""" self.storages.append( Storage(name, self.grid_size, len(self.storages), amount, order, offset)) def add_flux(self, name, from_storage, to_storage, amount): """Add a flux to the fluxes of the fluxogram.""" self.fluxes.append( Flux(name, self.grid_size, from_storage, to_storage, amount)) def update_all_storages(self, amounts): """Update the amount of all storages.""" for storage, amount in zip(self.storages, amounts): storage.update_storage(amount) def update_all_fluxes(self, amounts): """Update the amount of all fluxes.""" for flux, amount in zip(self.fluxes, amounts): flux.update_flux(amount) def update_everything(self, amounts_storages, amounts_fluxes): """Update all fluxes and storages.""" self.update_all_fluxes(amounts_fluxes) self.update_all_storages(amounts_storages) def draw(self, filen, listv): """Draw all fluxes and storages.""" fig = plt.figure() frame1 = plt.axes() fig.set_size_inches(18.5, 10.5) # find the smallest/largest offset_ so the fluxogram can be drawn big # enough largest_offset = 0 smallest_offset = 0 largest_order = 0 for storage in self.storages: if storage.offset > largest_offset: largest_offset = storage.offset if storage.offset < smallest_offset: smallest_offset = storage.offset if storage.order > largest_order: largest_order = storage.order # set y and x limits y_max = 0 y_min = (largest_order + 1) * 2 * self.grid_size * -1 x_max = (largest_offset + 2) * 2 * self.grid_size x_min = (smallest_offset - 1) * 2 * self.grid_size plt.axis([x_min, x_max, y_min, y_max]) frame1.axes.get_xaxis().set_visible(False) frame1.axes.get_yaxis().set_visible(False) # draw all fluxes dict_r = { 'AZ+': listv[0], 'ASE+': listv[2], 'ATE+': listv[4], 'A2KS': listv[6], 'A2KT': listv[7], 'KTE-': listv[8], 'KSE-': listv[10], 'KZ-': listv[12] } dict_oth = { 'l': listv[14], 'dn': listv[15], 'rdn': listv[16], 'ldn': listv[17], 'up': listv[18], 'lup': listv[19], 'rup': listv[20] } switcher = { 'l': self.leftarr_txt, 'dn': self.dnarr_txt, 'rdn': self.rdnarr_txt, 'ldn': self.ldnarr_txt, 'up': self.uparr_txt, 'lup': self.luparr_txt, 'rup': self.ruparr_txt } for flux in self.fluxes: idb = flux.name # scale the amount scaled_amount_flux = self.scaler(flux.amount, self.max_flux) # width multiplied because if not, the arrows are so tiny arrow = plt.Arrow(flux.x_start, flux.y_start, flux.d_x, flux.d_y, width=scaled_amount_flux * 1.7, alpha=0.8) if flux.dire == 'r': for key in dict_r: value = dict_r[key] if idb == key: plt.text(flux.x_start + 0.25 * self.grid_size, flux.y_start + 0.05 * self.grid_size, value, size=self.grid_size * 0.7) 
else: for key in dict_oth: value = dict_oth[key] if flux.dire == key: switcher[flux.dire](value, flux, plt) plt.gca().add_patch(arrow) # draw all storages for storage in self.storages: # scale the amount scaled_amount_stor = self.scaler(storage.amount, self.max_storage) if scaled_amount_stor == 0: scaled_amount_stor = 0.0001 # change_x and y, so the storages are centered to the middle # of their position and not to upper left x_p = ( storage.x_p + (1 - storage.amount / self.max_storage) * 1.3 * self.grid_size) y_p = ( storage.y_p - (1 - storage.amount / self.max_storage) * 1.3 * self.grid_size) rectangle = plt.Rectangle((x_p, y_p), scaled_amount_stor, -scaled_amount_stor, alpha=0.4) # label all storages plt.text(storage.x_p + 0.6 * self.grid_size, storage.y_p - 0.65 * self.grid_size, storage.name, fontsize=0.7 * self.grid_size) dict_s = { 'AZ': listv[1], 'ASE': listv[3], 'ATE': listv[5], 'KTE': listv[9], 'KSE': listv[11], 'KZ': listv[13] } for key in dict_s: value = dict_s[key] if storage.name == key: plt.text(storage.x_p + 0.6 * self.grid_size, storage.y_p - 0.85 * self.grid_size, value, fontsize=0.7 * self.grid_size) # draw a date plt.gca().add_patch(rectangle) plt.savefig(filen) plt.close(fig) def dnarr_txt(self, value, flux, pltt): """Write text on arrow pointing down.""" x_start = flux.x_start y_start = flux.y_start pltt.text(x_start - 0.2 * self.grid_size, y_start - 0.45 * self.grid_size, value, size=self.grid_size * 0.7, rotation=-90) def leftarr_txt(self, value, flux, pltt): """Write text on arrow pointing left.""" x_start = flux.x_start y_start = flux.y_start pltt.text(x_start - 1.35 * self.grid_size, y_start + 0.05 * self.grid_size, value, size=self.grid_size * 0.7) def ldnarr_txt(self, value, flux, pltt): """Write text on arrow pointing down-left.""" x_start = flux.x_start y_start = flux.y_start pltt.text(x_start - 0.35 * self.grid_size, y_start - 0.25 * self.grid_size, value, size=self.grid_size * 0.5, rotation=-110) def luparr_txt(self, value, flux, pltt): """Write text on arrow pointing up-left.""" x_start = flux.x_start y_start = flux.y_start pltt.text(x_start - 0.35 * self.grid_size, y_start + 0.45 * self.grid_size, value, size=self.grid_size * 0.5, rotation=110) def rdnarr_txt(self, value, flux, pltt): """Write text on arrow pointing down-right.""" x_start = flux.x_start y_start = flux.y_start pltt.text(x_start + 0.05 * self.grid_size, y_start - 0.25 * self.grid_size, value, size=self.grid_size * 0.5, rotation=-75) def ruparr_txt(self, value, flux, pltt): """Write text on arrow pointing up-right.""" x_start = flux.x_start y_start = flux.y_start pltt.text(x_start - 0.1 * self.grid_size, y_start + 0.45 * self.grid_size, value, size=self.grid_size * 0.5, rotation=75) def uparr_txt(self, value, flux, pltt): """Write text on arrow pointing up.""" x_start = flux.x_start y_start = flux.y_start pltt.text(x_start + 0.05 * self.grid_size, y_start + 0.75 * self.grid_size, value, size=self.grid_size * 0.7, rotation=90) def scaler(self, value_in, base_max): """Scale the values in the blocks of the diagram. Scale the fluxes and storages, so they don't overstep their grafical bounds must be called with: - valueIn: the value that needs rescaling - baseMax: the upper limit of the original dataset ~ 100 for fluxes, ~250 for stores (in my model). 
""" # baseMin: the lower limit of the original dataset (usually zero) base_min = 0 # limitMin: the lower limit of the rescaled dataset (usually zero) limit_min = 0 # limitMax: the upper limit of the rescaled dataset (in our case grid) limit_max = self.grid_size # prevents wrong use of scaler if value_in > base_max: raise ValueError("Input value larger than base max") return (((limit_max - limit_min) * (value_in - base_min) / (base_max - base_min)) + limit_min) class Flux: """Contain a flux of a fluxogram.""" def __init__(self, name, grid_size, from_storage, to_storage, amount=0): """Initialize a flux. Arguments are: - name: name of the flux - grid_size: grid size of the diagram - from_storage: storage the flux is originating from - to_storage: storage the flux is going into - amount: how much stuff fluxes. """ self.name = name self.from_storage = from_storage self.to_storage = to_storage self.amount = amount self.grid_size = grid_size (self.x_start, self.y_start, self.x_end, self.y_end, self.d_x, self.d_y, self.dire) = (self.calc_start_end_dx_dy()) def update_flux(self, amount): """Update the amount of the flux.""" self.amount = amount def calc_start_end_dx_dy(self): """Scale the arrows. Calculate the starting and ending point of an arrow depending on the order and offset of the starting and ending storages. This helps determine the direction of the arrow returns the start and end xy coordinates of the arrow as tuples. """ # arrow pointing to left up if (self.from_storage.offset > self.to_storage.offset and self.from_storage.order > self.to_storage.order): x_start = self.from_storage.x_p + 0.85 * self.grid_size y_start = self.from_storage.y_p - self.grid_size * 0.5 x_end = self.to_storage.x_p + self.grid_size * 0.65 y_end = self.to_storage.y_p - 0.7 * self.grid_size d_x = abs(x_start - x_end) * (-1) d_y = abs(y_start - y_end) dire = 'lup' # arrow pointing up elif (self.from_storage.offset == self.to_storage.offset and self.from_storage.order > self.to_storage.order): x_start = self.from_storage.x_p + 0.85 * self.grid_size y_start = self.from_storage.y_p - 0.5 * self.grid_size x_end = self.to_storage.x_p + 0.85 * self.grid_size y_end = self.to_storage.y_p - 0.25 * self.grid_size d_x = abs(x_start - x_end) d_y = abs(y_start - y_end) dire = 'up' # arrow pointing right up elif (self.from_storage.offset < self.to_storage.offset and self.from_storage.order > self.to_storage.order): x_start = (self.from_storage.x_p + self.grid_size) y_start = self.from_storage.y_p - 0.5 * self.grid_size x_end = self.to_storage.x_p + 0.05 * self.grid_size y_end = self.to_storage.y_p - 0.75 * self.grid_size d_x = abs(x_start - x_end) d_y = abs(y_start - y_end) dire = 'rup' # arrow pointing right elif (self.from_storage.offset < self.to_storage.offset and self.from_storage.order == self.to_storage.order): x_start = (self.from_storage.x_p + self.grid_size) y_start = self.from_storage.y_p - 0.8 * self.grid_size x_end = self.to_storage.x_p + 1.25 * self.grid_size y_end = self.to_storage.y_p - 0.8 * self.grid_size d_x = abs(x_start - x_end) d_y = abs(y_start - y_end) dire = 'r' # arrow pointing right down elif (self.from_storage.offset < self.to_storage.offset and self.from_storage.order < self.to_storage.order): x_start = (self.from_storage.x_p + 0.85 * self.grid_size) y_start = self.from_storage.y_p - 1.12 * self.grid_size x_end = self.to_storage.x_p + 0.85 * self.grid_size y_end = self.to_storage.y_p - 0.9 * self.grid_size d_x = abs(x_start - x_end) d_y = abs(y_start - y_end) * (-1) dire = 'rdn' # arrow pointing down 
elif (self.from_storage.offset == self.to_storage.offset and self.from_storage.order < self.to_storage.order): x_start = self.from_storage.x_p + 0.8 * self.grid_size y_start = (self.from_storage.y_p - 1.12 * self.grid_size) x_end = self.to_storage.x_p + 0.8 * self.grid_size y_end = self.to_storage.y_p - 1.4 * self.grid_size d_x = abs(x_start - x_end) d_y = abs(y_start - y_end) * (-1) dire = 'dn' # arrow pointing left down elif (self.from_storage.offset > self.to_storage.offset and self.from_storage.order < self.to_storage.order): x_start = self.from_storage.x_p + 0.75 * self.grid_size y_start = (self.from_storage.y_p - 1.1 * self.grid_size) x_end = self.to_storage.x_p + 0.6 * self.grid_size y_end = self.to_storage.y_p - 0.9 * self.grid_size d_x = abs(x_start - x_end) * (-1) d_y = abs(y_start - y_end) * (-1) dire = 'ldn' # arrow pointing left elif (self.from_storage.offset > self.to_storage.offset and self.from_storage.order == self.to_storage.order): x_start = self.from_storage.x_p + 0.5 * self.grid_size y_start = self.from_storage.y_p - 0.75 * self.grid_size x_end = self.to_storage.x_p + 0.25 * self.grid_size y_end = self.to_storage.y_p - 0.75 * self.grid_size d_x = abs(x_start - x_end) * (-1) d_y = abs(y_start - y_end) dire = 'l' # multiply by 0.9 so there is a gap between storages and arrows d_x = d_x * 0.75 d_y = d_y * 0.75 return x_start, y_start, x_end, y_end, d_x, d_y, dire class Storage: """Contain a storage of a fluxogram.""" def __init__(self, name, grid_size, number, amount=0, order=0, offset=0): """Initialize a storage. Arguments are: - name: name of the storage - number: consecutive number - grid_size of the diagram - amount: how much stuff is in it - order: how much down it is in the hierachie (starts with 0) - offset = how much the storage is offset to the left/right in relationship to the center. """ self.name = name self.amount = amount self.number = number self.order = order self.offset = offset self.grid_size = grid_size self.x_p, self.y_p = self.calculate_xy() def update_storage(self, amount): """Update the amount of the storage.""" self.amount = amount def calculate_xy(self): """Provide coordinates of the blocks in the diagram. Calculate the xy coordinates of the starting point from where the rectangle is drawn. The additional multiplication by two is to produce the gaps in the diagram. """ x_p = self.offset * self.grid_size * 2 # multiply by -1 to draw the diagram from top to bottom y_p = self.order * self.grid_size * 2 * -1 return x_p, y_p
PypiClean
/GO-3-data-model-1.0.2.tar.gz/GO-3-data-model-1.0.2/datamodel/input/staticinner.py
import logging from pydantic import root_validator, validator from datamodel.input.staticinnerbase import * class BusInitialStatus(BusInitialStatusBase): pass class ShuntInitialStatus(ShuntInitialStatusBase): pass class DispatchableDevices_SimpleProducingConsumingDevicesInitialStatus(DispatchableDevices_SimpleProducingConsumingDevicesInitialStatusBase): @validator("accu_down_time") def shutdown_accu_down_time_ge_0(cls, data): if data < 0: msg = "fails accu_down_time >= 0. accu_down_time: {}".format(data) raise ValueError(msg) return data @validator("accu_up_time") def shutup_accu_up_time_ge_0(cls, data): if data < 0: msg = "fails accu_up_time >= 0. accu_up_time: {}".format(data) raise ValueError(msg) return data @root_validator def accu_up_or_down_time_eq_0(cls, data): up = data.get("accu_up_time") down = data.get("accu_down_time") if (up is not None) and (down is not None) and not ((up <= 0.0) or (down <= 0.0)): msg = "fails (accu_up_time <= 0.0) or (accu_down_time <= 0.0). accu_up_time: {}, accu_down_time: {}".format( up, down) raise ValueError(msg) return data @root_validator def if_prior_on_then_accu_up_gt_0(cls, data): on = data.get("on_status") up = data.get("accu_up_time") if (on is not None) and (up is not None) and (on > 0) and (up <= 0.0): msg = "fails (on_status > 0 implies accu_up_time > 0.0). on_status: {}, accu_up_time: {}".format( on, up) raise ValueError(msg) return data @root_validator def if_prior_off_then_accu_down_gt_0(cls, data): on = data.get("on_status") down = data.get("accu_down_time") if (on is not None) and (down is not None) and (on < 1) and (down <= 0.0): msg = "fails (on_status < 1 implies accu_down_time > 0.0). on_status: {}, accu_down_time: {}".format( on, down) raise ValueError(msg) return data # may not want these two, due to startup trajectories # @root_validator # def if_prior_on_then_p_eq_0(cls, data): # on = data.get("on_status") # p = data.get("p") # if (on is not None) and (p is not None) and (on <= 0) and (p != 0.0): # msg = "fails (on_status == 0 implies p == 0.0). on_status: {}, p: {}".format( # on, p) # raise ValueError(msg) # return data # @root_validator # def if_prior_on_then_q_eq_0(cls, data): # on = data.get("on_status") # q = data.get("q") # if (on is not None) and (q is not None) and (on <= 0) and (q != 0.0): # msg = "fails (on_status == 0 implies q == 0.0). on_status: {}, q: {}".format( # on, q) # raise ValueError(msg) # return data # if on0 > 0.0 then accu_up > 0.0 # if on0 < 1.0 then accu_down > 0.0 class ACTransmissionLineInitialStatus(ACTransmissionLineInitialStatusBase): pass class TwoWindingTransformerInitialStatus(TwoWindingTransformerInitialStatusBase): pass class DCLineInitialStatus(DCLineInitialStatusBase): pass
PypiClean
/NSFWDetection-1.0.2.tar.gz/NSFWDetection-1.0.2/nsfw_detector/model.py
import os import pydload import numpy as np import onnxruntime from .image_utils import load_images # sigmoid function def sig(x): return 1/(1 + np.exp(-x)) class Model: """ Class for loading model and running predictions. For example on how to use take a look the if __name__ == '__main__' part. """ nsfw_model = None def __init__(self): """ model = Classifier() """ url = "https://github.com/gsarridis/NSFW-Detection-Pytorch/releases/download/pretrained_models_v2/2022_06_20_11_01_42.onnx" home = os.path.expanduser("~") model_folder = os.path.join(home, ".NSFWModel/") if not os.path.exists(model_folder): os.mkdir(model_folder) model_path = os.path.join(model_folder, os.path.basename(url)) if not os.path.exists(model_path): print("Downloading the checkpoint to", model_path) pydload.dload(url, save_to_path=model_path, max_time=None) self.nsfw_model = onnxruntime.InferenceSession(model_path) def predict( self, image_paths=[], batch_size=4, image_size=(240, 240) ): """ inputs: image_paths: list of image paths or can be a string too (for single image) batch_size: batch_size for running predictions image_size: size to which the image needs to be resized categories: since the model predicts numbers, categories is the list of actual names of categories """ if not isinstance(image_paths, list): image_paths = [image_paths] loaded_images, loaded_image_paths = load_images( image_paths, image_size, image_names=image_paths ) if not loaded_image_paths: return {} preds = [] model_preds = [] sigmoid_v = np.vectorize(sig) while len(loaded_images): _model_preds = self.nsfw_model.run( [self.nsfw_model.get_outputs()[0].name], {self.nsfw_model.get_inputs()[0].name: loaded_images[:batch_size]}, )[0] _model_preds = sigmoid_v(_model_preds) model_preds = [*model_preds, *(np.transpose(_model_preds).tolist()[0])] t_preds = np.rint(_model_preds) t_preds = np.transpose(t_preds).astype(int).tolist()[0] preds = [*preds, *t_preds] loaded_images = loaded_images[batch_size:] images_preds = {} for i, loaded_image_path in enumerate(loaded_image_paths): if not isinstance(loaded_image_path, str): loaded_image_path = i images_preds[loaded_image_path] = {} if preds[i]> 0.5: images_preds[loaded_image_path] = { 'Label': 'NSFW', 'Score': model_preds[i]} else: images_preds[loaded_image_path] = { 'Label': 'SFW', 'Score': model_preds[i]} return images_preds if __name__ == "__main__": m = Model() while 1: print( "\n Enter single image path or multiple images seperated by ; \n" ) images = input().split(";") images = [image.strip() for image in images] print(m.predict(images), "\n")
PypiClean
/Lifetimes-0.11.3-py3-none-any.whl/lifetimes/generate_data.py
import warnings warnings.simplefilter(action="ignore", category=FutureWarning) import numpy as np from numpy import random import pandas as pd def beta_geometric_nbd_model(T, r, alpha, a, b, size=1): """ Generate artificial data according to the BG/NBD model. See [1] for model details Parameters ---------- T: array_like The length of time observing new customers. r, alpha, a, b: float Parameters in the model. See [1]_ size: int, optional The number of customers to generate Returns ------- DataFrame With index as customer_ids and the following columns: 'frequency', 'recency', 'T', 'lambda', 'p', 'alive', 'customer_id' References ---------- .. [1]: '"Counting Your Customers" the Easy Way: An Alternative to the Pareto/NBD Model' (http://brucehardie.com/papers/bgnbd_2004-04-20.pdf) """ if type(T) in [float, int]: T = T * np.ones(size) else: T = np.asarray(T) probability_of_post_purchase_death = random.beta(a, b, size=size) lambda_ = random.gamma(r, scale=1.0 / alpha, size=size) columns = ["frequency", "recency", "T", "lambda", "p", "alive", "customer_id"] df = pd.DataFrame(np.zeros((size, len(columns))), columns=columns) for i in range(size): p = probability_of_post_purchase_death[i] l = lambda_[i] # hacky until I can find something better times = [] next_purchase_in = random.exponential(scale=1.0 / l) alive = True while (np.sum(times) + next_purchase_in < T[i]) and alive: times.append(next_purchase_in) next_purchase_in = random.exponential(scale=1.0 / l) alive = random.random() > p times = np.array(times).cumsum() df.iloc[i] = ( np.unique(np.array(times).astype(int)).shape[0], np.max(times if times.shape[0] > 0 else 0), T[i], l, p, alive, i, ) return df.set_index("customer_id") def beta_geometric_nbd_model_transactional_data(T, r, alpha, a, b, observation_period_end="2019-1-1", freq="D", size=1): """ Generate artificial transactional data according to the BG/NBD model. See [1] for model details Parameters ---------- T: int, float or array_like The length of time observing new customers. r, alpha, a, b: float Parameters in the model. See [1]_ observation_period_end: date_like The date observation ends freq: string, optional Default 'D' for days, 'W' for weeks, 'h' for hours size: int, optional The number of customers to generate Returns ------- DataFrame The following columns: 'customer_id', 'date' References ---------- .. 
[1]: '"Counting Your Customers" the Easy Way: An Alternative to the Pareto/NBD Model' (http://brucehardie.com/papers/bgnbd_2004-04-20.pdf) """ observation_period_end = pd.to_datetime(observation_period_end) if type(T) in [float, int]: start_date = [observation_period_end - pd.Timedelta(T - 1, unit=freq)] * size T = T * np.ones(size) else: start_date = [observation_period_end - pd.Timedelta(T[i] - 1, unit=freq) for i in range(size)] T = np.asarray(T) probability_of_post_purchase_death = random.beta(a, b, size=size) lambda_ = random.gamma(r, scale=1.0 / alpha, size=size) columns = ["customer_id", "date"] df = pd.DataFrame(columns=columns) for i in range(size): s = start_date[i] p = probability_of_post_purchase_death[i] l = lambda_[i] age = T[i] purchases = [[i, s - pd.Timedelta(1, unit=freq)]] next_purchase_in = random.exponential(scale=1.0 / l) alive = True while next_purchase_in < age and alive: purchases.append([i, s + pd.Timedelta(next_purchase_in, unit=freq)]) next_purchase_in += random.exponential(scale=1.0 / l) alive = random.random() > p df = df.append(pd.DataFrame(purchases, columns=columns)) return df.reset_index(drop=True) def pareto_nbd_model(T, r, alpha, s, beta, size=1): """ Generate artificial data according to the Pareto/NBD model. See [2]_ for model details. Parameters ---------- T: array_like The length of time observing new customers. r, alpha, s, beta: float Parameters in the model. See [1]_ size: int, optional The number of customers to generate Returns ------- :obj: DataFrame with index as customer_ids and the following columns: 'frequency', 'recency', 'T', 'lambda', 'mu', 'alive', 'customer_id' References ---------- .. [2]: Fader, Peter S. and Bruce G. S. Hardie (2005), "A Note on Deriving the Pareto/NBD Model and Related Expressions," <http://brucehardie.com/notes/009/>. """ if type(T) in [float, int]: T = T * np.ones(size) else: T = np.asarray(T) lambda_ = random.gamma(r, scale=1.0 / alpha, size=size) mus = random.gamma(s, scale=1.0 / beta, size=size) columns = ["frequency", "recency", "T", "lambda", "mu", "alive", "customer_id"] df = pd.DataFrame(np.zeros((size, len(columns))), columns=columns) for i in range(size): l = lambda_[i] mu = mus[i] time_of_death = random.exponential(scale=1.0 / mu) # hacky until I can find something better times = [] next_purchase_in = random.exponential(scale=1.0 / l) while np.sum(times) + next_purchase_in < min(time_of_death, T[i]): times.append(next_purchase_in) next_purchase_in = random.exponential(scale=1.0 / l) times = np.array(times).cumsum() df.iloc[i] = ( np.unique(np.array(times).astype(int)).shape[0], np.max(times if times.shape[0] > 0 else 0), T[i], l, mu, time_of_death > T[i], i, ) return df.set_index("customer_id") def modified_beta_geometric_nbd_model(T, r, alpha, a, b, size=1): """ Generate artificial data according to the MBG/NBD model. See [3]_, [4]_ for model details Parameters ---------- T: array_like The length of time observing new customers. r, alpha, a, b: float Parameters in the model. See [1]_ size: int, optional The number of customers to generate Returns ------- DataFrame with index as customer_ids and the following columns: 'frequency', 'recency', 'T', 'lambda', 'p', 'alive', 'customer_id' References ---------- .. [1]: '"Counting Your Customers" the Easy Way: An Alternative to the Pareto/NBD Model' (http://brucehardie.com/papers/bgnbd_2004-04-20.pdf) .. [2] Batislam, E.P., M. Denizel, A. 
Filiztekin (2007), "Empirical validation and comparison of models for customer base analysis," International Journal of Research in Marketing, 24 (3), 201-209. """ if type(T) in [float, int]: T = T * np.ones(size) else: T = np.asarray(T) probability_of_post_purchase_death = random.beta(a, b, size=size) lambda_ = random.gamma(r, scale=1.0 / alpha, size=size) columns = ["frequency", "recency", "T", "lambda", "p", "alive", "customer_id"] df = pd.DataFrame(np.zeros((size, len(columns))), columns=columns) for i in range(size): p = probability_of_post_purchase_death[i] l = lambda_[i] # hacky until I can find something better times = [] next_purchase_in = random.exponential(scale=1.0 / l) alive = random.random() > p # essentially the difference between this model and BG/NBD while (np.sum(times) + next_purchase_in < T[i]) and alive: times.append(next_purchase_in) next_purchase_in = random.exponential(scale=1.0 / l) alive = random.random() > p times = np.array(times).cumsum() df.iloc[i] = ( np.unique(np.array(times).astype(int)).shape[0], np.max(times if times.shape[0] > 0 else 0), T[i], l, p, alive, i, ) return df.set_index("customer_id") def beta_geometric_beta_binom_model(N, alpha, beta, gamma, delta, size=1): """ Generate artificial data according to the Beta-Geometric/Beta-Binomial Model. You may wonder why we can have frequency = n_periods, when frequency excludes their first order. When a customer purchases something, they are born, _and in the next period_ we start asking questions about their alive-ness. So really they customer has bought frequency + 1, and been observed for n_periods + 1 Parameters ---------- N: array_like Number of transaction opportunities for new customers. alpha, beta, gamma, delta: float Parameters in the model. See [1]_ size: int, optional The number of customers to generate Returns ------- DataFrame with index as customer_ids and the following columns: 'frequency', 'recency', 'n_periods', 'lambda', 'p', 'alive', 'customer_id' References ---------- .. [1] Fader, Peter S., Bruce G.S. Hardie, and Jen Shang (2010), "Customer-Base Analysis in a Discrete-Time Noncontractual Setting," Marketing Science, 29 (6), 1086-1108. """ if type(N) in [float, int, np.int64]: N = N * np.ones(size) else: N = np.asarray(N) probability_of_post_purchase_death = random.beta(a=alpha, b=beta, size=size) thetas = random.beta(a=gamma, b=delta, size=size) columns = ["frequency", "recency", "n_periods", "p", "theta", "alive", "customer_id"] df = pd.DataFrame(np.zeros((size, len(columns))), columns=columns) for i in range(size): p = probability_of_post_purchase_death[i] theta = thetas[i] # hacky until I can find something better current_t = 0 alive = True times = [] while current_t < N[i] and alive: alive = random.binomial(1, theta) == 0 if alive and random.binomial(1, p) == 1: times.append(current_t) current_t += 1 # adding in final death opportunity to agree with [1] if alive: alive = random.binomial(1, theta) == 0 df.iloc[i] = len(times), times[-1] + 1 if len(times) != 0 else 0, N[i], p, theta, alive, i return df
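

# A hedged usage sketch for the generators above; the parameter values are
# illustrative assumptions, not fitted estimates.
def _example_bgnbd_generation():
    df = beta_geometric_nbd_model(T=40, r=0.24, alpha=4.41, a=0.79, b=2.43, size=1000)
    # Each row is one simulated customer: repeat purchases ('frequency'),
    # time of the last purchase ('recency'), the observation length ('T'),
    # the latent draws ('lambda', 'p'), and whether the customer is 'alive'.
    print(df[["frequency", "recency", "T", "alive"]].head())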
PypiClean
/NREL-erad-0.0.0a0.tar.gz/NREL-erad-0.0.0a0/erad/utils/ditto_utils.py
from pathlib import Path import shutil import logging from typing import List # third-party libraries import boto3 from botocore import UNSIGNED from botocore.config import Config from ditto.store import Store from ditto.readers.opendss.read import Reader from ditto.network.network import Network from ditto.models.power_source import PowerSource import networkx as nx from networkx.readwrite import json_graph # internal libraries from erad.constants import SMARTDS_VALID_AREAS, SMARTDS_VALID_YEARS from erad.exceptions import SMARTDSInvalidInput, DittoException from erad.utils.util import timeit, write_file, path_validation from erad.utils.util import read_file, setup_logging logger = logging.getLogger(__name__) @timeit def download_aws_dir( bucket: str, path: str, target: str, unsigned=True, **kwargs ) -> None: """Utility function download data from AWS S3 directory. Args: bucket (str): Name of the bucket. path (str): S3 bucket prefix target (str): Path for downloading the data unsigned (bool): Indicate whether to use credential or not kwargs (dict): Keyword arguments accepted by `boto3.client` """ target = Path(target) if unsigned: client = boto3.client("s3", config=Config(signature_version=UNSIGNED)) else: if kwargs: client = boto3.client("s3", **kwargs) else: client = boto3.client("s3") # Handle missing / at end of prefix if not path.endswith("/"): path += "/" paginator = client.get_paginator("list_objects_v2") for result in paginator.paginate(Bucket=bucket, Prefix=path): # Download each file individually for key in result["Contents"]: # Calculate relative path rel_path = key["Key"][len(path) :] # Skip paths ending in / if not key["Key"].endswith("/"): local_file_path = target / rel_path local_file_path.parent.mkdir(parents=True, exist_ok=True) client.download_file(bucket, key["Key"], str(local_file_path)) @timeit def download_smartds_data( smartds_region: str, output_path: str = "./smart_ds_downloads", year: int = 2018, area: str = "SFO", s3_bucket_name: str = "oedi-data-lake", folder_name: str = "opendss_no_loadshapes", cache_folder: str = "cache", ) -> str: """Utility function to download SMARTDS data from AWS S3 bucket. Args: smartds_region (str): SMARTDS region name output_path (str): Path for downloaded data year (int): Valid year input for downloading the data area (str): Valid SMARTDS area s3_bucket_name (str): S3 bucket name storing the SMARTDS data folder_name (str): S3 bucket folder to download cache_folder (str): Folder path for caching the results Raises: SMARTDSInvalidInput: Raises this error if year and/or area provided is not valid. Returns: str: Folder path containing downloaded data. """ if year not in SMARTDS_VALID_YEARS or area not in SMARTDS_VALID_AREAS: raise SMARTDSInvalidInput( f"Not valid input! year= {year} area={area}, \ valid_years={SMARTDS_VALID_YEARS}, valid_areas={SMARTDS_VALID_AREAS}" ) output_path = Path(output_path) cache_folder = Path(cache_folder) output_path.mkdir(exist_ok=True) cache_folder.mkdir(exist_ok=True) cache_key = ( f"{smartds_region}__{year}__{area}__{s3_bucket_name}_{folder_name}" ) cache_data_folder = cache_folder / cache_key output_folder = output_path / cache_key if cache_data_folder.exists(): logger.info(f"Cache hit for {cache_data_folder}") shutil.copytree(cache_data_folder, output_folder, dirs_exist_ok=True) else: logger.info( f"Cache missed reaching to AWS for downloading the data ..." 
) output_folder.mkdir(exist_ok=True) prefix = f"SMART-DS/v1.0/{year}/{area}/{smartds_region}/scenarios/base_timeseries/{folder_name}/" download_aws_dir(s3_bucket_name, prefix, output_folder) shutil.copytree(output_folder, cache_data_folder, dirs_exist_ok=False) logger.info(f"Check the folder {output_folder} for downloaded data") return output_folder @timeit def _create_networkx_from_ditto( output_path: str, file_name: str, **kwargs ) -> List: """Creates networkx graph from OpenDSS model using Ditto. Args: output_path (str): Path to store the networkx data in json file format file_name (str): JSON file name used to export the network kwargs (dict): Keyword arguments accepted by Ditto Raises: DittoException: Raises if multiple sources are found. Returns: List: Pair of networkx graph and path containing JSON file """ file_name = Path(file_name).stem logger.debug( "Attempting to create NetworkX representation from OpenDSS \ files using DiTTo" ) path_validation(output_path) store = Store() reader = Reader( master_file=kwargs["master_file"], buscoordinates_file=kwargs["buscoordinates_file"], coordinates_delimiter=kwargs["coordinates_delimiter"], ) reader.parse(store) all_sources = [] for i in store.models: if isinstance(i, PowerSource) and i.connecting_element is not None: all_sources.append(i) elif isinstance(i, PowerSource): print( "Warning - a PowerSource element has a None connecting element" ) if len(all_sources) > 1: raise DittoException( f"This feeder has lots of sources {len(all_sources)}" ) ditto_graph = Network() ditto_graph.build(store, all_sources[0].connecting_element) ditto_graph.set_attributes(store) data = dict(ditto_graph.graph.nodes.data()) data_new = {} for node, node_data in data.items(): try: data_new[node] = node_data["positions"][0]._trait_values except Exception as e: connecting_node = node_data["connecting_element"] data_new[node] = data[connecting_node]["positions"][0]._trait_values adj_file = file_name + ".adjlist" nx.write_adjlist(ditto_graph.graph, output_path / adj_file) g = nx.read_adjlist(output_path / adj_file) nx.set_node_attributes(g, data_new) data = json_graph.adjacency_data(g) json_file = file_name + ".json" output_file = output_path / json_file write_file(data, output_file) logger.debug( f"Successfully created json file representing the network \ check the file {output_file}" ) return (g, output_file) def create_networkx_from_ditto( output_path: str, file_name: str, **kwargs ) -> None: """Creates networkx graph from OpenDSS model using Ditto. Args: output_path (str): Path to store the networkx data in json file format file_name (str): JSON file name used to export the network kwargs (dict): Keyword arguments accepted by Ditto """ try: output_path = Path(output_path) return _create_networkx_from_ditto(output_path, file_name, **kwargs) finally: for file_path in output_path.iterdir(): if file_path.suffix == ".adjlist": file_path.unlink(missing_ok=True) def create_networkx_from_json(json_file_path: str): """Returns networkx graph from JSON file.""" content = read_file(json_file_path) return json_graph.adjacency_graph(content)
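

# A hedged end-to-end sketch: download one SMART-DS feeder and turn it into a
# NetworkX graph. The region name and the OpenDSS file names are assumptions;
# point them at whatever the downloaded folder actually contains, and make
# sure the two output directories exist beforehand.
def _example_feeder_graph():
    folder = download_smartds_data(
        "P4R", output_path="./downloads", year=2018, area="SFO"
    )
    graph, json_path = create_networkx_from_ditto(
        output_path="./graphs",
        file_name="p4r_feeder",
        master_file=f"{folder}/Master.dss",              # assumed file name
        buscoordinates_file=f"{folder}/Buscoords.dss",   # assumed file name
        coordinates_delimiter=" ",
    )
    print(graph.number_of_nodes(), "nodes written to", json_path)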
PypiClean
/AyiinXd-0.0.8-cp311-cp311-macosx_10_9_universal2.whl/fipper/node_modules/wrappy/README.md
# wrappy Callback wrapping utility ## USAGE ```javascript var wrappy = require("wrappy") // var wrapper = wrappy(wrapperFunction) // make sure a cb is called only once // See also: http://npm.im/once for this specific use case var once = wrappy(function (cb) { var called = false return function () { if (called) return called = true return cb.apply(this, arguments) } }) function printBoo () { console.log('boo') } // has some rando property printBoo.iAmBooPrinter = true var onlyPrintOnce = once(printBoo) onlyPrintOnce() // prints 'boo' onlyPrintOnce() // does nothing // random property is retained! assert.equal(onlyPrintOnce.iAmBooPrinter, true) ```
PypiClean
/Cibyl-1.0.0.0rc1.tar.gz/Cibyl-1.0.0.0rc1/tripleo/insights/io.py
from dataclasses import dataclass, field
from typing import Optional

from tripleo.insights.defaults import (DEFAULT_ENVIRONMENT_FILE,
                                       DEFAULT_FEATURESET_FILE,
                                       DEFAULT_NODES_FILE,
                                       DEFAULT_QUICKSTART,
                                       DEFAULT_RELEASE_FILE,
                                       DEFAULT_THT)
from tripleo.insights.topology import Topology
from tripleo.utils.urls import URL


@dataclass
class DeploymentOutline:
    """Defines the input data required to form the deployment summary.
    """
    quickstart: URL = DEFAULT_QUICKSTART
    """URL of the TripleO QuickStart repository."""
    heat: URL = DEFAULT_THT
    """URL of the TripleO Heat Templates repository."""

    environment: str = DEFAULT_ENVIRONMENT_FILE
    """Path to the environment file, relative to the repository's root."""
    featureset: str = DEFAULT_FEATURESET_FILE
    """Path to the featureset file, relative to the repository's root."""
    nodes: str = DEFAULT_NODES_FILE
    """Path to the nodes file, relative to the repository's root."""
    release: str = DEFAULT_RELEASE_FILE
    """Path to the release file, relative to the repository's root."""

    overrides: dict = field(default_factory=lambda: {})
    """Defines the collection of deployment items that will override those
    coming from the deployment's files. The dictionary is meant to have the
    format: YAML item -> New value. In any case, the items in the dictionary
    must follow the same item naming and value types as the original file.

    This can be used as a way of altering the deployment without the need
    of modifying the files.

    For example, this dictionary: {'overcloud_ipv6': True} will force the
    featureset to use IPv6.
    """


@dataclass
class DeploymentSummary:
    """Defines the deployment that TripleO will perform based on the outline
    provided as input.

    Every field left as 'None' indicates that no information related to it
    could be found. Interpret it as missing content.
    """

    @dataclass
    class Components:
        """Holds information on each of the deployed components.
        """

        @dataclass
        class Cinder:
            """Information on the Cinder component.
            """
            backend: Optional[str] = None
            """Name of the backend supporting cinder."""

        @dataclass
        class Neutron:
            """Information on the Neutron component.
            """
            ip_version: Optional[str] = None
            """TCP/IP protocol version in use."""
            backend: Optional[str] = None
            """Name of the backend supporting neutron."""
            ml2_driver: Optional[str] = None
            """Comma-delimited list with the names of the mechanism drivers."""
            tls_everywhere: Optional[str] = None
            """State (On / Off) of TLS-Everywhere."""

        cinder: Cinder = field(
            default_factory=lambda *_: DeploymentSummary.Components.Cinder()
        )
        """Section for the Cinder component."""
        neutron: Neutron = field(
            default_factory=lambda *_: DeploymentSummary.Components.Neutron()
        )
        """Section for the Neutron component."""

    release: Optional[str] = None
    """Name of the OpenStack release deployed."""
    infra_type: Optional[str] = None
    """Infrastructure type of the cloud."""
    topology: Optional[Topology] = None
    """Definition of the deployed network."""
    components: Components = field(
        default_factory=lambda *_: DeploymentSummary.Components()
    )
    """Section dedicated to each of the components that form the deployment."""
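

# A hedged construction sketch: build an outline with the default repositories
# but force IPv6 through `overrides` (mirroring the docstring example above),
# then inspect a fresh, still-empty summary.
def _example_outline_and_summary():
    outline = DeploymentOutline(overrides={"overcloud_ipv6": True})
    summary = DeploymentSummary()
    print(outline.featureset, outline.overrides)
    print(summary.components.neutron.ip_version)  # None until it gets filled in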
PypiClean
/MachaTheKing-99.99.99-py3-none-any.whl/secret_santa/conversation.py
import turtle import time import christmas def santa_n_Macha(): screen= turtle.Screen() turtle.title('Santa vs Macha') screen.setup(width=1.0, height=1.0) screen.bgcolor('black') screen.tracer(0) tt = turtle.Turtle() info_turtle = turtle.Turtle() info_turtle.color('red') info_turtle.hideturtle() info_turtle.speed(0) info_turtle.penup() info_turtle.goto(-300,0) info_turtle.pendown() info_turtle.write('Next vache script yevarini vudesinchi kadhu just for FUN , Please fun ghane tesukondiiiiiii , A HUMBLE REQUEST') time.sleep(5) info_turtle.clear() my_col = ['red','purple','blue','green','orange','yellow'] tt.color('blue') tt.hideturtle() tt.speed(10) tt.penup() #Macha tt.right(180) tt.forward(200) tt.right(90) tt.pendown() #tt.right(90) tt.forward(100) tt.right(90) tt.circle(30) tt.right(90) tt.forward(40) #head to hands tt.left(45) tt.forward(60) tt.backward(60) tt.right(90) tt.forward(60) tt.backward(60) tt.right(315) tt.forward(110) #hands to legs tt.left(45) tt.forward(60) tt.backward(60) tt.right(90) tt.forward(60) tt.backward(60) tt.penup() tt.goto(-180,-10) tt.pendown() tt.write('*Macha') #santa tt2 = turtle.Turtle() tt2.color('red') tt2.hideturtle() tt2.speed(6) tt2.penup() #tt2.right(180) tt2.forward(200) tt2.left(90) tt2.pendown() #tt.right(90) tt2.forward(100) tt2.right(90) tt2.circle(30) tt2.right(90) tt2.forward(40) #head to hands tt2.left(45) tt2.forward(60) tt2.backward(60) tt2.right(90) tt2.forward(60) tt2.backward(60) tt2.right(315) tt2.forward(110) #hands to legs tt2.left(45) tt2.forward(60) tt2.backward(60) tt2.right(90) tt2.forward(60) tt2.backward(60) tt2.penup() tt2.goto(220,60) tt2.pendown() tt2.write('*Santa') #conversation #santa tt3 = turtle.Turtle() tt3.color('green') tt3.hideturtle() tt3.speed(0) tt3.penup() tt3.goto(220,180) tt3.pendown() tt3.write('Hello Sai Krishna') time.sleep(3) tt3.clear() #conversation #santa tt4 = turtle.Turtle() tt4.color('red') tt4.hideturtle() tt4.penup() tt4.goto(-220,180) tt4.pendown() tt4.write('OMG OMGG Santa is it real or am i dreaming') time.sleep(5) tt3.speed(6) tt4.speed(6) tt4.clear() tt3.write('Yes My child, \n Its Real') time.sleep(2) tt3.clear() tt4.write('Thank god I have lot of items \n in my amazon and flipkart cart for you') time.sleep(5) tt4.clear() tt3.write('Don"t Expect too much my dear Sai krishna \n Maku konni budjet problems unnay') time.sleep(6) tt3.clear() tt4.write('oho noooo , \n Firstly santa please don"t call me as Sai krishna \n just use "Macha", Macha is Fine') time.sleep(6) tt4.clear() tt3.write('Enduku child') time.sleep(3) tt3.clear() tt4.write('Sai Krishna is old santa \n Macha is in trending now,\n i think nek elantivi teleeka povachu santa') time.sleep(6) tt4.clear() tt3.write('Uff... Its ok my child...🤦🏽') time.sleep(4) tt3.clear() tt4.write('Santa, Chala bore kodutundhiii.....') time.sleep(4) tt4.clear() tt3.write('Please My Child, Konni ammaila numbers evvandi ani matram aadagaku') time.sleep(5) tt3.clear() tt4.write('uff.....🙆🏽‍♂️') time.sleep(3) tt4.clear() tt3.write('Shall we play a game Macha') time.sleep(4) tt3.clear() tt4.write('huhuu im too excited santa 🦸🏽‍♂️ 🦹🏽‍♀️ lets start') time.sleep(5) tt4.clear() tt3.write('But Truth matrame cheppali , \n this game is like truth or dare') time.sleep(5) tt3.clear() tt4.write('haha santa...! 
i think neku ee vishayam teleeka povachu \n Macha yeppudu nijale chepthadu , \n Lie ante entoo kuda naku teleedhu') time.sleep(6) tt4.clear() tt3.write('Edhi Comedy chese time kadhu my child') time.sleep(5) tt3.clear() tt4.write('🥲😰 (Inside : Navvalo yedalo kuda \n teleeedam ledhu karma 🤦‍♂️)') time.sleep(5) tt4.clear() tt4.write('Sarlendi santa aha game ento \n start cheyyandi nijale cheptha') time.sleep(5) tt4.clear() tt3.write('Nee bestie aina RANJITH Gurinchi \n em anukuntunnav cheppu') time.sleep(4) tt3.clear() tt4.write('Em cheppamantaru santa , \n Chepthe oka badha cheppaka pothe oka badha') time.sleep(6) tt4.clear() tt4.write('Aha Dashboard yedho chesthunnam kadha , \n Kastamainavi anni naku echi easy tasks emo thanu chesthunnadu') time.sleep(6) tt4.clear() tt4.write('Poni avi aina chesthunnada ante , \n Phone nokkuthaa Phone matladuthanee untadu') time.sleep(6) tt4.clear() tt4.write('Chivaraku em chestham Deadline time lo thana tasks kuda \n nenee cheyyalsina paristhithi ') time.sleep(6) tt4.clear() tt4.write('Eppatiki oka regret enti ante Callcenter lo \n join aipothe best emo anipistha untadhu ,\n Mana vadu kalipee pulihoraki') time.sleep(6) tt4.clear() tt3.write('Ounaa...😕 🙁 , Enni kastalu ochayi macha, \n Nee kastalu pagavadiki kuda rakudadhu') time.sleep(6) tt3.clear() tt3.write('Mari, What about SAI ABINESH my child') time.sleep(4) tt3.clear() tt4.write('Emo santa , Konni sarlu bayam vestha untadhi, \n Sai abinesh valla Naku mental ochesthademo ') time.sleep(6) tt4.clear() tt4.write('Yee erragadda hospital lo aina join chesestharemo ani ') time.sleep(5) tt4.clear() tt3.write('Em aindhi macha') time.sleep(3) tt3.clear() tt4.write('Abinesh code medha chese prayogalu chusthe, \n Athanu aadighee questions chusthe yeppudoo kappudu \n naku mental ochisthadi santa ') time.sleep(6) tt4.clear() tt3.write('Hoo , Mari Vamsi gurinchi em anukuntunnav') time.sleep(6) tt3.clear() tt4.write('Em cheppali santa Over action candidate') time.sleep(6) tt4.clear() tt3.write('Ouna em aindhi macha') time.sleep(4) tt3.clear() tt4.write('Ounu santa Yedhoo chesthunna anukuntadu , em cheyyadu \n aha kullipoina jokes okati vesthadu') time.sleep(6) tt4.clear() tt4.write('Ninna kaka monna ochadaa , \n Appudee nannu commanding chesthunnadu') time.sleep(6) tt4.clear() tt4.write('Yevarini command chesina naku badha ledhu') time.sleep(4) tt4.clear() tt4.write('Andarini vadilesi nannu commanding chesthunnadu') time.sleep(4) tt4.clear() tt4.write('Work cheydu em cheydu \n Memu chesina work antha \n athanu chesa annatlu cheppukuntadu santa') time.sleep(6) tt4.clear() tt3.write('Ouna, Enni kastalu ochay macha') time.sleep(4) tt3.clear() tt3.write('Mari last gha Meenakshi Gurinchi em antav macha') time.sleep(5) tt3.clear() tt4.write('Cheppedhi em undhi santa He is like one type of psycho') time.sleep(6) tt4.clear() tt4.write('Emee kuda overaction candidate ehe, \n Yekkado yee forest lono puttalsindhi , \n by mistake ekkadaki ochesindhi') time.sleep(7) tt4.clear() tt4.write('Narakam chupinchesthadi santa babu \n papam chesukune athanu ela bathukuthadoo entoo') time.sleep(6) tt4.clear() tt3.write('Ayyooo, Oka doubt \n Why are you pronouncing with "HE"') time.sleep(5) tt3.clear() tt4.write('i too have some serious doubts on that') time.sleep(5) tt4.clear() #Meenakshi tt5 = turtle.Turtle() tt5.color('Yellow') tt5.hideturtle() tt5.speed(6) tt5.penup() #tt2.right(180) tt5.forward(0) tt5.left(90) tt5.pendown() #tt.right(90) tt5.forward(100) tt5.right(90) tt5.circle(30) tt5.right(90) tt5.forward(40) #head to hands 
tt5.left(45) tt5.forward(60) tt5.backward(60) tt5.right(90) tt5.forward(60) tt5.backward(60) tt5.right(315) tt5.forward(110) #hands to legs tt5.left(45) tt5.forward(60) tt5.backward(60) tt5.right(90) tt5.forward(60) tt5.backward(60) tt5.penup() tt5.goto(0,60) tt5.pendown() tt5.write('Meenakshi') tt6 = turtle.Turtle() tt6.color('yellow') tt6.hideturtle() tt6.penup() tt6.goto(0,180) tt6.pendown() tt6.write('Em matladutunnav Macha') time.sleep(5) tt6.clear() tt4.write('Ayyo sorry meenakshi nuv ekkade vunnav ani teleeka \n Nijam cheppesaa') time.sleep(6) tt4.clear() tt6.write('Nee yenkamma , Cheptha cheptha naku time vasthadi Macha') time.sleep(6) tt6.clear() tt4.write('sorry Meenakshi') time.sleep(4) tt4.clear() tt5.clear() tt4.write('Santa last gha oka question') time.sleep(5) tt4.clear() tt4.write('Naa Jeevitham lo unna kastalanni, \n Povalante em cheyyali santa ') time.sleep(6) tt4.clear() tt3.write('Ala avvalante 1st nuv over action cheyyadam manali , Automatic gha antha set avthadi') time.sleep(6) tt3.clear() tt3.write('Ok Macha I need to GO,\n will meet you again, \n Always Keep this smile like this \n (i Hope you are smiling now)') time.sleep(6) tt3.clear() tt4.write('Ok santa Bye and Miss u') time.sleep(5) tt4.clear() tt.clear() tt2.clear() exit_info_turtle = turtle.Turtle() exit_info_turtle.color('red') exit_info_turtle.hideturtle() exit_info_turtle.speed(0) exit_info_turtle.penup() exit_info_turtle.goto(-300,0) exit_info_turtle.pendown() exit_info_turtle.write('Humble request andi , \n Yevaru serious gha tesukokandi , \n Just casual gha and fun kosamee chesina, \n if memalni hurt chesi unte im really sorry and im ready to remove this module') time.sleep(10) exit_info_turtle.clear() exit_info_turtle = turtle.Turtle() exit_info_turtle.color('red') exit_info_turtle.hideturtle() exit_info_turtle.speed(0) exit_info_turtle.penup() exit_info_turtle.goto(-300,0) exit_info_turtle.pendown() exit_info_turtle.write('Merry Christmas to you and your family , \n #221133') time.sleep(10) exit_info_turtle.clear() print('successfull completed') turtle.bye() turtle.mainloop() return 'Done' if __name__ == '__main__': santa_n_Macha()
PypiClean
/MaterialDjango-0.2.5.tar.gz/MaterialDjango-0.2.5/bower_components/prism/components/prism-d.min.js
Prism.languages.d=Prism.languages.extend("clike",{string:[/\b[rx]"(?:\\[\s\S]|[^\\"])*"[cwd]?/,/\bq"(?:\[[\s\S]*?\]|\([\s\S]*?\)|<[\s\S]*?>|\{[\s\S]*?\})"/,/\bq"([_a-zA-Z][_a-zA-Z\d]*)(?:\r?\n|\r)[\s\S]*?(?:\r?\n|\r)\1"/,/\bq"(.)[\s\S]*?\1"/,/'(?:\\'|\\?[^']+)'/,/(["`])(?:\\[\s\S]|(?!\1)[^\\])*\1[cwd]?/],number:[/\b0x\.?[a-f\d_]+(?:(?!\.\.)\.[a-f\d_]*)?(?:p[+-]?[a-f\d_]+)?[ulfi]*/i,{pattern:/((?:\.\.)?)(?:\b0b\.?|\b|\.)\d[\d_]*(?:(?!\.\.)\.[\d_]*)?(?:e[+-]?\d[\d_]*)?[ulfi]*/i,lookbehind:!0}],keyword:/\$|\b(?:abstract|alias|align|asm|assert|auto|body|bool|break|byte|case|cast|catch|cdouble|cent|cfloat|char|class|const|continue|creal|dchar|debug|default|delegate|delete|deprecated|do|double|else|enum|export|extern|false|final|finally|float|for|foreach|foreach_reverse|function|goto|idouble|if|ifloat|immutable|import|inout|int|interface|invariant|ireal|lazy|long|macro|mixin|module|new|nothrow|null|out|override|package|pragma|private|protected|public|pure|real|ref|return|scope|shared|short|static|struct|super|switch|synchronized|template|this|throw|true|try|typedef|typeid|typeof|ubyte|ucent|uint|ulong|union|unittest|ushort|version|void|volatile|wchar|while|with|__(?:(?:FILE|MODULE|LINE|FUNCTION|PRETTY_FUNCTION|DATE|EOF|TIME|TIMESTAMP|VENDOR|VERSION)__|gshared|traits|vector|parameters)|string|wstring|dstring|size_t|ptrdiff_t)\b/,operator:/\|[|=]?|&[&=]?|\+[+=]?|-[-=]?|\.?\.\.|=[>=]?|!(?:i[ns]\b|<>?=?|>=?|=)?|\bi[ns]\b|(?:<[<>]?|>>?>?|\^\^|[*\/%^~])=?/}),Prism.languages.d.comment=[/^\s*#!.+/,{pattern:/(^|[^\\])\/\+(?:\/\+[\s\S]*?\+\/|[\s\S])*?\+\//,lookbehind:!0}].concat(Prism.languages.d.comment),Prism.languages.insertBefore("d","comment",{"token-string":{pattern:/\bq\{(?:\{[^}]*\}|[^}])*\}/,alias:"string"}}),Prism.languages.insertBefore("d","keyword",{property:/\B@\w*/}),Prism.languages.insertBefore("d","function",{register:{pattern:/\b(?:[ABCD][LHX]|E[ABCD]X|E?(?:BP|SP|DI|SI)|[ECSDGF]S|CR[0234]|DR[012367]|TR[3-7]|X?MM[0-7]|R[ABCD]X|[BS]PL|R[BS]P|[DS]IL|R[DS]I|R(?:[89]|1[0-5])[BWD]?|XMM(?:[89]|1[0-5])|YMM(?:1[0-5]|\d))\b|\bST(?:\([0-7]\)|\b)/,alias:"variable"}});
PypiClean
/DLTA-AI-1.1.tar.gz/DLTA-AI-1.1/DLTA_AI_app/mmdetection/mmdet/models/seg_heads/panoptic_fusion_heads/maskformer_fusion_head.py
import torch
import torch.nn.functional as F

from mmdet.core.evaluation.panoptic_utils import INSTANCE_OFFSET
from mmdet.core.mask import mask2bbox
from mmdet.models.builder import HEADS
from .base_panoptic_fusion_head import BasePanopticFusionHead


@HEADS.register_module()
class MaskFormerFusionHead(BasePanopticFusionHead):

    def __init__(self,
                 num_things_classes=80,
                 num_stuff_classes=53,
                 test_cfg=None,
                 loss_panoptic=None,
                 init_cfg=None,
                 **kwargs):
        super().__init__(num_things_classes, num_stuff_classes, test_cfg,
                         loss_panoptic, init_cfg, **kwargs)

    def forward_train(self, **kwargs):
        """MaskFormerFusionHead has no training loss."""
        return dict()

    def panoptic_postprocess(self, mask_cls, mask_pred):
        """Panoptic segmentation inference.

        Args:
            mask_cls (Tensor): Classification outputs of shape
                (num_queries, cls_out_channels) for an image.
                Note `cls_out_channels` should include background.
            mask_pred (Tensor): Mask outputs of shape
                (num_queries, h, w) for an image.

        Returns:
            Tensor: Panoptic segmentation result of shape \
                (h, w), each element in Tensor means: \
                ``segment_id = _cls + instance_id * INSTANCE_OFFSET``.
        """
        object_mask_thr = self.test_cfg.get('object_mask_thr', 0.8)
        iou_thr = self.test_cfg.get('iou_thr', 0.8)
        filter_low_score = self.test_cfg.get('filter_low_score', False)

        scores, labels = F.softmax(mask_cls, dim=-1).max(-1)
        mask_pred = mask_pred.sigmoid()

        keep = labels.ne(self.num_classes) & (scores > object_mask_thr)
        cur_scores = scores[keep]
        cur_classes = labels[keep]
        cur_masks = mask_pred[keep]

        cur_prob_masks = cur_scores.view(-1, 1, 1) * cur_masks

        h, w = cur_masks.shape[-2:]
        panoptic_seg = torch.full((h, w),
                                  self.num_classes,
                                  dtype=torch.int32,
                                  device=cur_masks.device)
        if cur_masks.shape[0] == 0:
            # We didn't detect any mask :(
            pass
        else:
            cur_mask_ids = cur_prob_masks.argmax(0)
            instance_id = 1
            for k in range(cur_classes.shape[0]):
                pred_class = int(cur_classes[k].item())
                isthing = pred_class < self.num_things_classes
                mask = cur_mask_ids == k
                mask_area = mask.sum().item()
                original_area = (cur_masks[k] >= 0.5).sum().item()

                if filter_low_score:
                    mask = mask & (cur_masks[k] >= 0.5)

                if mask_area > 0 and original_area > 0:
                    if mask_area / original_area < iou_thr:
                        continue

                    if not isthing:
                        # different stuff regions of same class will be
                        # merged here, and stuff share the instance_id 0.
                        panoptic_seg[mask] = pred_class
                    else:
                        panoptic_seg[mask] = (
                            pred_class + instance_id * INSTANCE_OFFSET)
                        instance_id += 1

        return panoptic_seg

    def semantic_postprocess(self, mask_cls, mask_pred):
        """Semantic segmentation postprocess.

        Args:
            mask_cls (Tensor): Classification outputs of shape
                (num_queries, cls_out_channels) for an image.
                Note `cls_out_channels` should include background.
            mask_pred (Tensor): Mask outputs of shape
                (num_queries, h, w) for an image.

        Returns:
            Tensor: Semantic segmentation result of shape \
                (cls_out_channels, h, w).
        """
        # TODO add semantic segmentation result
        raise NotImplementedError

    def instance_postprocess(self, mask_cls, mask_pred):
        """Instance segmentation postprocess.

        Args:
            mask_cls (Tensor): Classification outputs of shape
                (num_queries, cls_out_channels) for an image.
                Note `cls_out_channels` should include background.
            mask_pred (Tensor): Mask outputs of shape
                (num_queries, h, w) for an image.

        Returns:
            tuple[Tensor]: Instance segmentation results.

            - labels_per_image (Tensor): Predicted labels,\
                shape (n, ).
            - bboxes (Tensor): Bboxes and scores with shape (n, 5) of \
                positive region in binary mask, the last column is scores.
            - mask_pred_binary (Tensor): Instance masks of \
                shape (n, h, w).
        """
        max_per_image = self.test_cfg.get('max_per_image', 100)
        num_queries = mask_cls.shape[0]
        # shape (num_queries, num_class)
        scores = F.softmax(mask_cls, dim=-1)[:, :-1]
        # shape (num_queries * num_class, )
        labels = torch.arange(self.num_classes, device=mask_cls.device).\
            unsqueeze(0).repeat(num_queries, 1).flatten(0, 1)
        scores_per_image, top_indices = scores.flatten(0, 1).topk(
            max_per_image, sorted=False)
        labels_per_image = labels[top_indices]

        query_indices = top_indices // self.num_classes
        mask_pred = mask_pred[query_indices]

        # extract things
        is_thing = labels_per_image < self.num_things_classes
        scores_per_image = scores_per_image[is_thing]
        labels_per_image = labels_per_image[is_thing]
        mask_pred = mask_pred[is_thing]

        mask_pred_binary = (mask_pred > 0).float()
        mask_scores_per_image = (mask_pred.sigmoid() *
                                 mask_pred_binary).flatten(1).sum(1) / (
                                     mask_pred_binary.flatten(1).sum(1) + 1e-6)
        det_scores = scores_per_image * mask_scores_per_image
        mask_pred_binary = mask_pred_binary.bool()
        bboxes = mask2bbox(mask_pred_binary)
        bboxes = torch.cat([bboxes, det_scores[:, None]], dim=-1)

        return labels_per_image, bboxes, mask_pred_binary

    def simple_test(self,
                    mask_cls_results,
                    mask_pred_results,
                    img_metas,
                    rescale=False,
                    **kwargs):
        """Test segmentation without test-time augmentation.

        Only the output of the last decoder layer is used.

        Args:
            mask_cls_results (Tensor): Mask classification logits,
                shape (batch_size, num_queries, cls_out_channels).
                Note `cls_out_channels` should include background.
            mask_pred_results (Tensor): Mask logits, shape
                (batch_size, num_queries, h, w).
            img_metas (list[dict]): List of image information.
            rescale (bool, optional): If True, return boxes in
                original image space. Default False.

        Returns:
            list[dict[str, Tensor | tuple[Tensor]]]: Semantic segmentation \
                results and panoptic segmentation results for each \
                image.

            .. code-block:: none

                [
                    {
                        'pan_results': Tensor, # shape = [h, w]
                        'ins_results': tuple[Tensor],
                        # semantic segmentation results are not supported yet
                        'sem_results': Tensor
                    },
                    ...
                ]
        """
        panoptic_on = self.test_cfg.get('panoptic_on', True)
        semantic_on = self.test_cfg.get('semantic_on', False)
        instance_on = self.test_cfg.get('instance_on', False)
        assert not semantic_on, 'semantic segmentation '\
            'results are not supported yet.'

        results = []
        for mask_cls_result, mask_pred_result, meta in zip(
                mask_cls_results, mask_pred_results, img_metas):
            # remove padding
            img_height, img_width = meta['img_shape'][:2]
            mask_pred_result = mask_pred_result[:, :img_height, :img_width]

            if rescale:
                # return result in original resolution
                ori_height, ori_width = meta['ori_shape'][:2]
                mask_pred_result = F.interpolate(
                    mask_pred_result[:, None],
                    size=(ori_height, ori_width),
                    mode='bilinear',
                    align_corners=False)[:, 0]

            result = dict()
            if panoptic_on:
                pan_results = self.panoptic_postprocess(
                    mask_cls_result, mask_pred_result)
                result['pan_results'] = pan_results

            if instance_on:
                ins_results = self.instance_postprocess(
                    mask_cls_result, mask_pred_result)
                result['ins_results'] = ins_results

            if semantic_on:
                sem_results = self.semantic_postprocess(
                    mask_cls_result, mask_pred_result)
                result['sem_results'] = sem_results

            results.append(result)

        return results
PypiClean
/LibRecommender-1.3.0-cp38-cp38-macosx_10_9_x86_64.whl/libserving/serialization/redis.py
import contextlib import os import redis import ujson @contextlib.contextmanager def redis_connection(host: str, port: int, db: int): r = None try: r = redis.Redis(host=host, port=port, db=db, decode_responses=True) yield r except (redis.ConnectionError, redis.DataError): raise finally: if r: r.close() def knn2redis(path: str, host: str = "localhost", port: int = 6379, db: int = 0): """Save KNN model to redis. Parameters ---------- path : str Model saving path. host : str, default: "localhost" Redis host. port : int, default: 6379 Redis port db : int, default: 0 Redis db number """ with redis_connection(host, port, db) as r: model_name2redis(path, r) id_mapping2redis(path, r) user_consumed2redis(path, r) sim2redis(path, r) def embed2redis(path: str, host: str = "localhost", port: int = 6379, db: int = 0): """Save Embed model to redis. Parameters ---------- path : str Model saving path. host : str, default: "localhost" Redis host. port : int, default: 6379 Redis port db : int, default: 0 Redis db number """ with redis_connection(host, port, db) as r: model_name2redis(path, r) id_mapping2redis(path, r) user_consumed2redis(path, r) user_embed2redis(path, r) def tf2redis(path: str, host: str = "localhost", port: int = 6379, db: int = 0): """Save TF model to redis. Parameters ---------- path : str Model saving path. host : str, default: "localhost" Redis host. port : int, default: 6379 Redis port db : int, default: 0 Redis db number """ with redis_connection(host, port, db) as r: model_name2redis(path, r) id_mapping2redis(path, r) user_consumed2redis(path, r) features2redis(path, r) def online2redis(path: str, host: str = "localhost", port: int = 6379, db: int = 0): """Save online computing model to redis. Parameters ---------- path : str Model saving path. host : str, default: "localhost" Redis host. 
port : int, default: 6379 Redis port db : int, default: 0 Redis db number """ with redis_connection(host, port, db) as r: model_name2redis(path, r) id_mapping2redis(path, r) user_consumed2redis(path, r) features2redis(path, r) user_sparse2redis(path, r) user_dense2redis(path, r) def model_name2redis(path: str, r: redis.Redis): model_name_path = os.path.join(path, "model_name.json") with open(model_name_path) as f: m = ujson.load(f) r.set("model_name", m["model_name"]) def id_mapping2redis(path: str, r: redis.Redis): user2id_path = os.path.join(path, "user2id.json") id2item_path = os.path.join(path, "id2item.json") item2id_path = os.path.join(path, "item2id.json") with open(user2id_path) as f1, open(id2item_path) as f2, open(item2id_path) as f3: user2id = ujson.load(f1) id2item = ujson.load(f2) item2id = ujson.load(f3) r.hset("user2id", mapping=user2id) r.hset("id2item", mapping=id2item) r.hset("item2id", mapping=item2id) def user_consumed2redis(path: str, r: redis.Redis): user_consumed_path = os.path.join(path, "user_consumed.json") with open(user_consumed_path) as f: user_consumed = ujson.load(f) pipe = r.pipeline() for u, items in user_consumed.items(): pipe.hset("user_consumed", u, ujson.dumps(items)) pipe.execute() def sim2redis(path: str, r: redis.Redis): sim_path = os.path.join(path, "sim.json") with open(sim_path) as f: sim = ujson.load(f) pipe = r.pipeline() for k, k_sims in sim.items(): pipe.hset("k_sims", k, ujson.dumps(k_sims)) pipe.execute() def user_embed2redis(path: str, r: redis.Redis): embed_path = os.path.join(path, "user_embed.json") with open(embed_path) as f: user_embeds = ujson.load(f) pipe = r.pipeline() for u, embed in user_embeds.items(): pipe.hset("user_embed", u, ujson.dumps(embed)) pipe.execute() def features2redis(path: str, r: redis.Redis): feature_path = os.path.join(path, "features.json") with open(feature_path) as f: feats = ujson.load(f) r.set("n_users", feats["n_users"]) r.set("n_items", feats["n_items"]) if "max_seq_len" in feats: r.set("max_seq_len", feats["max_seq_len"]) if "user_sparse_col_index" in feats: r.hset("feature", "user_sparse", 1) r.set("user_sparse_col_index", ujson.dumps(feats["user_sparse_col_index"])) pipe = r.pipeline() for u, vals in enumerate(feats["user_sparse_values"]): pipe.hset("user_sparse_values", str(u), ujson.dumps(vals)) pipe.execute() if "item_sparse_col_index" in feats: r.hset("feature", "item_sparse", 1) r.set("item_sparse_col_index", ujson.dumps(feats["item_sparse_col_index"])) pipe = r.pipeline() for vals in feats["item_sparse_values"]: pipe.rpush("item_sparse_values", ujson.dumps(vals)) pipe.execute() if "user_dense_col_index" in feats: r.hset("feature", "user_dense", 1) r.set("user_dense_col_index", ujson.dumps(feats["user_dense_col_index"])) pipe = r.pipeline() for u, vals in enumerate(feats["user_dense_values"]): pipe.hset("user_dense_values", str(u), ujson.dumps(vals)) pipe.execute() if "item_dense_col_index" in feats: r.hset("feature", "item_dense", 1) r.set("item_dense_col_index", ujson.dumps(feats["item_dense_col_index"])) pipe = r.pipeline() for vals in feats["item_dense_values"]: pipe.rpush("item_dense_values", ujson.dumps(vals)) pipe.execute() def user_sparse2redis(path: str, r: redis.Redis): user_sparse_fields_path = os.path.join(path, "user_sparse_fields.json") if os.path.exists(user_sparse_fields_path): with open(user_sparse_fields_path) as f: user_sparse_fields = ujson.load(f) r.hset("user_sparse_fields", mapping=user_sparse_fields) user_sparse_idx_mapping_path = os.path.join(path, 
"user_sparse_idx_mapping.json") if os.path.exists(user_sparse_idx_mapping_path): with open(user_sparse_idx_mapping_path) as f: user_sparse_idx_mapping = ujson.load(f) for col, idx_mapping in user_sparse_idx_mapping.items(): col_name = f"user_sparse_idx_mapping__{col}" r.hset(col_name, mapping=idx_mapping) def user_dense2redis(path: str, r: redis.Redis): user_dense_fields_path = os.path.join(path, "user_dense_fields.json") if os.path.exists(user_dense_fields_path): with open(user_dense_fields_path) as f: user_dense_fields = ujson.load(f) r.hset("user_dense_fields", mapping=user_dense_fields)
PypiClean
/OWSLib-0.29.2.tar.gz/OWSLib-0.29.2/owslib/csw.py
from .catalogue import csw2, csw3
from .catalogue.csw2 import CswRecord
from .util import clean_ows_url, Authentication


def CatalogueServiceWeb(url, lang='en-US', version='2.0.2', timeout=10,
                        skip_caps=False, username=None, password=None,
                        auth=None, headers=None):
    """
    CSW factory function, returns a version-specific CatalogueServiceWeb object

    @type url: string
    @param url: the URL of the CSW
    @type lang: string
    @param lang: the language (default is 'en-US')
    @type version: string
    @param version: version (default is '2.0.2')
    @type timeout: string
    @param timeout: timeout in seconds
    @type skip_caps: string
    @param skip_caps: whether to skip GetCapabilities processing on init
                      (default is False)
    @type username: string
    @param username: username for HTTP basic authentication
    @type password: string
    @param password: password for HTTP basic authentication
    @type auth: string
    @param auth: instance of owslib.util.Authentication
    @type headers: dict
    @param headers: HTTP headers to send with requests
    @return: initialized CatalogueServiceWeb object
    """

    if auth:
        if username:
            auth.username = username
        if password:
            auth.password = password
    else:
        auth = Authentication(username, password)

    clean_url = clean_ows_url(url)

    if version == '2.0.2':
        return csw2.CatalogueServiceWeb(
            clean_url, lang=lang, version=version, timeout=timeout,
            skip_caps=skip_caps, username=username, password=password,
            auth=auth, headers=headers)
    if version == '3.0.0':
        return csw3.CatalogueServiceWeb(
            clean_url, lang=lang, version=version, timeout=timeout,
            skip_caps=skip_caps, username=username, password=password,
            auth=auth, headers=headers)

    raise NotImplementedError('The CSW version ({}) you requested is'
                              ' not implemented. Please use 2.0.2 or'
                              ' 3.0.0'.format(version))
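

# A hedged usage sketch for the factory above. The catalogue URL is a
# placeholder; `getrecords2` is the record-search call on the 2.0.2 object.
def _example_csw_query():
    csw = CatalogueServiceWeb("https://example.org/csw", version="2.0.2")
    csw.getrecords2(maxrecords=5)  # populates csw.records
    for identifier, record in csw.records.items():
        print(identifier, record.title)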
PypiClean
/MJOLNIR-1.3.1.tar.gz/MJOLNIR-1.3.1/docs/Tutorials/MJOLNIRHistory.rst
History of data files
=====================

Both during an experiment and especially after, one can lose the overview of which data files contain which data. A command line tool that prints out the most important properties of each data file helps overcome this. The script is called *MJOLNIRHistory* and it has the following help text::

    $ MJOLNIRHistory -h
    usage: MJOLNIRHistory [-h] [-s SAVE] [-r] [DataFile [DataFile ...]]

    History tool for displaying files and command for selected data files.

    positional arguments:
      DataFile              Data file(s) to be used. If none provided file
                            dialogue will appear. Using string format, directory
                            and year is also possible. See documentation.

    optional arguments:
      -h, --help            show this help message and exit
      -s SAVE, --save SAVE  Location to which the generated history will be
                            saved.
      -r, --reuse           Set flag to reuse files from previous usage. Default
                            false.

This script prints out one line for each selected data file, containing the most important information about the scan, including the name, scan command, sample name and comment. As an example, running this on the MnF2 data file camea2018n000500.hdf one gets the following output::

    $ MJOLNIRHistory camea2018n000500.hdf
    camea2018n000500.hdf: sc a3 50 da3 1 np 141 mn 50000 MnF2 MV=80 Ei=10 2t=-20 10 K
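
The generated history can also be written directly to a text file through the *-s* flag, for example (the second file name below is only illustrative)::

    $ MJOLNIRHistory -s History.txt camea2018n000500.hdf camea2018n000501.hdf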
PypiClean
/Broad_genepy-1.2.6-py3-none-any.whl/genepy/epigenetics/plot.py
import os
import subprocess

import pandas as pd
import numpy as np
import pysam
import pyBigWig
import seaborn as sns
import matplotlib.pyplot as plt
from matplotlib import cm
from pybedtools import BedTool

from genepy.epigenetics import chipseq as chip
from genepy.utils import helper as h

# colormap names used by `getPeaksAt` below; the exact palette is an
# assumption, any list of matplotlib colormap names works
cmaps = ["Blues", "Oranges", "Greens", "Reds", "Purples", "Greys"]


def plotAverageOfSamples(samples, folder="", showAll=False, maxv=None, minv=None):
    res = []
    plt.figure()
    plt.ylim(minv, maxv)
    for sample in samples:
        data = pd.read_csv(sample, sep='\t', skiprows=1, header=None,
                           names=['chr', 'start', 'end', 'name', "foldchange", "."] + list(range(600)))
        r = data[list(range(600))].mean().tolist()
        res.append(r)
        if showAll:
            sns.lineplot(data=np.array(r), color="#BFBFFF")
    sns.lineplot(data=np.array(res).mean(0), color="#1F1FFF")
    if folder:
        plt.savefig(folder + "_averageofsamples.pdf")
    return res


def pysam_getPeaksAt(peaks, bams, folder='data/seqs/', window=1000, numpeaks=1000, numthreads=8):
    # get pysam data
    # ask for counts only at a specific locus, based on windows of
    # center +- size from sorted MYC peaks
    # for each count, do a rolling average (or a convolving of the data) with numpy
    # append to an array
    # return array, normalized
    loaded = {}
    res = {i: np.zeros((len(peaks), window * 2)) for i in bams}
    peaks = peaks.sort_values(by="foldchange", ascending=False).iloc[:numpeaks]
    peaks.chrom = peaks.chrom.astype(str)
    for val in bams:
        loaded.update({val: pysam.AlignmentFile(
            folder + val, 'rb', threads=numthreads)})

    for k, bam in loaded.items():
        for num, (i, val) in enumerate(peaks.iterrows()):
            print(int(num / len(peaks)), end='\r')
            center = int((val['start'] + val['end']) / 2)
            for pileupcolumn in bam.pileup(val['chrom'], start=center - window,
                                           stop=center + window, truncate=True):
                res[k][num][pileupcolumn.pos - (center - window)] = pileupcolumn.n

    fig, ax = plt.subplots(1, len(res))
    for i, (k, val) in enumerate(res.items()):
        sns.heatmap(val, ax=ax[i])
        ax[i].set_title(k.split('.')[0])
    fig.show()
    return res, fig


def bedtools_getPeaksAt(peaks, bams, folder='data/seqs/', window=1000, numpeaks=1000, numthreads=8):
    """
    get pysam data
    ask for counts only at a specific locus, based on windows of
    center +- size from sorted MYC peaks
    for each count, do a rolling average (or a convolving of the data) with numpy
    append to an array
    return array, normalized
    """
    center = [int((val['start'] + val['end']) / 2) for k, val in peaks.iterrows()]
    peaks['start'] = [c - window for c in center]
    peaks['end'] = [c + window - 1 for c in center]
    peaks[peaks.columns[:3]].sort_values(by=['chrom', 'start']).to_csv(
        'temp/peaks.bed', sep='\t', index=False, header=False)
    bedpeaks = BedTool('temp/peaks.bed')

    fig, ax = plt.subplots(1, len(bams))
    peakset = peaks["foldchange"].values.argsort()[::-1][:numpeaks]
    for num, bam in enumerate(bams):
        coverage = BedTool(folder + bam).intersect(bedpeaks).genome_coverage(bga=True, split=True)\
            .intersect(bedpeaks).to_dataframe(names=['chrom', 'start', 'end', 'coverage'])
        cov = np.zeros((len(peaks), window * 2), dtype=int)
        j = 0
        for i, (k, val) in enumerate(peaks.iterrows()):
            print(i / len(peaks), end='\r')
            while coverage.iloc[j].start > val.start:
                j -= 1
            while coverage.iloc[j].start < val.end:
                cov[i][coverage.iloc[j].start - val.start:coverage.iloc[j].end - val.start] = \
                    coverage.iloc[j].coverage
                j += 1
        sns.heatmap(cov, ax=ax[num])
        ax[num].set_title(bam.split('.')[0])
    fig.show()
    return None, fig


def makeProfiles(matx=[], folder='', matnames=[], title='',
                 name='temp/peaksat.pdf', refpoint="TSS", scale=None,
                 sort=False, withDeeptools=True, cluster=1, vmax=None, vmin=None,
                 overlap=False, legendLoc=None):
    if withDeeptools:
        if not (len(matnames) == 2
and len(matx) == 2): raise ValueError('you need two mat.gz files and two names') h.createFoldersFor(name) cmd = 'computeMatrixOperations relabel -m ' cmd += matx[0] + ' -o '+matx[0]+' --groupLabels '+matnames[0] cmd += ' && computeMatrixOperations relabel -m ' cmd += matx[1] + ' -o '+matx[1]+' --groupLabels '+matnames[1] cmd += ' && computeMatrixOperations rbind -m ' cmd += matx[0] + ' ' + matx[1] + " -o " + \ '.'.join(name.split('.')[:-1]) + ".gz" cmd += ' && plotProfile' cmd += " --matrixFile " + '.'.join(name.split('.')[:-1]) + ".gz" cmd += " --outFileName " + name cmd += " --refPointLabel " + refpoint if vmax is not None: cmd += " -max "+str(vmax) if vmin is not None: cmd += " -min "+str(vmin) if cluster > 1: cmd += " --perGroup --kmeans "+str(cluster) if legendLoc: cmd += " --legendLocation "+legendLoc if title: cmd += " --plotTitle " + title data = subprocess.run(cmd, shell=True, capture_output=True) print(data) def getPeaksAt(peaks, bigwigs, folder='', bigwignames=[], peaknames=[], window=1000, title='', numpeaks=4000, numthreads=8, width=5, length=10, torecompute=False, name='temp/peaksat.pdf', refpoint="TSS", scale=None, sort=False, withDeeptools=True, onlyProfile=False, cluster=1, vmax=None, vmin=None, overlap=False, legendLoc=None): """ get pysam data ask for counts only at specific locus based on windows from center+-size from sorted MYC peaks for each counts, do a rolling average (or a convolving of the data) with numpy append to an array return array, normalized """ if withDeeptools: if isinstance(peaks, pd.DataFrame): peaks = 'peaks.bed ' peaks.to_csv('peaks.bed', sep='\t', index=False, header=False) elif type(peaks) == list: pe = '' i = 0 for n, p in enumerate(peaks): if 20 < int(os.popen('wc -l ' + p).read().split(' ')[0]): pe += p + ' ' elif len(peaknames) > 0: peaknames.pop(n-i) i += 1 peaks = pe elif type(peaks) == str: peaks += ' ' else: raise ValueError(' we dont know this filetype') if type(bigwigs) is list: pe = '' for val in bigwigs: pe += folder + val + ' ' bigwigs = pe else: bigwigs = folder + bigwigs + ' ' h.createFoldersFor(name) cmd = '' if not os.path.exists('.'.join(name.split('.')[:-1]) + ".gz") or torecompute: cmd += "computeMatrix reference-point -S " cmd += bigwigs cmd += " --referencePoint "+refpoint cmd += " --regionsFileName " + peaks cmd += " --missingDataAsZero" cmd += " --outFileName " + '.'.join(name.split('.')[:-1]) + ".gz" cmd += " --upstream " + str(window) + " --downstream " + str(window) cmd += " --numberOfProcessors " + str(numthreads) + ' && ' cmd += "plotHeatmap" if not onlyProfile else 'plotProfile' if type(name) is list: if not onlyProfile: raise ValueError('needs to be set to True, can\'t average heatmaps') cmd += " --matrixFile " + '.gz '.join(name) + ".gz" if average: cmd += "--averageType mean" else: cmd += " --matrixFile " + '.'.join(name.split('.')[:-1]) + ".gz" cmd += " --outFileName " + name cmd += " --refPointLabel " + refpoint if vmax is not None: cmd += " -max "+str(vmax) if vmin is not None: cmd += " -min "+str(vmin) if cluster > 1: cmd += " --perGroup --kmeans "+str(cluster) if overlap: if onlyProfile: cmd += " --plotType overlapped_lines" else: raise ValueError("overlap only works when onlyProfile is set") if legendLoc: cmd += " --legendLocation "+legendLoc if len(peaknames) > 0: pe = '' for i in peaknames: pe += ' ' + i cmd += " --regionsLabel" + pe if type(bigwigs) is list: if len(bigwignames) > 0: pe = '' for i in bigwignames: pe += ' "' + i + '"' cmd += " --samplesLabel" + pe if title: cmd += " --plotTitle '"+title+"'" 
data = subprocess.run(cmd, shell=True, capture_output=True) print(data) else: if 'relative_summit_pos' in peaks.columns: center = [int((val['start'] + val['relative_summit_pos'])) for k, val in peaks.iterrows()] else: center = [int((val['start'] + val['end']) / 2) for k, val in peaks.iterrows()] pd.set_option('mode.chained_assignment', None) peaks['start'] = [c - window for c in center] peaks['end'] = [c + window for c in center] fig, ax = plt.subplots(1, len(bigwigs), figsize=[ width, length], title=title if title else 'Chip Heatmap') if sort: peaks = peaks.sort_values(by=["foldchange"], ascending=False) if numpeaks > len(peaks): numpeaks = len(peaks) - 1 cov = {} maxs = [] for num, bigwig in enumerate(bigwigs): bw = pyBigWig.open(folder + bigwig) co = np.zeros((numpeaks, window * 2), dtype=int) scale = scale[bigwig] if scale is dict else 1 for i, (k, val) in enumerate(peaks.iloc[:numpeaks].iterrows()): try: co[i] = np.nan_to_num(bw.values(str(val.chrom), val.start, val.end), 0) except RuntimeError as e: print(str(val.chrom), val.start, val.end) pass cov[bigwig] = co maxs.append(co.max()) for num, bigwig in enumerate(bigwigs): sns.heatmap(cov[bigwig] * scale, ax=ax[num], vmax=max(maxs), yticklabels=[], cmap=cmaps[num], cbar=True) ax[num].set_title(bigwig.split('.')[0]) fig.subplots_adjust(wspace=0.1) fig.show() fig.savefig(name) return cov, fig def andrew(groups, merged, annot, enr=None, pvals=None, cols=8, precise=True, title = "sorted clustermap of cobindings clustered", folder="", rangeval=4, okpval=10**-3, size=(20,15),vmax=3, vmin=0): if enr is None or pvals is None: enr, pvals = chip.enrichment(merged, groups=groups) rand = np.random.choice(merged.index,5000) subgroups = groups[rand] sorting = np.argsort(subgroups) redblue = cm.get_cmap('RdBu_r',256) subenr = enr.iloc[annot-cols:] subenr[subenr>rangeval]=rangeval subenr[subenr<-rangeval]=-rangeval subenr = subenr/rangeval data = [] #colors = [] impv = pvals.values for i in subgroups[sorting]: #colors.append(viridis(i)) a = redblue((128+(subenr[i]*128)).astype(int)).tolist() for j in range(len(a)): a[j] = [1.,1.,1.,1.] if impv[j,i] > okpval else a[j] data.append(a) data = pd.DataFrame(data=data,columns=list(subenr.index),index= rand[sorting]) #data["clusters"] = colors a = np.log2(1.01+merged[merged.columns[cols:annot]].iloc[rand].iloc[sorting].T) if not precise: for i in set(groups): e = a[a.columns[subgroups[sorting]==i]].mean(1) e = pd.DataFrame([e for i in range((subgroups[sorting]==i).sum())]).T a[a.columns[subgroups[sorting]==i]] = e fig = sns.clustermap(a, vmin=vmin, vmax=vmax, figsize=size, z_score=0, colors_ratio=0.01, col_cluster=False,col_colors=data, xticklabels=False) fig.ax_col_dendrogram.set_visible(False) fig.fig.suptitle(title) fig.savefig(folder + str(len(set(groups))) + '_clustermap_cobinding_enrichment_andrewplot.pdf') plt.show()
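

# A hedged invocation sketch for `getPeaksAt` in its deeptools mode: `peaks_df`
# is assumed to be a MACS2-style DataFrame with 'chrom'/'start'/'end'/
# 'foldchange' columns, the bigwig file names are placeholders, and the
# deeptools executables (computeMatrix, plotHeatmap) must be on the PATH.
def _example_peak_heatmap(peaks_df):
    getPeaksAt(
        peaks_df,
        bigwigs=["sample1.bw", "sample2.bw"],
        folder="data/seqs/",
        bigwignames=["WT", "KO"],
        window=1000,
        refpoint="center",
        name="temp/peaksat.pdf",
        withDeeptools=True,
    )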
PypiClean
/Harambe-0.10.0.tar.gz/Harambe-0.10.0/harambe/core.py
import re import os import sys import arrow import jinja2 import inspect import logging import functools import pkg_resources import logging.config from six import string_types from werkzeug import import_string from flask_assets import Environment from werkzeug.contrib.fixers import ProxyFix from werkzeug.routing import (BaseConverter, parse_rule) from flask import (Flask, g, render_template, flash, session, make_response, Response, request, abort, url_for as f_url_for, redirect as f_redirect) from . import (utils, exceptions) from .extras.harambe_db import HarambeDB from .__about__ import * _py2 = sys.version_info[0] == 2 # ------------------------------------------------------------------------------ __all__ = [ "Harambe", "HarambeApp", "db", "models", "views", "get_env", "set_env", "get_app_env", "get_env_config", "get_config", "page_meta", "flash_success", "flash_error", "flash_info", "flash_data", "get_flash_data", "init_app", "register_package", "register_models", "utc_now", "local_datetime", "local_now", "to_local_datetime", # For convenience when importing from harambe, but can use # the flask one "flash", "session", "request", "abort", "g", # They have been altered with extra functionalities "redirect", "url_for" ] # Hold the current environment __ENV__ = None is_method = lambda x: inspect.ismethod if _py2 else inspect.isfunction # Will hold all active class views # It can be used for redirection etc # ie: redirect(views.ContactPage.index) views = type('', (), {}) # Will hold models from apps, or to be shared # ie, set new model property -> models.MyNewModel = MyModel # ie: use property -> models.MyNewModel.all() # For convenience, use `register_models(**kw)` to register the models # By default harambe will load all the application/models.py models models = type('', (), {}) # Setup the DB # upon initialization will use the right URL for it # also, it exposes the db object to all modules db = HarambeDB() def register_models(**kwargs): """ Alias to register model :param kwargs: :return: """ [setattr(models, k, v) for k, v in kwargs.items()] def set_env(env): """ Set the envrionment manually :param env: :return: """ global __ENV__ __ENV__ = env.lower().capitalize() def get_env(): """ Return the Capitalize environment name It can be used to retrieve class base config Default: Development :returns: str Capitalized """ if not __ENV__: env = os.environ["env"] if "env" in os.environ else "Dev" set_env(env) return __ENV__ def get_app_env(): """ if the app and the envi are passed in the command line as 'app=$app:$env' :return: tuple app, env """ app, env = None, get_env() if "app" in os.environ: app = os.environ["app"].lower() if ":" in app: app, env = os.environ["app"].split(":", 2) set_env(env) return app, env def get_env_config(config): """ Return config class based based on the config :param config : Object - The configuration module containing the environment object """ return getattr(config, get_env()) def init_app(kls): """ To bind middlewares, plugins that needs the 'app' object to init Bound middlewares will be assigned on cls.init() """ if not hasattr(kls, "__call__"): raise exceptions.HarambeError("init_app: '%s' is not callable" % kls) Harambe._init_apps.add(kls) return kls def register_package(pkg): """ Allow to register an app packages by loading and exposing: templates, static, and exceptions for abort() Structure of package root | $package_name | __init__.py | | /templates | | | | /static | | assets.yml :param pkg: str - __package__ or __name__ or The root dir or the dotted 
resource package (package.path.path, usually __name__ of templates and static """ root_pkg_dir = pkg if not os.path.isdir(pkg) and "." in pkg: root_pkg_dir = pkg_resources.resource_filename(pkg, "") template_path = os.path.join(root_pkg_dir, "templates") static_path = os.path.join(root_pkg_dir, "static") logging.info("Registering App: " + pkg) if os.path.isdir(template_path): template_path = jinja2.FileSystemLoader(template_path) Harambe._template_paths.add(template_path) if os.path.isdir(static_path): Harambe._static_paths.add(static_path) Harambe._add_asset_bundle(static_path) def get_config(key, default=None): """ Shortcut to access the application's config in your class :param key: The key to access :param default: The default value when None :returns mixed: """ return Harambe._app.config.get(key, default) if Harambe._app else default def page_meta(title=None, **kwargs): """ Meta allows you to add page meta data in the request `g` context :params **kwargs: meta keys we're expecting: title (str) description (str) url (str) (Will pick it up by itself if not set) image (str) site_name (str) (but can pick it up from config file) object_type (str) keywords (list) locale (str) card (str) **Boolean By default these keys are True use_opengraph use_twitter use_googleplus python """ default = dict( title="", description="", url="", image="", site_name="", object_type="article", locale="", keywords=[], use_opengraph=True, use_googleplus=True, use_twitter=True, properties={} ) meta = getattr(g, "__META__", default) if title: kwargs["title"] = title meta.update(**kwargs) setattr(g, "__META__", meta) def flash_success(msg): """ Alias to flash, but set a success message :param msg: :return: """ return flash(msg, "success") def flash_error(msg): """ Alias to flash, but set an error message :param msg: :return: """ return flash(msg, "error") def flash_info(msg): """ Alias to flash, but set an info message :param msg: :return: """ return flash(msg, "info") def flash_data(data): """ Just like flash, but will save data :param data: :return: """ session["_flash_data"] = data def get_flash_data(): """ Retrieved :return: mixed """ return session.pop("_flash_data", None) def utc_now(): """ Return the utcnow arrow object :return: Arrow """ return arrow.utcnow() def local_datetime(utcdatetime, format=None, timezone=None): """ Return local datetime based on the timezone Also can format the date :param utcdatetime: Arrow or string :param format: string of format :param timezone: string, ie: US/Eastern :return: """ timezone = timezone or get_config("DATETIME_TIMEZONE", "US/Eastern") dt = utcdatetime.to(timezone) \ if isinstance(utcdatetime, arrow.Arrow) \ else arrow.get(utcdatetime, timezone) if format is None: return dt _ = get_config("DATETIME_FORMAT") format = _.get("default") or "MM/DD/YYYY" if not format else _.get(format) return dt.format(format) def to_local_datetime(dt, tz=None): """ DEPRECATED :param dt: :param tz: :return: """ return local_datetime(dt, tz) def local_now(): """ DEPRECATED :return: """ return to_local_datetime(utc_now()) # ------------------------------------------------------------------------------ # Altered flask functions def url_for(endpoint, **kw): """ Harambe url_for is an alias to the flask url_for, with the ability of passing the function signature to build the url, without knowing the endpoint :param endpoint: :param kw: :return: """ _endpoint = None if isinstance(endpoint, string_types): return f_url_for(endpoint, **kw) else: # self, will refer the caller method, by getting the 
method name if isinstance(endpoint, Harambe): fn = sys._getframe().f_back.f_code.co_name endpoint = getattr(endpoint, fn) if is_method(endpoint): _endpoint = _get_action_endpoint(endpoint) if not _endpoint: _endpoint = _build_endpoint_route_name(endpoint) if _endpoint: return f_url_for(_endpoint, **kw) else: raise exceptions.HarambeError('Harambe `url_for` received an invalid endpoint') def redirect(endpoint, **kw): """ Redirect allow to redirect dynamically using the classes methods without knowing the right endpoint. Expecting all endpoint have GET as method, it will try to pick the first match, based on the endpoint provided or the based on the Rule map_url An endpoint can also be passed along with **kw An http: or https: can also be passed, and will redirect to that site. example: redirect(self.hello_world) redirect(self.other_page, name="x", value="v") redirect("https://google.com") redirect(views.ContactPage.index) :param endpoint: :return: redirect url """ _endpoint = None if isinstance(endpoint, string_types): _endpoint = endpoint # valid for https:// or /path/ # Endpoint should not have slashes. Use : (colon) to build endpoint if "/" in endpoint: return f_redirect(endpoint) else: for r in Harambe._app.url_map.iter_rules(): _endpoint = endpoint if 'GET' in r.methods and endpoint in r.endpoint: _endpoint = r.endpoint break else: # self, will refer the caller method, by getting the method name if isinstance(endpoint, Harambe): fn = sys._getframe().f_back.f_code.co_name endpoint = getattr(endpoint, fn) if is_method(endpoint): _endpoint = _get_action_endpoint(endpoint) if not _endpoint: _endpoint = _build_endpoint_route_name(endpoint) if _endpoint: return f_redirect(url_for(_endpoint, **kw)) else: raise exceptions.HarambeError("Invalid endpoint") def _get_action_endpoint(action): """ Return the endpoint base on the view's action :param action: :return: """ _endpoint = None if is_method(action): if hasattr(action, "_rule_cache"): rc = action._rule_cache if rc: k = list(rc.keys())[0] rules = rc[k] len_rules = len(rules) if len_rules == 1: rc_kw = rules[0][1] _endpoint = rc_kw.get("endpoint", None) if not _endpoint: _endpoint = _build_endpoint_route_name(action) elif len_rules > 1: _prefix = _build_endpoint_route_name(action) for r in Harambe._app.url_map.iter_rules(): if ('GET' in r.methods or 'POST' in r.methods) \ and _prefix in r.endpoint: _endpoint = r.endpoint break return _endpoint def _build_endpoint_route_name(endpoint): is_class = inspect.isclass(endpoint) class_name = endpoint.im_class.__name__ if not is_class else endpoint.__name__ method_name = endpoint.__name__ cls = endpoint.im_class() \ if (not hasattr(endpoint, "__self__") or endpoint.__self__ is None) \ else endpoint.__self__ return build_endpoint_route_name(cls, method_name, class_name) # ------------------------------------------------------------------------------ class Harambe(object): decorators = [] base_route = None route_prefix = None trailing_slash = True base_layout = "layouts/base.jade" template_markup = "jade" assets = None logger = None _ext = set() __special_methods = ["get", "put", "patch", "post", "delete", "index"] _installed_apps = [] _app = None _init_apps = set() _template_paths = set() _static_paths = set() _asset_bundles = set() @classmethod def __call__(cls, flask_or_import_name, projects=None, project_name=None, app_directory=None ): """ :param flask_or_import_name: Flask instance or import name -> __name__ :param projects: dict of app and views to load. 
ie: { "main": [ "main", "api" ] } :param project_name: name of the project. If empty, it will try to get it from the app_env(). By default it is "main" The app main is set as environment variable ie: app=PROJECT_NAME:CONFIG -> app=main:production :param app_directory: the directory name relative to the current execution path :return: """ if not app_directory: app_directory = "app" if not project_name: project_name = get_app_env()[0] or "main" app_env = get_env() app = flask_or_import_name \ if isinstance(flask_or_import_name, Flask) \ else Flask(flask_or_import_name) app.url_map.converters['regex'] = RegexConverter app.template_folder = "%s/templates" % app_directory app.static_folder = "%s/static" % app_directory # Load configs c = "%s.config.%s" % (app_directory, app_env) app.config.from_object(c) # Proxyfix # By default it will use PROXY FIX # To by pass it, or to use your own, set config # USE_PROXY_FIX = False if app.config.get("USE_PROXY_FIX") is not False: app.wsgi_app = ProxyFix(app.wsgi_app) cls._app = app cls.assets = Environment(cls._app) cls._load_extensions() cls._setup_logger() cls._setup_db() cls.setup_installed_apps() cls._expose_models() try: # import models m = "%s.models" % app_directory import_string(m) cls._expose_models() # import projects views if not projects: projects = {"main": "main"} if project_name not in projects: raise ValueError("Missing project: %s" % project_name) _projects = projects.get(project_name) if isinstance(_projects, string_types): _projects = [_projects] for _ in _projects: import_string("%s.views.%s" % (app_directory, _)) except ImportError as ie1: pass cls._expose_models() # Setup init_app # init_app instanciate functions that may need the flask.app object # Usually for flask extension to be setup _ = [_app(cls._app) for _app in cls._init_apps] # Add bundles cls._add_asset_bundle(cls._app.static_folder) # Register templates if cls._template_paths: loader = [cls._app.jinja_loader] + list(cls._template_paths) cls._app.jinja_loader = jinja2.ChoiceLoader(loader) # Static if cls._static_paths: cls.assets.load_path = [cls._app.static_folder] + list(cls._static_paths) [cls.assets.from_yaml(a) for a in cls._asset_bundles] # Register views for subcls in cls.__subclasses__(): base_route = subcls.base_route if not base_route: base_route = utils.dasherize(utils.underscore(subcls.__name__)) if subcls.__name__.lower() == "index": base_route = "/" subcls._register(cls._app, base_route=base_route) return cls._app @classmethod def setup_installed_apps(cls): """ To import 3rd party applications along with associated properties It is a list of dict or string. When a dict, it contains the `app` key and the configuration, if it's a string, it is just the app name If you require dependencies from other packages, dependencies must be placed before the calling package. It is required that __init__ in the package app has an entry point method -> 'main(**kw)' which will be used to setup the default app. 
As a dict INSTALLED_APPS = [ "it.can.be.a.string.to.the.module", ("in.a.tuple.with.props.dict", {options}), [ ("multi.app.list.in.a.list.of.tuple", {options}), ("multi.app.list.in.a.list.of.tuple2", {options}) ] ] :return: """ cls._installed_apps = cls._app.config.get("INSTALLED_APPS", []) if cls._installed_apps: def import_app(module, props={}): _ = import_string(module) setattr(_, "__options__", utils.dict_dot(props)) for k in cls._installed_apps: if isinstance(k, string_types): # One string import_app(k, {}) elif isinstance(k, tuple): import_app(k[0], k[1]) elif isinstance(k, list): # list of tuple[(module props), ...] for t in k: import_app(t[0], t[1]) @classmethod def render(cls, data={}, _template=None, _layout=None, **kwargs): """ Render the view template based on the class and the method being invoked :param data: The context data to pass to the template :param _template: The file template to use. By default it will map the module/classname/action.html :param _layout: The body layout, must contain {% include __template__ %} """ # Invoke the page meta so it can always be set page_meta() # Add some global Harambe data in g, along with APPLICATION DATA vars = dict( __NAME__=__title__, __VERSION__=__version__, __YEAR__=utc_now().year ) for k, v in vars.items(): setattr(g, k, v) if not _template: stack = inspect.stack()[1] action_name = stack[3] _template = build_endpoint_route_name(cls, action_name) _template = utils.list_replace([".", ":"], "/", _template) _template = "%s.%s" % (_template, cls.template_markup) data = data or dict() data.update(kwargs) data["__template__"] = _template return render_template(_layout or cls.base_layout, **data) @classmethod def _add_asset_bundle(cls, path): """ Add a webassets bundle yml file """ f = "%s/assets.yml" % path if os.path.isfile(f): cls._asset_bundles.add(f) @classmethod def _setup_logger(cls): logging_config = cls._app.config.get("LOGGING") if not logging_config: logging_config = { "version": 1, "handlers": { "default": { "class": cls._app.config.get("LOGGING_CLASS", "logging.StreamHandler") } }, 'loggers': { '': { 'handlers': ['default'], 'level': 'WARN', } } } logging.config.dictConfig(logging_config) cls.logger = logging.getLogger("root") cls._app._logger = cls.logger cls._app._loger_name = cls.logger.name @classmethod def _setup_db(cls): cls._app.db = None uri = cls._app.config.get("DB_URL") if uri: db._connect(uri, cls._app) cls._app.db = db @classmethod def _expose_models(cls): if cls._app.db: register_models(**{m.__name__:m for m in cls._app.db.Model.__subclasses__() if not hasattr(models, m.__name__)}) @classmethod def _register(cls, app, base_route=None, subdomain=None, route_prefix=None, trailing_slash=True): """Registers a Harambe class for use with a specific instance of a Flask app. Any methods not prefixes with an underscore are candidates to be routed and will have routes registered when this method is called. :param app: an instance of a Flask application :param base_route: The base path to use for all routes registered for this class. Overrides the base_route attribute if it has been set. :param subdomain: A subdomain that this registration should use when configuring routes. :param route_prefix: A prefix to be applied to all routes registered for this class. Precedes base_route. Overrides the class' route_prefix if it has been set. """ if cls is Harambe: raise TypeError("cls must be a subclass of Harambe, not Harambe itself") # Create a unique namespaced key to access view. 
# $module.$class_name.$Method module = cls.__module__.split(".")[-1] if not hasattr(views, module): setattr(views, module, type('', (), {})) mod = getattr(views, module) setattr(mod, cls.__name__, cls) if base_route: cls.orig_base_route = cls.base_route cls.base_route = base_route if route_prefix: cls.orig_route_prefix = cls.route_prefix cls.route_prefix = route_prefix if not subdomain: if hasattr(app, "subdomain") and app.subdomain is not None: subdomain = app.subdomain elif hasattr(cls, "subdomain"): subdomain = cls.subdomain if trailing_slash is not None: cls.orig_trailing_slash = cls.trailing_slash cls.trailing_slash = trailing_slash for name, value in get_interesting_members(Harambe, cls): proxy = cls.make_proxy_method(name) route_name = build_endpoint_route_name(cls, name) try: if hasattr(value, "_rule_cache") and name in value._rule_cache: for idx, cached_rule in enumerate(value._rule_cache[name]): rule, options = cached_rule rule = cls.build_rule(rule) sub, ep, options = cls.parse_options(options) if not subdomain and sub: subdomain = sub if ep: endpoint = ep elif len(value._rule_cache[name]) == 1: endpoint = route_name else: endpoint = "%s_%d" % (route_name, idx,) app.add_url_rule(rule, endpoint, proxy, subdomain=subdomain, **options) elif name in cls.__special_methods: if name in ["get", "index"]: methods = ["GET"] if name == "index": if hasattr(value, "_methods_cache"): methods = value._methods_cache else: methods = [name.upper()] rule = cls.build_rule("/", value) if not cls.trailing_slash: rule = rule.rstrip("/") app.add_url_rule(rule, route_name, proxy, methods=methods, subdomain=subdomain) else: methods = value._methods_cache \ if hasattr(value, "_methods_cache") \ else ["GET"] name = utils.dasherize(name) route_str = '/%s/' % name if not cls.trailing_slash: route_str = route_str.rstrip('/') rule = cls.build_rule(route_str, value) app.add_url_rule(rule, route_name, proxy, subdomain=subdomain, methods=methods) except DecoratorCompatibilityError: raise DecoratorCompatibilityError("Incompatible decorator detected on %s in class %s" % (name, cls.__name__)) if hasattr(cls, "orig_base_route"): cls.base_route = cls.orig_base_route del cls.orig_base_route if hasattr(cls, "orig_route_prefix"): cls.route_prefix = cls.orig_route_prefix del cls.orig_route_prefix if hasattr(cls, "orig_trailing_slash"): cls.trailing_slash = cls.orig_trailing_slash del cls.orig_trailing_slash @classmethod def parse_options(cls, options): """Extracts subdomain and endpoint values from the options dict and returns them along with a new dict without those values. """ options = options.copy() subdomain = options.pop('subdomain', None) endpoint = options.pop('endpoint', None) return subdomain, endpoint, options, @classmethod def make_proxy_method(cls, name): """Creates a proxy function that can be used by Flasks routing. The proxy instantiates the Harambe subclass and calls the appropriate method. :param name: the name of the method to create a proxy for """ i = cls() view = getattr(i, name) for decorator in cls.decorators: view = decorator(view) @functools.wraps(view) def proxy(**forgettable_view_args): # Always use the global request object's view_args, because they # can be modified by intervening function before an endpoint or # wrapper gets called. This matches Flask's behavior. 
del forgettable_view_args if hasattr(i, "before_request"): response = i.before_request(name, **request.view_args) if response is not None: return response before_view_name = "before_" + name if hasattr(i, before_view_name): before_view = getattr(i, before_view_name) response = before_view(**request.view_args) if response is not None: return response response = view(**request.view_args) # You can also return a dict or None, it will pass it to render if isinstance(response, dict) or response is None: response = response or {} if hasattr(i, "_renderer"): response = i._renderer(response) else: _template = build_endpoint_route_name(cls, view.__name__) _template = utils.list_replace([".", ":"], "/", _template) _template = "%s.%s" % (_template, cls.template_markup) response.setdefault("_template", _template) response = i.render(**response) if not isinstance(response, Response): response = make_response(response) for ext in cls._ext: response = ext(response) after_view_name = "after_" + name if hasattr(i, after_view_name): after_view = getattr(i, after_view_name) response = after_view(response) if hasattr(i, "after_request"): response = i.after_request(name, response) return response return proxy @classmethod def build_rule(cls, rule, method=None): """Creates a routing rule based on either the class name (minus the 'View' suffix) or the defined `base_route` attribute of the class :param rule: the path portion that should be appended to the route base :param method: if a method's arguments should be considered when constructing the rule, provide a reference to the method here. arguments named "self" will be ignored """ rule_parts = [] if cls.route_prefix: rule_parts.append(cls.route_prefix) base_route = cls.get_base_route() if base_route: rule_parts.append(base_route) rule_parts.append(rule) ignored_rule_args = ['self'] if hasattr(cls, 'base_args'): ignored_rule_args += cls.base_args if method: args = get_true_argspec(method)[0] for arg in args: if arg not in ignored_rule_args: rule_parts.append("<%s>" % arg) result = "/%s" % "/".join(rule_parts) return re.sub(r'(/)\1+', r'\1', result) @classmethod def get_base_route(cls): """Returns the route base to use for the current class.""" base_route = cls.__name__.lower() if cls.base_route is not None: base_route = cls.base_route base_rule = parse_rule(base_route) cls.base_args = [r[2] for r in base_rule] return base_route.strip("/") @staticmethod def _bind_route_rule_cache(f, rule, append_method=False, **kwargs): # Put the rule cache on the method itself instead of globally if rule is None: rule = utils.dasherize(f.__name__) + "/" if not hasattr(f, '_rule_cache') or f._rule_cache is None: f._rule_cache = {f.__name__: [(rule, kwargs)]} elif not f.__name__ in f._rule_cache: f._rule_cache[f.__name__] = [(rule, kwargs)] else: # when and endpoint accepts multiple METHODS, ie: post(), get() if append_method: for r in f._rule_cache[f.__name__]: if r[0] == rule and "methods" in r[1] and "methods" in kwargs: r[1]["methods"] = list(set(r[1]["methods"] + kwargs["methods"])) else: f._rule_cache[f.__name__].append((rule, kwargs)) return f @classmethod def _load_extensions(cls): extensions = [ 'pyjade.ext.jinja.PyJadeExtension', 'harambe.extras.jade.JadeTagExtension', 'harambe.extras.md.MarkdownExtension', 'harambe.extras.md.MarkdownTagExtension', ] if cls._app.config.get("COMPRESS_HTML"): extensions.append('harambe.extras.htmlcompress.HTMLCompress') for ext in extensions: cls._app.jinja_env.add_extension(ext) # HarambeApp HarambeApp = Harambe() # 
------------------------------------------------------------------------------ def build_endpoint_route_name(cls, method_name, class_name=None): """ Build the route endpoint It is recommended to place your views in /views directory, so it can build the endpoint from it. If not, it will make the endpoint from the module name The main reason for having the views directory, it is explicitely easy to see the path of the view :param cls: The view class :param method_name: The name of the method :param class_name: To pass the class name. :return: string """ module = cls.__module__.split("views.")[1] if ".views." in cls.__module__ \ else cls.__module__.split(".")[-1] return "%s.%s:%s" % (module, class_name or cls.__name__, method_name) def get_interesting_members(base_class, cls): """Returns a generator of methods that can be routed to""" base_members = dir(base_class) predicate = inspect.ismethod if _py2 else inspect.isfunction all_members = inspect.getmembers(cls, predicate=predicate) return (member for member in all_members if not member[0] in base_members and ((hasattr(member[1], "__self__") and not member[1].__self__ in inspect.getmro(cls)) if _py2 else True) and not member[0].startswith("_") and not member[0].startswith("before_") and not member[0].startswith("after_")) def apply_function_to_members(cls, fn): for name, method in get_interesting_members(Harambe, cls): setattr(cls, name, fn(method)) def get_true_argspec(method): """Drills through layers of decorators attempting to locate the actual argspec for the method.""" argspec = inspect.getargspec(method) args = argspec[0] if args and args[0] == 'self': return argspec if hasattr(method, '__func__'): method = method.__func__ if not hasattr(method, '__closure__') or method.__closure__ is None: raise DecoratorCompatibilityError closure = method.__closure__ for cell in closure: inner_method = cell.cell_contents if inner_method is method: continue if not inspect.isfunction(inner_method) \ and not inspect.ismethod(inner_method): continue true_argspec = get_true_argspec(inner_method) if true_argspec: return true_argspec class DecoratorCompatibilityError(Exception): pass class RegexConverter(BaseConverter): def __init__(self, url_map, *items): super(RegexConverter, self).__init__(url_map) self.regex = items[0]
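# ------------------------------------------------------------------------------
# Illustrative usage sketch, not part of the package source. It assumes the
# framework is importable as `harambe` and that an `app/` directory exists with
# `app/config.py` (exposing a `Dev` class, per get_env()'s default) and
# `app/templates`, which is what the HarambeApp factory expects by default.

from harambe import Harambe, HarambeApp


class Index(Harambe):
    # A subclass named "Index" is registered at the base route "/"

    def index(self):
        # Returning a dict (or None) makes the proxy call cls.render(), which
        # resolves a template from the endpoint name (e.g. .../Index/index.jade)
        return {"greeting": "Hello, world!"}

    def about(self):
        # Non-special methods map to routes automatically: this serves GET /about/
        return {"page": "about"}


# Calling the HarambeApp singleton builds and returns the Flask app, loading
# app.config.<Env> according to the `env` environment variable.
flask_app = HarambeApp(__name__)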
PypiClean
/Flask-User-AWS-1.0.1.7.tar.gz/Flask-User-AWS-1.0.1.7/flask_user/forms.py
import string from flask import current_app from flask_login import current_user # Flask-WTF v0.13 renamed Flask to FlaskForm try: from flask_wtf import FlaskForm # Try Flask-WTF v0.13+ except ImportError: from flask_wtf import Form as FlaskForm # Fallback to Flask-WTF v0.12 or older from wtforms import BooleanField, HiddenField, PasswordField, SubmitField, StringField from wtforms import validators, ValidationError from .translation_utils import lazy_gettext as _ # map _() to lazy_gettext() # **************** # ** Validators ** # **************** def password_validator(form, field): current_app.user_manager.password_validator(form, field) def username_validator(form, field): current_app.user_manager.username_validator(form, field) def unique_username_validator(form, field): """ Ensure that Username is unique. This validator may NOT be customized.""" user_manager = current_app.user_manager if not user_manager.db_manager.username_is_available(field.data): raise ValidationError(_('This Username is already in use. Please try another one.')) def unique_email_validator(form, field): """ Username must be unique. This validator may NOT be customized.""" user_manager = current_app.user_manager if not user_manager.email_is_available(field.data): raise ValidationError(_('This Email is already in use. Please try another one.')) # *********** # ** Forms ** # *********** class AddEmailForm(FlaskForm): """Add an email address form.""" email = StringField(_('Email'), validators=[ validators.DataRequired(_('Email is required')), validators.Email(_('Invalid Email')), unique_email_validator]) submit = SubmitField(_('Add Email')) class ChangePasswordForm(FlaskForm): """Change password form.""" old_password = PasswordField(_('Old Password'), validators=[ validators.DataRequired(_('Old Password is required')), ]) new_password = PasswordField(_('New Password'), validators=[ validators.DataRequired(_('New Password is required')), password_validator, ]) retype_password = PasswordField(_('Retype New Password'), validators=[ validators.EqualTo('new_password', message=_('New Password and Retype Password did not match')) ]) submit = SubmitField(_('Change password')) def validate(self): # Use feature config to remove unused form fields user_manager = current_app.user_manager if not user_manager.USER_REQUIRE_RETYPE_PASSWORD: delattr(self, 'retype_password') # # Add custom password validator if needed # has_been_added = False # for v in self.new_password.validators: # if v==user_manager.password_validator: # has_been_added = True # if not has_been_added: # self.new_password.validators.append(user_manager.password_validator) # Validate field-validators if not super(ChangePasswordForm, self).validate(): return False # Verify current_user and current_password if not current_user or not user_manager.verify_password(self.old_password.data, current_user.password): self.old_password.errors.append(_('Old Password is incorrect')) return False # All is well return True class ChangeUsernameForm(FlaskForm): """Change username form.""" new_username = StringField(_('New Username'), validators=[ validators.DataRequired(_('Username is required')), username_validator, unique_username_validator, ]) old_password = PasswordField(_('Old Password'), validators=[ validators.DataRequired(_('Old Password is required')), ]) submit = SubmitField(_('Change username')) def validate(self): user_manager = current_app.user_manager # # Add custom username validator if needed # has_been_added = False # for v in self.new_username.validators: # if 
v==user_manager.username_validator: # has_been_added = True # if not has_been_added: # self.new_username.validators.append(user_manager.username_validator) # Validate field-validators if not super(ChangeUsernameForm, self).validate(): return False # Verify current_user and current_password if not current_user or not user_manager.verify_password(self.old_password.data, current_user.password): self.old_password.errors.append(_('Old Password is incorrect')) return False # All is well return True class EditUserProfileForm(FlaskForm): """Edit user profile form.""" first_name = StringField(_('First name'), validators=[validators.DataRequired()]) last_name = StringField(_('Last name'), validators=[validators.DataRequired()]) submit = SubmitField(_('Update')) class LoginForm(FlaskForm): """Login form.""" next = HiddenField() # for login.html reg_next = HiddenField() # for login_or_register.html username = StringField(_('Username'), validators=[ validators.DataRequired(_('Username is required')), ]) email = StringField(_('Email'), validators=[ validators.DataRequired(_('Email is required')), validators.Email(_('Invalid Email')) ]) password = PasswordField(_('Password'), validators=[ validators.DataRequired(_('Password is required')), ]) remember_me = BooleanField(_('Remember me')) submit = SubmitField(_('Sign in')) def __init__(self, *args, **kwargs): super(LoginForm, self).__init__(*args, **kwargs) user_manager = current_app.user_manager if user_manager.USER_ENABLE_USERNAME and user_manager.USER_ENABLE_EMAIL: # Renamed 'Username' label to 'Username or Email' self.username.label.text = _('Username or Email') def validate(self): # Remove fields depending on configuration user_manager = current_app.user_manager if user_manager.USER_ENABLE_USERNAME: delattr(self, 'email') else: delattr(self, 'username') # Validate field-validators if not super(LoginForm, self).validate(): return False # Find user by username and/or email user = None user_email = None if user_manager.USER_ENABLE_USERNAME: # Find user by username user = user_manager.db_manager.find_user_by_username(self.username.data) # Find user by email address (username field) if not user and user_manager.USER_ENABLE_EMAIL: user, user_email = user_manager.db_manager.get_user_and_user_email_by_email(self.username.data) else: # Find user by email address (email field) user, user_email = user_manager.db_manager.get_user_and_user_email_by_email(self.email.data) # Handle successful authentication if user and user_manager.verify_password(self.password.data, user.password): return True # Successful authentication # Handle unsuccessful authentication # Email, Username or Email/Username depending on settings if user_manager.USER_ENABLE_USERNAME and user_manager.USER_ENABLE_EMAIL: username_or_email_field = self.username username_or_email_text = (_('Username/Email')) show_does_not_exist = user_manager.USER_SHOW_EMAIL_DOES_NOT_EXIST or user_manager.USER_SHOW_USERNAME_DOES_NOT_EXIST elif user_manager.USER_ENABLE_USERNAME: username_or_email_field = self.username username_or_email_text = (_('Username')) show_does_not_exist = user_manager.USER_SHOW_USERNAME_DOES_NOT_EXIST else: username_or_email_field = self.email username_or_email_text = (_('Email')) show_does_not_exist = user_manager.USER_SHOW_EMAIL_DOES_NOT_EXIST # Show 'username/email does not exist' or 'incorrect password' error message if show_does_not_exist: if not user: message = _('%(username_or_email)s does not exist', username_or_email=username_or_email_text) username_or_email_field.errors.append(message) 
else: self.password.errors.append(_('Incorrect Password')) # Always show 'incorrect username/email or password' error message for additional security else: message = _('Incorrect %(username_or_email)s and/or Password', username_or_email=username_or_email_text) username_or_email_field.errors.append(message) self.password.errors.append(message) return False # Unsuccessful authentication class RegisterForm(FlaskForm): """Register new user form.""" password_validator_added = False next = HiddenField() # for login_or_register.html reg_next = HiddenField() # for register.html username = StringField(_('Username'), validators=[ validators.DataRequired(_('Username is required')), username_validator, unique_username_validator]) email = StringField(_('Email'), validators=[ validators.DataRequired(_('Email is required')), validators.Email(_('Invalid Email')), unique_email_validator]) password = PasswordField(_('Password'), validators=[ validators.DataRequired(_('Password is required')), password_validator]) retype_password = PasswordField(_('Retype Password'), validators=[ validators.EqualTo('password', message=_('Password and Retype Password did not match'))]) invite_token = HiddenField(_('Token')) submit = SubmitField(_('Register')) def validate(self): # remove certain form fields depending on user manager config user_manager = current_app.user_manager if not user_manager.USER_ENABLE_USERNAME: delattr(self, 'username') if not user_manager.USER_ENABLE_EMAIL: delattr(self, 'email') if not user_manager.USER_REQUIRE_RETYPE_PASSWORD: delattr(self, 'retype_password') # # Add custom username validator if needed # if user_manager.USER_ENABLE_USERNAME: # has_been_added = False # for v in self.username.validators: # if v==user_manager.username_validator: # has_been_added = True # if not has_been_added: # self.username.validators.append(user_manager.username_validator) # # Add custom password validator if needed # has_been_added = False # for v in self.password.validators: # if v==user_manager.password_validator: # has_been_added = True # if not has_been_added: # self.password.validators.append(user_manager.password_validator) # Validate field-validators if not super(RegisterForm, self).validate(): return False # All is well return True class ForgotPasswordForm(FlaskForm): """Forgot password form.""" email = StringField(_('Your email address'), validators=[ validators.DataRequired(_('Email address is required')), validators.Email(_('Invalid Email address')), ]) submit = SubmitField(_('Send reset password email')) def validate_email(form, field): user_manager = current_app.user_manager if user_manager.USER_SHOW_EMAIL_DOES_NOT_EXIST: user, user_email = user_manager.db_manager.get_user_and_user_email_by_email(field.data) if not user: raise ValidationError(_('%(username_or_email)s does not exist', username_or_email=_('Email'))) class ResendEmailConfirmationForm(FlaskForm): """Resend email confirmation form.""" email = StringField(_('Your email address'), validators=[ validators.DataRequired(_('Email address is required')), validators.Email(_('Invalid Email address')), ]) submit = SubmitField(_('Resend email confirmation email')) class ResetPasswordForm(FlaskForm): """Reset password form.""" new_password = PasswordField(_('New Password'), validators=[ validators.DataRequired(_('New Password is required')), password_validator, ]) retype_password = PasswordField(_('Retype New Password'), validators=[ validators.EqualTo('new_password', message=_('New Password and Retype Password did not match'))]) next = HiddenField() 
submit = SubmitField(_('Change password')) def validate(self): # Use feature config to remove unused form fields user_manager = current_app.user_manager if not user_manager.USER_REQUIRE_RETYPE_PASSWORD: delattr(self, 'retype_password') # # Add custom password validator if needed # has_been_added = False # for v in self.new_password.validators: # if v==user_manager.password_validator: # has_been_added = True # if not has_been_added: # self.new_password.validators.append(user_manager.password_validator) # Validate field-validators if not super(ResetPasswordForm, self).validate(): return False # All is well return True class InviteUserForm(FlaskForm): """Invite new user form.""" email = StringField(_('Email'), validators=[ validators.DataRequired(_('Email is required')), validators.Email(_('Invalid Email'))]) next = HiddenField() submit = SubmitField(_('Invite!')) # Manually Add translation strings from QuickStart apps that use string templates _sign_in = _('Sign in') _sign_out = _('Sign out') _home_page = _('Home Page') _profile_page = _('User profile') _member_page = _('Member Page') _admin_page = _('Admin Page')
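# *********************
# ** Usage (sketch) **
# *********************
# Illustrative only, not part of this module: wiring LoginForm into a view.
# LoginForm.validate() consults current_app.user_manager, so this assumes a
# Flask app on which Flask-User's UserManager has already been initialised.

from flask import Flask, request

app = Flask(__name__)
app.config['SECRET_KEY'] = 'change-me'  # required by Flask-WTF for CSRF


@app.route('/user/sign-in', methods=['GET', 'POST'])
def sign_in():
    form = LoginForm(request.form)
    if request.method == 'POST' and form.validate():
        # validate() already looked the user up and verified the password,
        # appending field errors on failure, so reaching here means success.
        return 'Signed in'
    return 'Show the sign-in form again (errors are in form.errors)'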
PypiClean
/ApiLogicServer-9.2.18-py3-none-any.whl/api_logic_server_cli/create_from_model/safrs-react-admin-npm-build/static/js/4877.7cebca37.chunk.js
"use strict";(self.webpackChunkreact_admin_upgrade=self.webpackChunkreact_admin_upgrade||[]).push([[4877],{54877:function(e,t,n){n.r(t),n.d(t,{conf:function(){return s},language:function(){return i}});var s={wordPattern:/(-?\d*\.\d\w*)|([^\`\~\!\#\%\^\&\*\(\)\-\=\+\[\{\]\}\\\|\;\:\'\"\,\.\<\>\/\?\s]+)/g,comments:{lineComment:"//",blockComment:["/*","*/"]},brackets:[["{","}"],["[","]"],["(",")"]],autoClosingPairs:[{open:"{",close:"}"},{open:"[",close:"]"},{open:"(",close:")"},{open:'"',close:'"'},{open:"'",close:"'"}],surroundingPairs:[{open:"{",close:"}"},{open:"[",close:"]"},{open:"(",close:")"},{open:'"',close:'"'},{open:"'",close:"'"},{open:"<",close:">"}],folding:{markers:{start:new RegExp("^\\s*//\\s*(?:(?:#?region\\b)|(?:<editor-fold\\b))"),end:new RegExp("^\\s*//\\s*(?:(?:#?endregion\\b)|(?:</editor-fold>))")}}},o=[];["abstract","activate","and","any","array","as","asc","assert","autonomous","begin","bigdecimal","blob","boolean","break","bulk","by","case","cast","catch","char","class","collect","commit","const","continue","convertcurrency","decimal","default","delete","desc","do","double","else","end","enum","exception","exit","export","extends","false","final","finally","float","for","from","future","get","global","goto","group","having","hint","if","implements","import","in","inner","insert","instanceof","int","interface","into","join","last_90_days","last_month","last_n_days","last_week","like","limit","list","long","loop","map","merge","native","new","next_90_days","next_month","next_n_days","next_week","not","null","nulls","number","object","of","on","or","outer","override","package","parallel","pragma","private","protected","public","retrieve","return","returning","rollback","savepoint","search","select","set","short","sort","stat","static","strictfp","super","switch","synchronized","system","testmethod","then","this","this_month","this_week","throw","throws","today","tolabel","tomorrow","transaction","transient","trigger","true","try","type","undelete","update","upsert","using","virtual","void","volatile","webservice","when","where","while","yesterday"].forEach((function(e){o.push(e),o.push(e.toUpperCase()),o.push(function(e){return e.charAt(0).toUpperCase()+e.substr(1)}(e))}));var i={defaultToken:"",tokenPostfix:".apex",keywords:o,operators:["=",">","<","!","~","?",":","==","<=",">=","!=","&&","||","++","--","+","-","*","/","&","|","^","%","<<",">>",">>>","+=","-=","*=","/=","&=","|=","^=","%=","<<=",">>=",">>>="],symbols:/[=><!~?:&|+\-*\/\^%]+/,escapes:/\\(?:[abfnrtv\\"']|x[0-9A-Fa-f]{1,4}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})/,digits:/\d+(_+\d+)*/,octaldigits:/[0-7]+(_+[0-7]+)*/,binarydigits:/[0-1]+(_+[0-1]+)*/,hexdigits:/[[0-9a-fA-F]+(_+[0-9a-fA-F]+)*/,tokenizer:{root:[[/[a-z_$][\w$]*/,{cases:{"@keywords":{token:"keyword.$0"},"@default":"identifier"}}],[/[A-Z][\w\$]*/,{cases:{"@keywords":{token:"keyword.$0"},"@default":"type.identifier"}}],{include:"@whitespace"},[/[{}()\[\]]/,"@brackets"],[/[<>](?!@symbols)/,"@brackets"],[/@symbols/,{cases:{"@operators":"delimiter","@default":""}}],[/@\s*[a-zA-Z_\$][\w\$]*/,"annotation"],[/(@digits)[eE]([\-+]?(@digits))?[fFdD]?/,"number.float"],[/(@digits)\.(@digits)([eE][\-+]?(@digits))?[fFdD]?/,"number.float"],[/(@digits)[fFdD]/,"number.float"],[/(@digits)[lL]?/,"number"],[/[;,.]/,"delimiter"],[/"([^"\\]|\\.)*$/,"string.invalid"],[/'([^'\\]|\\.)*$/,"string.invalid"],[/"/,"string",'@string."'],[/'/,"string","@string.'"],[/'[^\\']'/,"string"],[/(')(@escapes)(')/,["string","string.escape","string"]],[/'/,"string.invalid"]],whitespace:[[/[ 
\t\r\n]+/,""],[/\/\*\*(?!\/)/,"comment.doc","@apexdoc"],[/\/\*/,"comment","@comment"],[/\/\/.*$/,"comment"]],comment:[[/[^\/*]+/,"comment"],[/\*\//,"comment","@pop"],[/[\/*]/,"comment"]],apexdoc:[[/[^\/*]+/,"comment.doc"],[/\*\//,"comment.doc","@pop"],[/[\/*]/,"comment.doc"]],string:[[/[^\\"']+/,"string"],[/@escapes/,"string.escape"],[/\\./,"string.escape.invalid"],[/["']/,{cases:{"$#==$S2":{token:"string",next:"@pop"},"@default":"string"}}]]}}}}]); //# sourceMappingURL=4877.7cebca37.chunk.js.map
PypiClean
/node_managment_application-0.0.1.tar.gz/node_managment_application-0.0.1/nms_app/api_services/project_app_apis.py
import os
import subprocess

from rest_framework import status

from nms_app.models import SetupProjectDetails
from nms_app.nms_services.api_key_valdation import get_node_id
from nms_app.nms_services.common_services import run_command, kill_processes_by_cmdline
from nms_app.nms_services.get_json_result import get_json_result_func


def start_project_app_func(request):
    try:
        # Get the 'node_id' from the query parameters of the request
        node_id = request.query_params.get('node_id')
        # 'get_node_id()' retrieves the expected node_id
        expected_node_id = get_node_id()

        if not node_id:
            return {'status': 'fail', 'message': "Node ID is missing."}, status.HTTP_400_BAD_REQUEST
        if node_id != expected_node_id:
            return {'status': 'fail', 'message': "Invalid Node ID."}, status.HTTP_401_UNAUTHORIZED

        spd_node_category = SetupProjectDetails.objects.values_list('spd_node_category', flat=True)

        if "MASTER" in spd_node_category:
            try:
                path = "/home/ubuntu/nms_project/node_mgmt_system/nms_app/nms_services/start_l2_project.sh"
                # run_command(f"sudo dos2unix {path}")
                os.system(f'echo "dos2unix {path}" > /app/docker_pipes/project_app_pipe')
                subprocess.check_call(['/usr/bin/bash', path])
                msg = "Project application started successfully."
                return {'status': 'success', 'message': msg}, status.HTTP_200_OK
            except subprocess.CalledProcessError as e:
                return {'status': 'fail', 'message': str(e)}, status.HTTP_500_INTERNAL_SERVER_ERROR

        if "GUARDIAN" in spd_node_category:
            try:
                path = "/home/ubuntu/nms_project/node_mgmt_system/nms_app/nms_services/start_l1_project.sh"
                # run_command(f"sudo dos2unix {path}")
                os.system(f'echo "dos2unix {path}" > /app/docker_pipes/project_app_pipe')
                subprocess.check_call(['/usr/bin/bash', path])
                msg = "Project application started successfully."
                return {'status': 'success', 'message': msg}, status.HTTP_200_OK
            except subprocess.CalledProcessError as e:
                return {'status': 'fail', 'message': str(e)}, status.HTTP_500_INTERNAL_SERVER_ERROR

        # Return a JSON response with status 404 if the 'spd_node_category' is not recognized
        return {'status': 'fail', 'message': "Unknown Node Category."}, status.HTTP_404_NOT_FOUND
    except Exception as e:
        # Return a JSON response with status 500 if an unexpected error occurs
        return {'status': 'error', 'message': str(e)}, status.HTTP_500_INTERNAL_SERVER_ERROR


def stop_project_app_func(request):
    try:
        # Get the 'node_id' from the query parameters of the request
        node_id = request.query_params.get('node_id')
        # 'get_node_id()' retrieves the expected node_id
        expected_node_id = get_node_id()

        if not node_id:
            return {'status': 'fail', 'message': "Node ID is missing."}, status.HTTP_400_BAD_REQUEST
        if node_id != expected_node_id:
            return {'status': 'fail', 'message': "Invalid Node ID."}, status.HTTP_401_UNAUTHORIZED

        spd_node_category = SetupProjectDetails.objects.values_list('spd_node_category', flat=True)

        if "MASTER" in spd_node_category:
            # os.chdir(r'/home/ubuntu/L2_App/')
            # run_command('sudo docker-compose down')
            os.system('echo "cd /home/ubuntu/L2_App" > /app/docker_pipes/project_app_pipe')
            os.system('echo "docker-compose down" > /app/docker_pipes/project_app_pipe')
        elif "GUARDIAN" in spd_node_category:
            kill_processes_by_cmdline('tessellation-core-assembly-1.9.1.jar')

        msg = f"{spd_node_category[0]} project stopped successfully."
        return {'status': 'success', 'message': msg}, status.HTTP_200_OK
    except Exception as e:
        # Return a JSON response with status 500 if an unexpected error occurs
        return {'status': 'error', 'message': str(e)}, status.HTTP_500_INTERNAL_SERVER_ERROR


def restart_project_app_func(request):
    try:
        # Get the 'node_id' from the query parameters of the request
        node_id = request.query_params.get('node_id')
        # 'get_node_id()' retrieves the expected node_id
        expected_node_id = get_node_id()

        if not node_id:
            return {'status': 'fail', 'message': "Node ID is missing."}, status.HTTP_400_BAD_REQUEST
        if node_id != expected_node_id:
            return {'status': 'fail', 'message': "Invalid Node ID."}, status.HTTP_401_UNAUTHORIZED

        # Your code to restart the project application goes here
        # Example: call_restart_project_app_function()
        msg = "Project application restarted successfully."
        return {'status': 'success', 'message': msg}, status.HTTP_200_OK
    except Exception as e:
        # Return a JSON response with status 500 if an unexpected error occurs
        return {'status': 'error', 'message': str(e)}, status.HTTP_500_INTERNAL_SERVER_ERROR


def app_health_status_func(request):
    # Get the 'node_id' from the query parameters of the request
    node_id = request.query_params.get('node_id')
    # 'get_node_id()' retrieves the expected node_id
    expected_node_id = get_node_id()

    if node_id == expected_node_id:
        # Get the health status data from 'get_json_result_func'
        data = get_json_result_func('IPHealthStatus')
        ihs_response = [i.get('ihs_response') for i in data['IPHealthStatus']]

        # The server is considered down when every health status is 'Not Found'
        is_server_down = all(i == 'Not Found' for i in ihs_response)

        # Compose the response message based on server status
        msg = "Server is down." if is_server_down else "Server is up."

        # Return a JSON response with status 200
        return {'status': 'success', 'message': msg}, status.HTTP_200_OK
    else:
        # Return a JSON response with status 401 if the node_id is invalid
        return {'status': 'fail', 'message': "Invalid Node Id."}, status.HTTP_401_UNAUTHORIZED
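# Illustrative wiring sketch (the view function and route name are assumptions,
# not part of this module): each service function returns a (payload, http_status)
# tuple, so a typical Django REST Framework endpoint unpacks it like this.

from rest_framework.decorators import api_view
from rest_framework.response import Response


@api_view(['GET'])
def start_project_app(request):
    # Delegate to the service function and translate its tuple into a Response
    payload, http_status = start_project_app_func(request)
    return Response(payload, status=http_status)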
PypiClean
/MAD-0.2.2.zip/MAD-0.2.2/mad/simulation/tasks.py
#
# This file is part of MAD.
#
# MAD is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# MAD is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with MAD. If not, see <http://www.gnu.org/licenses/>.
#

from mad.evaluation import Symbols
from mad.simulation.commons import SimulatedEntity


class TaskPool:

    def put(self, task):
        raise NotImplementedError("TaskPool::put is abstract")

    def take(self):
        raise NotImplementedError("TaskPool::take is abstract")

    @property
    def size(self):
        raise NotImplementedError("TaskPool::size is abstract")

    @property
    def blocked_count(self):
        raise NotImplementedError("TaskPool::blocked_count is abstract")

    @property
    def are_pending(self):
        raise NotImplementedError("TaskPool::are_pending is abstract")

    def activate(self, task):
        raise NotImplementedError("TaskPool::activate is abstract")

    def pause(self, task):
        raise NotImplementedError("TaskPool::pause is abstract")

    def intercept(self, task):
        raise NotImplementedError("TaskPool::intercept is abstract")


class TaskPoolDecorator(TaskPool):

    def __init__(self, delegate):
        assert isinstance(delegate, TaskPool), \
            "Delegate should be a TaskPool (found '{!s}')".format(type(delegate))
        self.delegate = delegate

    def put(self, task):
        self.delegate.put(task)

    def take(self):
        return self.delegate.take()

    @TaskPool.size.getter
    def size(self):
        return self.delegate.size

    @TaskPool.blocked_count.getter
    def blocked_count(self):
        return self.delegate.blocked_count

    @TaskPool.are_pending.getter
    def are_pending(self):
        return self.delegate.are_pending

    def activate(self, task):
        self.delegate.activate(task)

    def pause(self, task):
        self.delegate.pause(task)

    def intercept(self, task):
        self.delegate.intercept(task)


class TaskPoolWrapper(TaskPoolDecorator, SimulatedEntity):
    """
    Wrap a task pool into a simulation entity that properly logs events
    """

    def __init__(self, environment, delegate):
        SimulatedEntity.__init__(self, Symbols.QUEUE, environment)
        TaskPoolDecorator.__init__(self, delegate)

    def put(self, task):
        super().put(task)
        self.listener.storage_of(task.request)

    def take(self):
        task = super().take()
        self.listener.selection_of(task.request)
        return task

    def activate(self, task):
        super().activate(task)
        self.listener.resuming(task.request)


class AbstractTaskPool(TaskPool):

    def __init__(self):
        super().__init__()
        self.tasks = []
        self.interrupted = []
        self.paused = []

    def pause(self, task):
        self.paused.append(task)

    def intercept(self, task):
        self.paused.remove(task)

    def put(self, task):
        task.request.accept()
        self.tasks.append(task)

    def take(self):
        # Interrupted (previously paused) tasks take precedence over new ones
        if len(self.interrupted) > 0:
            return self._pick_from(self.interrupted)
        if len(self.tasks) > 0:
            return self._pick_from(self.tasks)
        raise ValueError("Unable to take from an empty task pool!")

    def _pick_from(self, candidates):
        high_priority = self._highest_priority(candidates)
        return self._next(high_priority)

    @staticmethod
    def _highest_priority(candidates):
        highest = max(candidates, key=lambda task: task.priority)
        return [any_task for any_task in candidates
                if any_task.priority == highest.priority]

    def _next(self, candidates):
        raise NotImplementedError("AbstractTaskPool::_next is abstract!")

    def _remove(self, task):
        if task in self.tasks:
            self.tasks.remove(task)
        if task in self.interrupted:
            self.interrupted.remove(task)

    @property
    def is_empty(self):
        return self.size == 0

    @TaskPool.are_pending.getter
    def are_pending(self):
        return not self.is_empty

    @TaskPool.size.getter
    def size(self):
        return len(self.tasks) + len(self.interrupted)

    @TaskPool.blocked_count.getter
    def blocked_count(self):
        return len(self.paused)

    def activate(self, task):
        assert task in self.paused, \
            "Error: Req. {:d} should have been paused!".format(task.request.identifier)
        self.paused.remove(task)
        self.interrupted.append(task)


class FIFOTaskPool(AbstractTaskPool):

    def __init__(self):
        super().__init__()

    def _next(self, candidates):
        selected = candidates[0]
        self._remove(selected)
        return selected


class LIFOTaskPool(AbstractTaskPool):

    def __init__(self):
        super().__init__()

    def _next(self, candidates):
        selected = candidates[-1]
        self._remove(selected)
        return selected


class Task:

    def __init__(self, request=None):
        self.request = request
        self.is_started = False
        self.resume = lambda: None

    def reject(self):
        self.request.reply_error()

    @property
    def priority(self):
        return self.request.priority

    def mark_as_started(self):
        self.is_started = True
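# Minimal usage sketch (not part of MAD itself): exercising the selection
# policy with a stand-in request object. Real simulation requests also carry
# reply_error() and priority semantics; this stub only mimics what put() needs.

class _StubRequest:

    def __init__(self, identifier, priority):
        self.identifier = identifier
        self.priority = priority

    def accept(self):
        pass


if __name__ == "__main__":
    pool = FIFOTaskPool()
    pool.put(Task(_StubRequest(1, priority=0)))
    pool.put(Task(_StubRequest(2, priority=5)))
    pool.put(Task(_StubRequest(3, priority=5)))

    # The highest priority wins; FIFO order breaks the tie between 2 and 3
    assert pool.take().request.identifier == 2
    assert pool.take().request.identifier == 3
    assert pool.take().request.identifier == 1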
PypiClean
/Flask-Statics-Helper-1.0.0.tar.gz/Flask-Statics-Helper-1.0.0/flask_statics/static/BootstrapValidator/js/language/ar_MA.js
(function ($) { /** * Arabic language package * Translated by @Arkni */ $.fn.bootstrapValidator.i18n = $.extend(true, $.fn.bootstrapValidator.i18n, { base64: { 'default': 'الرجاء إدخال قيمة مشفرة طبقا للقاعدة 64.' }, between: { 'default': 'الرجاء إدخال قيمة بين %s و %s .', notInclusive: 'الرجاء إدخال قيمة بين %s و %s بدقة.' }, callback: { 'default': 'الرجاء إدخال قيمة صالحة.' }, choice: { 'default': 'الرجاء إدخال قيمة صالحة.', less: 'الرجاء اختيار %s خيارات كحد أدنى.', more: 'الرجاء اختيار %s خيارات كحد أقصى.', between: 'الرجاء إختيار %s-%s خيارات.' }, color: { 'default': 'الرجاء إدخال رمز لون صالح.' }, creditCard: { 'default': 'الرجاء إدخال رقم بطاقة إئتمان صحيح.' }, cusip: { 'default': 'الرجاء إدخال رقم CUSIP صالح.' }, cvv: { 'default': 'الرجاء إدخال رقم CVV صالح.' }, date: { 'default': 'الرجاء إدخال تاريخ صالح.', min: 'الرجاء إدخال تاريخ بعد %s.', max: 'الرجاء إدخال تاريخ قبل %s.', range: 'الرجاء إدخال تاريخ في المجال %s - %s.' }, different: { 'default': 'الرجاء إدخال قيمة مختلفة.' }, digits: { 'default': 'الرجاء إدخال الأرقام فقط.' }, ean: { 'default': 'الرجاء إدخال رقم EAN صالح.' }, emailAddress: { 'default': 'الرجاء إدخال بريد إلكتروني صحيح.' }, file: { 'default': 'الرجاء إختيار ملف صالح.' }, greaterThan: { 'default': 'الرجاء إدخال قيمة أكبر من أو تساوي %s.', notInclusive: 'الرجاء إدخال قيمة أكبر من %s.' }, grid: { 'default': 'الرجاء إدخال رقم GRid صالح.' }, hex: { 'default': 'الرجاء إدخال رقم ست عشري صالح.' }, hexColor: { 'default': 'الرجاء إدخال رمز لون صالح.' }, iban: { 'default': 'الرجاء إدخال رقم IBAN صالح.', countryNotSupported: 'البلد ذو الرمز %s غير معتمد.', country: 'الرجاء إدخال رقم IBAN صالح في %s.', countries: { AD: 'أندورا', AE: 'الإمارات العربية المتحدة', AL: 'ألبانيا', AO: 'أنغولا', AT: 'النمسا', AZ: 'أذربيجان', BA: 'البوسنة والهرسك', BE: 'بلجيكا', BF: 'بوركينا فاسو', BG: 'بلغاريا', BH: 'البحرين', BI: 'بوروندي', BJ: 'بنين', BR: 'البرازيل', CH: 'سويسرا', CI: 'ساحل العاج', CM: 'الكاميرون', CR: 'كوستاريكا', CV: 'الرأس الأخضر', CY: 'قبرص', CZ: 'التشيك', DE: 'ألمانيا', DK: 'الدنمارك', DO: 'جمهورية الدومينيكان', DZ: 'الجزائر', EE: 'إستونيا', ES: 'إسبانيا', FI: 'فنلندا', FO: 'جزر فارو', FR: 'فرنسا', GB: 'المملكة المتحدة', GE: 'جورجيا', GI: 'جبل طارق', GL: 'جرينلاند', GR: 'اليونان', GT: 'غواتيمالا', HR: 'كرواتيا', HU: 'المجر', IE: 'أيرلندا', IL: 'إسرائيل', IR: 'إيران', IS: 'آيسلندا', IT: 'إيطاليا', JO: 'الأردن', KW: 'الكويت', KZ: 'كازاخستان', LB: 'لبنان', LI: 'ليختنشتاين', LT: 'ليتوانيا', LU: 'لوكسمبورغ', LV: 'لاتفيا', MC: 'موناكو', MD: 'مولدوفا', ME: 'الجبل الأسود', MG: 'مدغشقر', MK: 'جمهورية مقدونيا', ML: 'مالي', MR: 'موريتانيا', MT: 'مالطا', MU: 'موريشيوس', MZ: 'موزمبيق', NL: 'هولندا', NO: 'النرويج', PK: 'باكستان', PL: 'بولندا', PS: 'فلسطين', PT: 'البرتغال', QA: 'قطر', RO: 'رومانيا', RS: 'صربيا', SA: 'المملكة العربية السعودية', SE: 'السويد', SI: 'سلوفينيا', SK: 'سلوفاكيا', SM: 'سان مارينو', SN: 'السنغال', TN: 'تونس', TR: 'تركيا', VG: 'جزر العذراء البريطانية' } }, id: { 'default': 'الرجاء إدخال رقم هوية صالحة.', countryNotSupported: 'البلد ذو الرمز %s غير معتمد.', country: 'الرجاء إدخال رقم تعريف صالح في %s.', countries: { BA: 'البوسنة والهرسك', BG: 'بلغاريا', BR: 'البرازيل', CH: 'سويسرا', CL: 'تشيلي', CN: 'الصين', CZ: 'التشيك', DK: 'الدنمارك', EE: 'إستونيا', ES: 'إسبانيا', FI: 'فنلندا', HR: 'كرواتيا', IE: 'أيرلندا', IS: 'آيسلندا', LT: 'ليتوانيا', LV: 'لاتفيا', ME: 'الجبل الأسود', MK: 'جمهورية مقدونيا', NL: 'هولندا', RO: 'رومانيا', RS: 'صربيا', SE: 'السويد', SI: 'سلوفينيا', SK: 'سلوفاكيا', SM: 'سان مارينو', TH: 'تايلاند', ZA: 'جنوب أفريقيا' } }, identical: { 'default': 
'الرجاء إدخال نفس القيمة.' }, imei: { 'default': 'الرجاء إدخال رقم IMEI صالح.' }, imo: { 'default': 'الرجاء إدخال رقم IMO صالح.' }, integer: { 'default': 'الرجاء إدخال رقم صحيح.' }, ip: { 'default': 'الرجاء إدخال عنوان IP صالح.', ipv4: 'الرجاء إدخال عنوان IPv4 صالح.', ipv6: 'الرجاء إدخال عنوان IPv6 صالح.' }, isbn: { 'default': 'الرجاء إدخال رقم ISBN صالح.' }, isin: { 'default': 'الرجاء إدخال رقم ISIN صالح.' }, ismn: { 'default': 'الرجاء إدخال رقم ISMN صالح.' }, issn: { 'default': 'الرجاء إدخال رقم ISSN صالح.' }, lessThan: { 'default': 'الرجاء إدخال قيمة أصغر من أو تساوي %s.', notInclusive: 'الرجاء إدخال قيمة أصغر من %s.' }, mac: { 'default': 'يرجى إدخال عنوان MAC صالح.' }, meid: { 'default': 'الرجاء إدخال رقم MEID صالح.' }, notEmpty: { 'default': 'الرجاء إدخال قيمة.' }, numeric: { 'default': 'الرجاء إدخال عدد عشري صالح.' }, phone: { 'default': 'الرجاء إدخال رقم هاتف صحيح.', countryNotSupported: 'البلد ذو الرمز %s غير معتمد.', country: 'الرجاء إدخال رقم هاتف صالح في %s.', countries: { BR: 'البرازيل', CN: 'الصين', CZ: 'التشيك', DE: 'ألمانيا', DK: 'الدنمارك', ES: 'إسبانيا', FR: 'فرنسا', GB: 'المملكة المتحدة', MA: 'المغرب', PK: 'باكستان', RO: 'رومانيا', RU: 'روسيا', SK: 'سلوفاكيا', TH: 'تايلاند', US: 'الولايات المتحدة', VE: 'فنزويلا' } }, regexp: { 'default': 'الرجاء إدخال قيمة مطابقة للنمط.' }, remote: { 'default': 'الرجاء إدخال قيمة صالحة.' }, rtn: { 'default': 'الرجاء إدخال رقم RTN صالح.' }, sedol: { 'default': 'الرجاء إدخال رقم SEDOL صالح.' }, siren: { 'default': 'الرجاء إدخال رقم SIREN صالح.' }, siret: { 'default': 'الرجاء إدخال رقم SIRET صالح.' }, step: { 'default': 'الرجاء إدخال قيمة من مضاعفات %s .' }, stringCase: { 'default': 'الرجاء إدخال أحرف صغيرة فقط.', upper: 'الرجاء إدخال أحرف كبيرة فقط.' }, stringLength: { 'default': 'الرجاء إدخال قيمة ذات طول صحيح.', less: 'الرجاء إدخال أقل من %s حرفا.', more: 'الرجاء إدخال أكتر من %s حرفا.', between: 'الرجاء إدخال قيمة ذات عدد حروف بين %s و %s حرفا.' }, uri: { 'default': 'الرجاء إدخال URI صالح.' }, uuid: { 'default': 'الرجاء إدخال رقم UUID صالح.', version: 'الرجاء إدخال رقم UUID صالح إصدار %s.' }, vat: { 'default': 'الرجاء إدخال رقم VAT صالح.', countryNotSupported: 'البلد ذو الرمز %s غير معتمد.', country: 'الرجاء إدخال رقم VAT صالح في %s.', countries: { AT: 'النمسا', BE: 'بلجيكا', BG: 'بلغاريا', BR: 'البرازيل', CH: 'سويسرا', CY: 'قبرص', CZ: 'التشيك', DE: 'جورجيا', DK: 'الدنمارك', EE: 'إستونيا', ES: 'إسبانيا', FI: 'فنلندا', FR: 'فرنسا', GB: 'المملكة المتحدة', GR: 'اليونان', EL: 'اليونان', HR: 'كرواتيا', HU: 'المجر', IE: 'أيرلندا', IS: 'آيسلندا', IT: 'إيطاليا', LT: 'ليتوانيا', LU: 'لوكسمبورغ', LV: 'لاتفيا', MT: 'مالطا', NL: 'هولندا', NO: 'النرويج', PL: 'بولندا', PT: 'البرتغال', RO: 'رومانيا', RU: 'روسيا', RS: 'صربيا', SE: 'السويد', SI: 'سلوفينيا', SK: 'سلوفاكيا', VE: 'فنزويلا', ZA: 'جنوب أفريقيا' } }, vin: { 'default': 'الرجاء إدخال رقم VIN صالح.' }, zipCode: { 'default': 'الرجاء إدخال رمز بريدي صالح.', countryNotSupported: 'البلد ذو الرمز %s غير معتمد.', country: 'الرجاء إدخال رمز بريدي صالح في %s.', countries: { AT: 'النمسا', BR: 'البرازيل', CA: 'كندا', CH: 'سويسرا', CZ: 'التشيك', DE: 'ألمانيا', DK: 'الدنمارك', FR: 'فرنسا', GB: 'المملكة المتحدة', IE: 'أيرلندا', IT: 'إيطاليا', MA: 'المغرب', NL: 'هولندا', PT: 'البرتغال', RO: 'رومانيا', RU: 'روسيا', SE: 'السويد', SG: 'سنغافورة', SK: 'سلوفاكيا', US: 'الولايات المتحدة' } } }); }(window.jQuery));
PypiClean
/Hermes_lnestelroad-1.0.2-py3-none-any.whl/Hermes/Timer.py
import time
from typing import Dict, List

# NOTE: These objects are examples of handling events without including zeromq
# sockets, as opposed to how the Message class does things.
# https://stackoverflow.com/questions/52722864/python-periodic-timer-interrupt
# https://github.com/sankalpjonn/timeloop/tree/d3e58dbe3b362d4f08077f570a8cda870875de65
from threading import Timer


class ProgramKilled(Exception):
    pass


def signal_handler(signum, frame):
    raise ProgramKilled


# https://stackoverflow.com/a/38317060
class PeriodicEvent():
    def __init__(self, interval, function, *args, **kwargs):
        self._timer = None
        self.interval = interval
        self.function = function
        self.args = args
        self.kwargs = kwargs
        self.is_running = False
        self.start()

    def _run(self):
        self.is_running = False
        self.start()
        self.function(*self.args, **self.kwargs)

    def start(self):
        if not self.is_running:
            self._timer = Timer(self.interval, self._run)
            self._timer.start()
            self.is_running = True

    def stop(self):
        self._timer.cancel()
        self.is_running = False


class Peer():
    """
    Struct to hold peer liveliness information and update values
    """

    def __init__(self, liveliness=1000, retries=3):
        self.liveliness: int = liveliness
        self.retries = self.reset_retries = retries
        self.last_recv = time.time()
        self.expect_time = False

    def is_time(self) -> bool:
        """
        Checks to see if it is time to expect a heartbeat from the peer.

        Returns
        -------
        bool
        """
        if time.time() >= self.last_recv + self.liveliness:
            self.expect_time = True
        return self.expect_time

    def received(self):
        """
        Updates last_recv and resets the retries and expect_time attributes
        upon receiving a message.
        """
        self.last_recv = time.time()
        self.retries = self.reset_retries
        self.expect_time = False

    def update_liveliness(self, new_val):
        self.liveliness = new_val

    def update_retries(self, new_val):
        self.reset_retries = self.retries = new_val


class Heartbeater():
    """
    A timer to keep track of when to send and when to expect heartbeats for
    nodes.

    Attributes
    ----------
    tabs : Dict[str, Peer]
        Holds information on all of the peers which the node is interested in
        keeping tabs on and sending heartbeats to.
    tardy : List[str]
        Contains the names of all the peers which have missed their heartbeat
        interval.
    last_sent : int
        The time stamp of the current node's last sent heartbeat.
    liveliness : int
        The interval length at which to send heartbeats.
    send_time : bool
        A flag to signify whether it is time to send a new heartbeat to peers.
    """

    def __init__(self, liveliness=1000):
        # Sends a little earlier to give some buffer room for unexpected
        # stalling.
        self.liveliness = liveliness - 10
        self.send_time = False
        # Instance-level containers (class-level mutable defaults would be
        # shared across all Heartbeater instances).
        self.tabs: Dict[str, Peer] = {}
        self.tardy: List[str] = []
        self.last_sent = 0

    def add_peer(self, peer_name, peer_liveliness=1000, peer_retries=3):
        """
        Adds a new peer to keep track of.
        """
        self.tabs[peer_name] = Peer(peer_liveliness, peer_retries)

    def remove_peer(self, peer_name: str):
        """
        Removes a presumed dead peer.
        """
        del self.tabs[peer_name]

    def reset_peers(self, peer_names: List[str]):
        """
        Updates peers' timer values.

        Parameters
        ----------
        peer_names : List[str]
            A list of peer names from which messages have been received.
        """
        for peer_name in peer_names:
            self.tabs[peer_name].received()

    def is_time(self) -> bool:
        """
        Checks the current time to determine if it is time to send and expect
        heartbeats.

        Returns
        -------
        bool
        """
        if time.time() >= self.last_sent + self.liveliness:
            self.send_time = True

        for peer_name, peer_obj in self.tabs.items():
            if peer_obj.is_time():
                self.tardy.append(peer_name)
                self.send_time = True

        return self.send_time

    def reset(self):
        """
        Updates the send time stamp and resets the send flag.
        """
        self.last_sent = time.time()
        self.send_time = False
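
# --- Usage sketch (illustrative; not part of the original module) ---
# A minimal example, assuming liveliness values are expressed in seconds,
# of pairing Heartbeater with PeriodicEvent to poll for due heartbeats
# once per second.
if __name__ == "__main__":
    heart = Heartbeater(liveliness=11)  # effective interval: 11 - 10 = 1s
    heart.add_peer("peer-a", peer_liveliness=2)

    def tick():
        if heart.is_time():
            print("time to send a heartbeat; tardy peers:", heart.tardy)
            heart.reset()

    event = PeriodicEvent(1, tick)
    time.sleep(5)
    event.stop()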
PypiClean
/NESTML-5.3.0-py3-none-any.whl/pynestml/meta_model/ast_external_variable.py
from typing import Optional

from pynestml.meta_model.ast_variable import ASTVariable


class ASTExternalVariable(ASTVariable):
    r"""
    This class is used to store a single "external" variable: a variable the
    value of which is obtained at runtime from a neuron's postsynaptic partner.
    """
    _altscope = None
    _altname = None

    def __init__(self, name, altname=None, altscope=None, *args, **kwargs):
        r"""
        Standard constructor.
        """
        super(ASTExternalVariable, self).__init__(name, *args, **kwargs)
        self._altname = altname
        self._altscope = altscope

    def clone(self):
        r"""
        Return a clone ("deep copy") of this node.
        """
        return ASTExternalVariable(altname=self._altname,
                                   altscope=self._altscope,
                                   # ASTVariable attributes:
                                   name=self.name,
                                   differential_order=self.differential_order,
                                   type_symbol=self.type_symbol,
                                   vector_parameter=self.vector_parameter,
                                   # ASTNode common attributes:
                                   source_position=self.get_source_position(),
                                   scope=self.scope,
                                   comment=self.comment,
                                   pre_comments=[s for s in self.pre_comments],
                                   in_comment=self.in_comment,
                                   implicit_conversion_factor=self.implicit_conversion_factor)

    def update_alt_scope(self, scope):
        self._altscope = scope

    def set_alternate_name(self, alternate_name: Optional[str]):
        self._altname = alternate_name

    def get_alternate_name(self):
        return self._altname

    def get_scope(self):
        if self._altscope:
            return self._altscope.get_scope()
        return self.scope
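
# --- Usage sketch (illustrative; the exact construction arguments beyond the
# variable name are hypothetical and depend on what ASTVariable/ASTNode
# require in a given NESTML version) ---
#   ext = ASTExternalVariable("I_post", altname="I_post__for_synapse")
#   ext.set_alternate_name("I_post__for_neuron")
#   assert ext.get_alternate_name() == "I_post__for_neuron"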
PypiClean
/GraKeL-0.1.9-cp310-cp310-macosx_10_9_universal2.whl/grakel/kernels/random_walk.py
# Author: Ioannis Siglidis <[email protected]> # License: BSD 3 clause import warnings import numpy as np from itertools import product from numpy import ComplexWarning from numpy.linalg import inv from numpy.linalg import eig from numpy.linalg import multi_dot from scipy.linalg import expm from scipy.sparse.linalg import cg from scipy.sparse.linalg import LinearOperator from grakel.kernels import Kernel from grakel.graph import Graph # Python 2/3 cross-compatibility import from builtins import range from six.moves.collections_abc import Iterable class RandomWalk(Kernel): """The random walk kernel class. See :cite:`kashima2003marginalized`, :cite:`gartner2003graph` and :cite:`vishwanathan2006fast`. Parameters ---------- lambda : float A lambda factor concerning summation. method_type : str, valid_values={"baseline", "fast"} The method to use for calculating random walk kernel: + "baseline" *Complexity*: :math:`O(|V|^6)` (see :cite:`kashima2003marginalized`, :cite:`gartner2003graph`) + "fast" *Complexity*: :math:`O((|E|+|V|)|V||M|)` (see :cite:`vishwanathan2006fast`) kernel_type : str, valid_values={"geometric", "exponential"} Defines how inner summation will be applied. p : int or None If initialised defines the number of steps. Attributes ---------- mu_ : list List of coefficients concerning a finite sum, in case p is not None. """ _graph_format = "adjacency" def __init__(self, n_jobs=None, normalize=False, verbose=False, lamda=0.1, method_type="fast", kernel_type="geometric", p=None): """Initialise a random_walk kernel.""" # setup valid parameters and initialise from parent super(RandomWalk, self).__init__( n_jobs=n_jobs, normalize=normalize, verbose=verbose) # Ignores ComplexWarning as it does not signify anything problematic warnings.filterwarnings('ignore', category=ComplexWarning) # Setup method type and define operation. self.method_type = method_type self.kernel_type = kernel_type self.p = p self.lamda = lamda self._initialized.update({"method_type": False, "kernel_type": False, "p": False, "lamda": False}) def initialize(self): """Initialize all transformer arguments, needing initialization.""" super(RandomWalk, self).initialize() if not self._initialized["method_type"]: # Setup method type and define operation. 
if (self.method_type == "baseline" or
                    (self.method_type == "fast" and
                     self.p is None and
                     self.kernel_type == "geometric")):
                self.add_input_ = idem
            elif self.method_type == "fast":
                # Spectral decomposition, applicable when the adjacency
                # matrix is symmetric.
                self.add_input_ = sd
            else:
                raise ValueError('unsupported method_type')
            self._initialized["method_type"] = True

        if not self._initialized["kernel_type"]:
            if self.kernel_type not in ["geometric", "exponential"]:
                raise ValueError('unsupported kernel type: either "geometric" '
                                 'or "exponential"')
            self._initialized["kernel_type"] = True

        if not self._initialized["p"]:
            if self.p is not None:
                if type(self.p) is int and self.p > 0:
                    if self.kernel_type == "geometric":
                        self.mu_ = [1]
                        fact = 1
                        power = 1
                        for k in range(1, self.p + 1):
                            fact *= k
                            power *= self.lamda
                            self.mu_.append(fact/power)
                    else:
                        self.mu_ = [1]
                        power = 1
                        for k in range(1, self.p + 1):
                            power *= self.lamda
                            self.mu_.append(power)
                else:
                    raise TypeError('p must be a positive integer or None')
            self._initialized["p"] = True

        if not self._initialized["lamda"]:
            if self.lamda <= 0:
                raise TypeError('lamda must be a positive number greater '
                                'than zero')
            elif self.lamda > 0.5 and self.p is None:
                warnings.warn('random-walk series may fail to converge')
            self._initialized["lamda"] = True

    def parse_input(self, X):
        """Parse and create features for random_walk kernel.

        Parameters
        ----------
        X : iterable
            For the input to pass the test, we must have:
            Each element must be an iterable with at most three features and
            at least one. The first that is obligatory is a valid graph
            structure (adjacency matrix or edge_dictionary) while the second
            is node_labels and the third edge_labels (that correspond to the
            given graph format). A valid input also consists of graph type
            objects.

        Returns
        -------
        out : list
            The extracted adjacency matrices for any given input.

        """
        if not isinstance(X, Iterable):
            raise TypeError('input must be an iterable\n')
        else:
            i = 0
            out = list()
            for (idx, x) in enumerate(iter(X)):
                is_iter = isinstance(x, Iterable)
                if is_iter:
                    x = list(x)
                if is_iter and len(x) in [0, 1, 2, 3]:
                    if len(x) == 0:
                        warnings.warn('Ignoring empty element' +
                                      ' on index: ' + str(idx))
                        continue
                    else:
                        A = Graph(x[0], {}, {},
                                  self._graph_format).get_adjacency_matrix()
                elif type(x) is Graph:
                    A = x.get_adjacency_matrix()
                else:
                    raise TypeError('each element of X must be either a ' +
                                    'graph or an iterable with at least 1 ' +
                                    'and at most 3 elements\n')
                i += 1
                out.append(self.add_input_(A))
            if i == 0:
                raise ValueError('parsed input is empty')
            return out

    def pairwise_operation(self, X, Y):
        """Calculate the random walk kernel.

        Fast:
        Spectral decomposition algorithm as presented in
        :cite:`vishwanathan2006fast` p.13, s.4.4, with complexity of
        :math:`O((|E|+|V|)|E||V|^2)` for graphs without labels.

        Baseline:
        Algorithm presented in :cite:`kashima2003marginalized`,
        :cite:`gartner2003graph` with complexity of :math:`O(|V|^6)`

        Parameters
        ----------
        X, Y : Objects
            Objects as produced from parse_input.

        Returns
        -------
        kernel : number
            The kernel value.
""" if self.method_type == "baseline": # calculate the product graph XY = np.kron(X, Y) # algorithm presented in # [Kashima et al., 2003; Gartner et al., 2003] # complexity of O(|V|^6) # XY is a square matrix s = XY.shape[0] if self.p is not None: P = np.eye(XY.shape[0]) S = self.mu_[0] * P for k in self.mu_[1:]: P = np.matmul(P, XY) S += k*P else: if self.kernel_type == "geometric": S = inv(np.identity(s) - self.lamda*XY).T elif self.kernel_type == "exponential": S = expm(self.lamda*XY).T return np.sum(S) elif self.method_type == "fast" and (self.p is not None or self.kernel_type == "exponential"): # Spectral demoposition algorithm as presented in # [Vishwanathan et al., 2006] p.13, s.4.4, with # complexity of O((|E|+|V|)|E||V|^2) for graphs # witout labels # calculate kernel qi_Pi, wi = X qj_Pj, wj = Y # calculate flanking factor ff = np.expand_dims(np.kron(qi_Pi, qj_Pj), axis=0) # calculate D based on the method Dij = np.kron(wi, wj) if self.p is not None: D = np.ones(shape=(Dij.shape[0],)) S = self.mu_[0] * D for k in self.mu_[1:]: D *= Dij S += k*D S = np.diagflat(S) else: # Exponential S = np.diagflat(np.exp(self.lamda*Dij)) return ff.dot(S).dot(ff.T) else: # Random Walk # Conjugate Gradient Method as presented in # [Vishwanathan et al., 2006] p.12, s.4.2 Ax, Ay = X, Y xs, ys = Ax.shape[0], Ay.shape[0] mn = xs*ys def lsf(x, lamda): xm = x.reshape((xs, ys), order='F') y = np.reshape(multi_dot((Ax, xm, Ay)), (mn,), order='F') return x - self.lamda * y # A*x=b A = LinearOperator((mn, mn), matvec=lambda x: lsf(x, self.lamda)) b = np.ones(mn) x_sol, _ = cg(A, b, tol=1.0e-6, maxiter=20, atol='legacy') return np.sum(x_sol) class RandomWalkLabeled(RandomWalk): """The labeled random walk kernel class. See :cite:`kashima2003marginalized`, :cite:`gartner2003graph` and :cite:`vishwanathan2006fast`. Parameters ---------- lambda : float A lambda factor concerning summation. method_type : str, valid_values={"baseline", "fast"} The method to use for calculating random walk kernel [geometric]: + "baseline" *Complexity*: :math:`O(|V|^6)` (see :cite:`kashima2003marginalized`, :cite:`gartner2003graph`) + "fast" *Complexity*: :math:`O(|E|^{2}rd|V|^{3})` (see :cite:`vishwanathan2006fast`) kernel_type : str, valid_values={"geometric", "exponential"} Defines how inner summation will be applied. p : int, optional If initialised defines the number of steps. Attributes ---------- _lamda : float, default=0.1 A lambda factor concerning summation. _kernel_type : str, valid_values={"geometric", "exponential"}, default="geometric" Defines how inner summation will be applied. _method_type : str valid_values={"baseline", "fast"}, default="fast" The method to use for calculating random walk kernel: + "baseline" *Complexity*: :math:`O(|V|^6)` (see :cite:`kashima2003marginalized`, :cite:`gartner2003graph`) + "fast" *Complexity*: :math:`O((|E|+|V|)|V||M|)` (see :cite:`vishwanathan2006fast`) _p : int, default=1 If not -1, the number of steps of the random walk kernel. """ _graph_format = "adjacency" def __init__(self, n_jobs=None, normalize=False, verbose=False, lamda=0.1, method_type="fast", kernel_type="geometric", p=None): """Initialise a labeled random_walk kernel.""" # Initialise from parent super(RandomWalkLabeled, self).__init__( n_jobs=n_jobs, normalize=normalize, verbose=verbose, lamda=lamda, method_type=method_type, kernel_type=kernel_type, p=p) def parse_input(self, X): """Parse and create features for graphlet_sampling kernel. 
Parameters
        ----------
        X : iterable
            For the input to pass the test, we must have:
            Each element must be an iterable with at most three features and
            at least two. The first that is obligatory is a valid graph
            structure (adjacency matrix or edge_dictionary) while the second
            is node_labels and the third edge_labels (that correspond to the
            given graph format). A valid input also consists of graph type
            objects.

        Returns
        -------
        out : list
            The extracted adjacency matrices for any given input.

        """
        if not isinstance(X, Iterable):
            raise TypeError('input must be an iterable\n')
        else:
            i = 0
            proc = list()
            for (idx, x) in enumerate(iter(X)):
                is_iter = isinstance(x, Iterable)
                if is_iter:
                    x = list(x)
                if is_iter and len(x) == 0:
                    warnings.warn('Ignoring empty element' +
                                  ' on index: ' + str(idx))
                    continue
                elif is_iter and len(x) in [2, 3]:
                    x = Graph(x[0], x[1], {}, self._graph_format)
                elif type(x) is not Graph:
                    raise TypeError('each element of X must be either a ' +
                                    'graph or an iterable with at least 2 ' +
                                    'and at most 3 elements\n')
                i += 1
                x.desired_format("adjacency")
                Ax = x.get_adjacency_matrix()
                Lx = x.get_labels(purpose="adjacency")
                Lx = [Lx[idx] for idx in range(Ax.shape[0])]
                proc.append((Ax, Lx, Ax.shape[0]))

            out = list()
            for Ax, Lx, s in proc:
                amss = dict()
                labels = set(Lx)
                Lx = np.array(Lx)
                for t in product(labels, labels):
                    selector = np.matmul(np.expand_dims(Lx == t[0], axis=1),
                                         np.expand_dims(Lx == t[1], axis=0))
                    amss[t] = Ax * selector
                out.append((amss, s))

            if i == 0:
                raise ValueError('parsed input is empty')
            return out

    def pairwise_operation(self, X, Y):
        """Calculate the labeled random walk kernel.

        Fast [geometric]:
        Conjugate Gradient method as presented in
        :cite:`vishwanathan2006fast` p.12, s.4.2, with complexity of
        :math:`O(|E|^{2}rd|V|^{3})` for labeled graphs.

        Baseline:
        Algorithm presented in :cite:`kashima2003marginalized`,
        :cite:`gartner2003graph` with complexity of :math:`O(|V|^6)`

        Parameters
        ----------
        X, Y : tuples
            Tuples of adjacency matrices and labels.

        Returns
        -------
        kernel : number
            The kernel value.

        """
        X, xs = X
        Y, ys = Y
        ck = set(X.keys()) & (set(Y.keys()))
        mn = xs*ys

        if self.kernel_type == "exponential" or self.method_type == "baseline" or self.p is not None:
            # Calculate the Kronecker product matrix
            XY = np.zeros(shape=(mn, mn))
            for k in ck:
                XY += np.kron(X[k], Y[k])

            # XY is a square matrix
            s = XY.shape[0]

            if self.p is not None:
                P = np.eye(XY.shape[0])
                S = self.mu_[0] * P
                for k in self.mu_[1:]:
                    # matrix power, as in the unlabeled kernel above
                    P = np.matmul(P, XY)
                    S += k*P
            elif self.kernel_type == "exponential":
                S = expm(self.lamda*XY).T
            elif self.kernel_type == "geometric":
                # Baseline Algorithm as presented in
                # [Vishwanathan et al., 2006]
                Id = np.identity(s)
                S = inv(Id - self.lamda*XY).T

            return np.sum(S)

        elif self.method_type == "fast" and self.kernel_type == "geometric":
            # Conjugate Gradient Method as presented in
            # [Vishwanathan et al., 2006] p.12, s.4.2
            AxAy = [(X[k], Y[k]) for k in ck]
            if len(ck):
                def lsf(x, lamda):
                    y = 0
                    xm = x.reshape((xs, ys), order='F')
                    for Ax, Ay in AxAy:
                        y += np.reshape(multi_dot((Ax, xm, Ay)), (mn,), order='F')
                    return x - self.lamda * y
            else:
                def lsf(x, lamda):
                    return x - np.zeros(mn)

            # A*x=b
            A = LinearOperator((mn, mn), matvec=lambda x: lsf(x, self.lamda))
            b = np.ones(mn)
            x_sol, _ = cg(A, b, tol=1.0e-6, maxiter=20, atol='legacy')
            return np.sum(x_sol)


def idem(x):
    return x


def invert(w, v):
    return (np.real(np.sum(v, axis=0)), np.real(w))


def sd(x):
    return invert(*eig(x))
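
# --- Usage sketch (illustrative; not part of the original module) ---
# Compute the geometric random walk kernel between two toy graphs given as
# adjacency matrices. Each input element is an iterable whose first item is
# the graph structure, matching what parse_input above expects; fit_transform
# is assumed to come from the grakel Kernel base class.
if __name__ == "__main__":
    A = np.array([[0., 1., 1.], [1., 0., 0.], [1., 0., 0.]])
    B = np.array([[0., 1., 0.], [1., 0., 1.], [0., 1., 0.]])
    rw = RandomWalk(lamda=0.1, kernel_type="geometric", normalize=True)
    print(rw.fit_transform([[A], [B]]))  # 2x2 kernel matrix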
PypiClean
/MarkdownTools2-1.0.1.tar.gz/MarkdownTools2-1.0.1/ez_setup.py
import os
import shutil
import sys
import tempfile
import zipfile
import optparse
import subprocess
import platform
import textwrap
import contextlib

from distutils import log

try:
    from site import USER_SITE
except ImportError:
    USER_SITE = None

DEFAULT_VERSION = "3.1"
DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/"


def _python_cmd(*args):
    """
    Return True if the command succeeded.
    """
    args = (sys.executable,) + args
    return subprocess.call(args) == 0


def _install(archive_filename, install_args=()):
    with archive_context(archive_filename):
        # installing
        log.warn('Installing Setuptools')
        if not _python_cmd('setup.py', 'install', *install_args):
            log.warn('Something went wrong during the installation.')
            log.warn('See the error message above.')
            # exitcode will be 2
            return 2


def _build_egg(egg, archive_filename, to_dir):
    with archive_context(archive_filename):
        # building an egg
        log.warn('Building a Setuptools egg in %s', to_dir)
        _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
    # returning the result
    log.warn(egg)
    if not os.path.exists(egg):
        raise IOError('Could not build the egg.')


def get_zip_class():
    """
    Supplement ZipFile class to support context manager for Python 2.6
    """
    class ContextualZipFile(zipfile.ZipFile):
        def __enter__(self):
            return self

        def __exit__(self, type, value, traceback):
            self.close()
    return zipfile.ZipFile if hasattr(zipfile.ZipFile, '__exit__') else \
        ContextualZipFile


@contextlib.contextmanager
def archive_context(filename):
    # extracting the archive
    tmpdir = tempfile.mkdtemp()
    log.warn('Extracting in %s', tmpdir)
    old_wd = os.getcwd()
    try:
        os.chdir(tmpdir)
        with get_zip_class()(filename) as archive:
            archive.extractall()

        # going in the directory
        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
        os.chdir(subdir)
        log.warn('Now working in %s', subdir)
        yield

    finally:
        os.chdir(old_wd)
        shutil.rmtree(tmpdir)


def _do_download(version, download_base, to_dir, download_delay):
    egg = os.path.join(to_dir, 'setuptools-%s-py%d.%d.egg'
                       % (version, sys.version_info[0], sys.version_info[1]))
    if not os.path.exists(egg):
        archive = download_setuptools(version, download_base,
                                      to_dir, download_delay)
        _build_egg(egg, archive, to_dir)
    sys.path.insert(0, egg)

    # Remove previously-imported pkg_resources if present (see
    # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details).
    if 'pkg_resources' in sys.modules:
        del sys.modules['pkg_resources']

    import setuptools
    setuptools.bootstrap_install_from = egg


def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
                   to_dir=os.curdir, download_delay=15):
    to_dir = os.path.abspath(to_dir)
    rep_modules = 'pkg_resources', 'setuptools'
    imported = set(sys.modules).intersection(rep_modules)
    try:
        import pkg_resources
    except ImportError:
        return _do_download(version, download_base, to_dir, download_delay)
    try:
        pkg_resources.require("setuptools>=" + version)
        return
    except pkg_resources.DistributionNotFound:
        return _do_download(version, download_base, to_dir, download_delay)
    except pkg_resources.VersionConflict as VC_err:
        if imported:
            msg = textwrap.dedent("""
                The required version of setuptools (>={version}) is not available,
                and can't be installed while this script is running. Please
                install a more recent version first, using
                'easy_install -U setuptools'.
(Currently using {VC_err.args[0]!r}) """).format(VC_err=VC_err, version=version) sys.stderr.write(msg) sys.exit(2) # otherwise, reload ok del pkg_resources, sys.modules['pkg_resources'] return _do_download(version, download_base, to_dir, download_delay) def _clean_check(cmd, target): """ Run the command to download target. If the command fails, clean up before re-raising the error. """ try: subprocess.check_call(cmd) except subprocess.CalledProcessError: if os.access(target, os.F_OK): os.unlink(target) raise def download_file_powershell(url, target): """ Download the file at url to target using Powershell (which will validate trust). Raise an exception if the command cannot complete. """ target = os.path.abspath(target) cmd = [ 'powershell', '-Command', "(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)" % vars(), ] _clean_check(cmd, target) def has_powershell(): if platform.system() != 'Windows': return False cmd = ['powershell', '-Command', 'echo test'] devnull = open(os.path.devnull, 'wb') try: try: subprocess.check_call(cmd, stdout=devnull, stderr=devnull) except: return False finally: devnull.close() return True download_file_powershell.viable = has_powershell def download_file_curl(url, target): cmd = ['curl', url, '--silent', '--output', target] _clean_check(cmd, target) def has_curl(): cmd = ['curl', '--version'] devnull = open(os.path.devnull, 'wb') try: try: subprocess.check_call(cmd, stdout=devnull, stderr=devnull) except: return False finally: devnull.close() return True download_file_curl.viable = has_curl def download_file_wget(url, target): cmd = ['wget', url, '--quiet', '--output-document', target] _clean_check(cmd, target) def has_wget(): cmd = ['wget', '--version'] devnull = open(os.path.devnull, 'wb') try: try: subprocess.check_call(cmd, stdout=devnull, stderr=devnull) except: return False finally: devnull.close() return True download_file_wget.viable = has_wget def download_file_insecure(url, target): """ Use Python to download the file, even though it cannot authenticate the connection. """ try: from urllib.request import urlopen except ImportError: from urllib2 import urlopen src = dst = None try: src = urlopen(url) # Read/write all in one block, so we don't create a corrupt file # if the download is interrupted. data = src.read() dst = open(target, "wb") dst.write(data) finally: if src: src.close() if dst: dst.close() download_file_insecure.viable = lambda: True def get_best_downloader(): downloaders = [ download_file_powershell, download_file_curl, download_file_wget, download_file_insecure, ] for dl in downloaders: if dl.viable(): return dl def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, delay=15, downloader_factory=get_best_downloader): """ Download setuptools from a specified location and return its filename `version` should be a valid setuptools version number that is available as an egg for download under the `download_base` URL (which should end with a '/'). `to_dir` is the directory where the egg will be downloaded. `delay` is the number of seconds to pause before an actual download attempt. ``downloader_factory`` should be a function taking no arguments and returning a function for downloading a URL to a target. 
""" # making sure we use the absolute path to_dir = os.path.abspath(to_dir) zip_name = "setuptools-%s.zip" % version url = download_base + zip_name saveto = os.path.join(to_dir, zip_name) if not os.path.exists(saveto): # Avoid repeated downloads log.warn("Downloading %s", url) downloader = downloader_factory() downloader(url, saveto) return os.path.realpath(saveto) def _build_install_args(options): """ Build the arguments to 'python setup.py install' on the setuptools package """ return ['--user'] if options.user_install else [] def _parse_args(): """ Parse the command line for options """ parser = optparse.OptionParser() parser.add_option( '--user', dest='user_install', action='store_true', default=False, help='install in user site package (requires Python 2.6 or later)') parser.add_option( '--download-base', dest='download_base', metavar="URL", default=DEFAULT_URL, help='alternative URL from where to download the setuptools package') parser.add_option( '--insecure', dest='downloader_factory', action='store_const', const=lambda: download_file_insecure, default=get_best_downloader, help='Use internal, non-validating downloader' ) parser.add_option( '--version', help="Specify which version to download", default=DEFAULT_VERSION, ) options, args = parser.parse_args() # positional arguments are ignored return options def main(): """Install or upgrade setuptools and EasyInstall""" options = _parse_args() archive = download_setuptools( version=options.version, download_base=options.download_base, downloader_factory=options.downloader_factory, ) return _install(archive, _build_install_args(options)) if __name__ == '__main__': sys.exit(main())
PypiClean
/Flask-API-Utils-1.0.2.tar.gz/Flask-API-Utils-1.0.2/README.rst
=============== Flask-API-Utils =============== .. image:: https://travis-ci.org/marselester/flask-api-utils.png :target: https://travis-ci.org/marselester/flask-api-utils Flask-API-Utils helps you to create APIs. It makes responses in appropriate formats, for instance, JSON. All you need to do is to return dictionary from your views. Another useful feature is an authentication. The library supports Hawk_ HTTP authentication scheme and `Flask-Login`_ extension. To sum up, there is an `API example project`_. "Accept" Header based Response ------------------------------ **ResponsiveFlask** tends to make responses based on **Accept** request-header (RFC 2616). If a view function does not return a dictionary, then response will be processed as usual. Here is an example. .. code-block:: python from api_utils import ResponsiveFlask app = ResponsiveFlask(__name__) @app.route('/') def hello_world(): return {'hello': 'world'} def dummy_xml_formatter(*args, **kwargs): return '<hello>world</hello>' xml_mimetype = 'application/vnd.company+xml' app.response_formatters[xml_mimetype] = dummy_xml_formatter if __name__ == '__main__': app.run() It's assumed that file was saved as ``api.py``: .. code-block:: console $ python api.py * Running on http://127.0.0.1:5000/ Here are curl examples with different **Accept** headers: .. code-block:: console $ curl http://127.0.0.1:5000/ -i HTTP/1.0 200 OK Content-Type: application/json Content-Length: 22 Server: Werkzeug/0.9.4 Python/2.7.5 Date: Sat, 07 Dec 2013 14:01:14 GMT { "hello": "world" } $ curl http://127.0.0.1:5000/ -H 'Accept: application/vnd.company+xml' -i HTTP/1.0 200 OK Content-Type: application/vnd.company+xml; charset=utf-8 Content-Length: 20 Server: Werkzeug/0.9.4 Python/2.7.5 Date: Sat, 07 Dec 2013 14:01:50 GMT <hello>world</hello> $ curl http://127.0.0.1:5000/ -H 'Accept: blah/*' -i HTTP/1.0 406 NOT ACCEPTABLE Content-Type: application/json Content-Length: 83 Server: Werkzeug/0.9.4 Python/2.7.5 Date: Sat, 07 Dec 2013 14:02:23 GMT { "mimetypes": [ "application/json", "application/vnd.company+xml" ] } HTTP Error Handling ------------------- You can set HTTP error handler by using **@app.default_errorhandler** decorator. Note that it might override already defined error handlers, so you should declare it before them. .. code-block:: python from flask import request from api_utils import ResponsiveFlask app = ResponsiveFlask(__name__) @app.default_errorhandler def werkzeug_default_exceptions_handler(error): error_info_url = ( 'http://developer.example.com/errors.html#error-code-{}' ).format(error.code) response = { 'code': error.code, 'message': str(error), 'info_url': error_info_url, } return response, error.code @app.errorhandler(404) def page_not_found(error): return {'error': 'This page does not exist'}, 404 class MyException(Exception): pass @app.errorhandler(MyException) def special_exception_handler(error): return {'error': str(error)} @app.route('/my-exc') def hello_my_exception(): raise MyException('Krivens!') @app.route('/yarr') def hello_bad_request(): request.args['bad-key'] if __name__ == '__main__': app.run() Let's try to curl this example. First response shows that we redefined default ``{'code': 400, 'message': '400: Bad Request'}`` error format. Next ones show that you can handle specific errors as usual. .. 
code-block:: console $ curl http://127.0.0.1:5000/yarr -i HTTP/1.0 400 BAD REQUEST Content-Type: application/json Content-Length: 125 Server: Werkzeug/0.9.4 Python/2.7.5 Date: Sun, 29 Dec 2013 14:26:30 GMT { "code": 400, "info_url": "http://developer.example.com/errors.html#error-code-400", "message": "400: Bad Request" } $ curl http://127.0.0.1:5000/ -i HTTP/1.0 404 NOT FOUND Content-Type: application/json Content-Length: 41 Server: Werkzeug/0.9.4 Python/2.7.5 Date: Sun, 29 Dec 2013 14:28:46 GMT { "error": "This page does not exist" } $ curl http://127.0.0.1:5000/my-exc -i HTTP/1.0 200 OK Content-Type: application/json Content-Length: 25 Server: Werkzeug/0.9.4 Python/2.7.5 Date: Sun, 29 Dec 2013 14:27:33 GMT { "error": "Krivens!" } Authentication -------------- **Hawk** extension provides API authentication for Flask. Hawk_ is an HTTP authentication scheme using a message authentication code (MAC) algorithm to provide partial HTTP request cryptographic verification. The extension is based on Mohawk_, so make sure you have installed it. .. code-block:: console $ pip install mohawk Usage example: .. code-block:: python from flask import Flask from api_utils import Hawk app = Flask(__name__) hawk = Hawk(app) @hawk.client_key_loader def get_client_key(client_id): # In a real project you will likely use some storage. if client_id == 'Alice': return 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn' else: raise LookupError() @app.route('/') @hawk.auth_required def index(): return 'hello world' if __name__ == '__main__': app.run() .. code-block:: console $ curl http://127.0.0.1:5000/ -i HTTP/1.0 401 UNAUTHORIZED ... Cookie based authentication is disabled by default. Set ``HAWK_ALLOW_COOKIE_AUTH = True`` to enable it. Also **Hawk** supports response signing, enable it ``HAWK_SIGN_RESPONSE = True`` if you need it. Following configuration keys are used by Mohawk_ library. .. code-block:: python HAWK_ALGORITHM = 'sha256' HAWK_ACCEPT_UNTRUSTED_CONTENT = False HAWK_LOCALTIME_OFFSET_IN_SECONDS = 0 HAWK_TIMESTAMP_SKEW_IN_SECONDS = 60 Check `Mohawk documentation`_ for more information. It can be convenient to globally turn off authentication when unit testing by setting ``HAWK_ENABLED = False``. Tests ----- Tests are run by: .. code-block:: console $ pip install -r requirements.txt $ tox .. _API example project: https://github.com/marselester/api-example-based-on-flask .. _Hawk: https://github.com/hueniverse/hawk .. _Mohawk: https://github.com/kumar303/mohawk .. _Mohawk documentation: http://mohawk.readthedocs.org .. _Flask-Login: https://flask-login.readthedocs.org
PypiClean
/HTML-as-programming-language-0.1.21.tar.gz/HTML-as-programming-language-0.1.21/htmlc/elements/var_and_const.py
from htmlc.diagnostics import Diagnostic, Severity from htmlc.elements.element import Element from htmlc.utils import hyphenated_to_camel_case class Var(Element): """" HTML: <var b=6/> <var aString="hello"/> <var aChar='b'/> C: int b = 6; String aString = "hello"; char aChar = 'b'; """ def __init__(self): super().__init__() self.type = None self.var_name = None self.attr = None self.is_value_wrapper = True self.prefix = "" def init(self): for key in self.attributes: if key == "type": continue self.var_name = hyphenated_to_camel_case(key) self.attr = self.attributes[key] self.type = self.attributes.get("type", {}).get("val") if self.type is None: # user did not provide type like <var x=y type="int"/> self.type = ( self.attr["type"] or "unknown" ) if self.attr else "unknown" def diagnostics(self): d = [] if not self.var_name: d.append(Diagnostic( Severity.ERROR, self.code_range, "No variable name defined" )) if not self.type or self.type == "unknown": d.append(Diagnostic( Severity.ERROR, self.code_range, "Unknown variable type" "\nPlease provide a type in the type attribute " "like <var x=y type='int'/>" )) return d def to_c(self, mapped_c): val = self.attr["val"] if self.type == "String": val = '"{}"'.format(val) elif self.type == "char": val = "'{}'".format(val) mapped_c.add(f"{self.prefix}{self.type} {self.var_name}", self) if not val: val = self.get_inner_value() if isinstance(val, Element): mapped_c.add(" = ", self) val.to_c(mapped_c) elif val: mapped_c.add(f" = {val}", self) mapped_c.add(";\n", self) class Const(Var): def __init__(self): super().__init__() self.prefix = "static const "
PypiClean
/NitPycker-0.1.tar.gz/NitPycker-0.1/nitpycker/runners.py
import multiprocessing
import queue
import sys
import threading
import time
import unittest

from nitpycker.plugins.manager import Manager
from nitpycker.result import InterProcessResult, ResultCollector, ResultAggregator, TestState


__author__ = "Benjamin Schubert, [email protected]"


class ParallelRunner:
    """
    A parallel test runner for unittest

    :param plugins_manager: the manager to use for plugin handling
    :param process_number: the number of processes to launch to run the tests
    :param verbosity: verbosity level of the test run
    """
    class Process(multiprocessing.Process):
        """
        A simple test runner for a TestSuite.

        :param test: the unittest.TestSuite to run
        :param results_queue: a queue where to put the results once done
        :param manager: the plugin manager to be called before and after the run
        :param task_done_notifier: semaphore released to signal the end of the task
        """
        def __init__(self, test: unittest.TestSuite, results_queue: queue.Queue, manager: Manager,
                     task_done_notifier: threading.Semaphore, **kwargs):
            super().__init__(**kwargs)
            self.test = test
            self.results = InterProcessResult(results_queue)
            self.manager = manager
            self.task_done = task_done_notifier

        def run(self) -> None:
            """ Launches the test and notifies of the result """
            self.manager.pre_test_start(self.test)
            self.test(self.results)
            self.manager.post_test_end(self.test)
            self.task_done.release()

    def __init__(self, plugins_manager: Manager, process_number: int, verbosity: int):
        self.verbosity = verbosity
        self.plugins_manager = plugins_manager
        self.process_number = process_number

    @staticmethod
    def print_summary(report: ResultAggregator, time_taken: float) -> None:
        """
        Prints a summary of the tests on the screen

        :param report: the test report
        :param time_taken: the time it took to run the whole testSuite
        """
        number_of_tests = sum(len(x) for x in report.results.values())
        print("Ran {number_of_tests} test{s} in {time:.2f}s\n".format(
            number_of_tests=number_of_tests, s="s" if number_of_tests != 1 else "", time=time_taken),
            file=sys.stderr
        )

        status = "OK" if report.wasSuccessful() else "FAILED"
        info = []
        for state in TestState:
            if report.results[state.name] and state.name != TestState.success.name:
                info.append("{description}={number}".format(
                    description=state.name.replace("_", " "), number=len(report.results[state.name]))
                )

        if len(info) != 0:
            print(status, "({})".format(", ".join(info)), file=sys.stderr)
        else:
            print(status, file=sys.stderr)

    @staticmethod
    def module_can_run_parallel(test_module: unittest.TestSuite) -> bool:
        """
        Checks if a given module of tests can be run in parallel or not

        :param test_module: the module to run
        :return: True if the module can be run in parallel, False otherwise
        """
        for test_class in test_module:
            for test_case in test_class:
                return not getattr(sys.modules[test_case.__module__], "__no_parallel__", False)

    @staticmethod
    def class_can_run_parallel(test_class: unittest.TestSuite) -> bool:
        """
        Checks if a given class of tests can be run in parallel or not

        :param test_class: the class to run
        :return: True if the class can be run in parallel, False otherwise
        """
        for test_case in test_class:
            return not getattr(test_case, "__no_parallel__", False)

    def run(self, test: unittest.TestSuite) -> ResultAggregator:
        """
        Given a TestSuite, will create one process per test case whenever possible and run them concurrently.
        Will then wait for the results and return them

        :param test: the TestSuite to run
        :return: a summary of the test run
        """
        process = []
        resource_manager = multiprocessing.Manager()
        results_queue = resource_manager.Queue()
        report_queue = resource_manager.Queue()
        tasks_running = resource_manager.BoundedSemaphore(self.process_number)

        start_time = time.time()

        test_suites = []
        number_of_tests = 0
        for test_module in test:
            if not self.module_can_run_parallel(test_module):
                number_of_tests += test_module.countTestCases()
                test_suites.append(test_module)
                continue

            for test_class in test_module:
                if not self.class_can_run_parallel(test_class):
                    number_of_tests += test_class.countTestCases()
                    test_suites.append(test_class)
                    continue

                for _test in test_class:
                    number_of_tests += 1
                    test_suite = unittest.TestSuite()
                    test_suite.addTest(_test)
                    test_suites.append(test_suite)

        results_collector = ResultCollector(
            results_queue, report_queue, self.verbosity, daemon=True, number_of_tests=number_of_tests
        )
        results_collector.start()

        for suite in test_suites:
            tasks_running.acquire()
            x = self.Process(suite, results_queue, self.plugins_manager, tasks_running)
            x.start()
            process.append(x)

        for i in process:
            i.join()

        stop_time = time.time()
        results_queue.join()
        results_collector.end_collection()
        results_collector.join()

        report = ResultAggregator(report_queue.get())
        time_taken = stop_time - start_time
        self.plugins_manager.report(report)
        if self.verbosity:
            self.print_summary(report, time_taken)

        return report
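
# --- Usage sketch (illustrative; the Manager construction is an assumption,
# its real signature may differ) ---
#   import unittest
#   from nitpycker.plugins.manager import Manager
#   suite = unittest.TestLoader().discover("tests")
#   runner = ParallelRunner(Manager(), process_number=4, verbosity=1)
#   report = runner.run(suite)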
PypiClean
/dragonflow-4.0.0.tar.gz/dragonflow-4.0.0/doc/source/specs/services_status.rst
===============
Services Status
===============

https://blueprints.launchpad.net/dragonflow/+spec/services-status

This spec is introduced to keep the status of all the services available in
the database at all times.

Problem Description
===================

This spec solves the following problems

* Administrators currently have no tool to view the status of services in the
  cluster. This spec enables administrators to view the status of services in
  the cluster from any node where the df-db command is available.
* Other services can take advantage of service status to schedule tasks.
* It enables administrators to put a node in maintenance mode or stop a
  service from participating in processing.
* This spec will provide support, in the future, for making services highly
  available.

Proposed Change
===============

Make Dragonflow services report their timestamps to the Dragonflow Northbound
Database periodically. Add common code which can be used by all the services
to report their status. The implementation assumes that all the nodes in the
OpenStack cloud have consistent time.

The following services report their own status:

1. L3 agent
2. metadata proxy
3. local controller
4. publisher service

Local controller status reporting is planned in the blueprint referenced
below [#]_

.. [#] https://review.openstack.org/#/c/385719/

To avoid writing duplicate code, the mentioned blueprint's code will be
changed to reuse the code of the proposed blueprint.

A generic module will be written which takes the name of the service as input
and handles reporting its status to the database. Therefore the L3 agent,
local controller, publisher and metadata proxy service can all use the same
module.

Enabling or disabling a service only marks the service as down in the NB
database. It does not impact any app. Later this can be extended to perform
additional operations when a service is enabled or disabled.

Configuration Changes
---------------------

Add a new configuration option, *service_down_time*, in the df section,
meaning that a service will be considered down if it does not report itself
within that time. The default value of *service_down_time* will be 80s, which
should be at least three times the *report_interval* described below.

Add a new configuration option, *report_interval*. Services will report their
timestamps to the Dragonflow Northbound Database using this option as the
time interval. The default value of *report_interval* is 25 seconds, which
should not have a big impact on the performance of the Dragonflow Northbound
Database.

NB Data Model Impact
--------------------

A service table will be added to the Dragonflow NB database, which contains
the following information about each service.

::

 +------+-------------------+--------------------------------------------------+
 | S.No | field             | Description                                      |
 +======+===================+==================================================+
 | 1.   | id                | ID of the service (UUID)                         |
 +------+-------------------+--------------------------------------------------+
 | 2.   | chassis           | hostname                                         |
 +------+-------------------+--------------------------------------------------+
 | 3.   | binary            | Name of the service                              |
 +------+-------------------+--------------------------------------------------+
 | 4.   | disabled          | Represents whether explicitly disabled or not    |
 +------+-------------------+--------------------------------------------------+
 | 5.   | disabled_reason   | Reason given when disabling the service          |
 +------+-------------------+--------------------------------------------------+
 | 6.   | last_seen_up      | Last time stamp reported by the service          |
 +------+-------------------+--------------------------------------------------+

The plan is to store information on the following basis, if possible.

::

 {
     "binary1": {
         "chassis1": {
             "id": UUID,
             "chassis": hostname,
             "binary": service_name,
             "disabled": True/False,
             "disabled_reason": reason,
             "last_seen_up": timestamp,
             "report_count": count (int)
         },
         "chassis2": {
             "id": UUID,
             "chassis": hostname,
             "binary": service_name,
             "disabled": True/False,
             "disabled_reason": reason,
             "last_seen_up": timestamp,
             "report_count": count (int)
         },
         -
         -
         -
     },
     "binary2": {
         "chassis1": {
             "id": UUID,
             "chassis": hostname,
             "binary": service_name,
             "disabled": True/False,
             "disabled_reason": reason,
             "last_seen_up": timestamp,
             "report_count": count (int)
         },
         "chassis2": {
             "id": UUID,
             "chassis": hostname,
             "binary": service_name,
             "disabled": True/False,
             "disabled_reason": reason,
             "last_seen_up": timestamp,
             "report_count": count (int)
         },
         -
         -
         -
     },
     -
     -
     -
 }

The assumption behind the above data layout is that only one instance of a
given service runs on a node and has to be registered. It adds no overhead
during status reporting: services only have to report their binary and host,
and a service's status can be updated in constant time. It benefits queries
such as "list all the hosts running publishers" or "list all the publishers
in the cluster". These queries are expected to be frequent, as load has to be
balanced between services etc.

Publisher Subscriber Impact
---------------------------

Dragonflow controller should silently ignore all the updates on the new
table.

Dragonflow DB CLI Impact
------------------------

The df-db utility will provide the following commands to the administrator.

::

 +------+------------------+----------------------------------------------------+
 | S.No | command          | Description                                        |
 +======+==================+====================================================+
 | 1.   | service list     | List all the registered services                   |
 +------+------------------+----------------------------------------------------+
 | 2.   | service show     | Show detailed information about a service          |
 +------+------------------+----------------------------------------------------+
 | 3.   | service enable   | Enable an already disabled service                 |
 +------+------------------+----------------------------------------------------+
 | 4.   | service disable  | Disable a service                                  |
 +------+------------------+----------------------------------------------------+

The state of a service is decided based on the *service_down_time*
configuration option together with the last_seen_up and disabled fields from
the database.

Dragonflow Applications Impact
------------------------------

None

Installed flows Impact
----------------------

None

Implementation
==============

Assignee(s)
-----------

Primary assignee:
`rajiv-kumar <https://launchpad.net/~rajiv-kumar>`_

Work Items
----------

1. Make the Dragonflow controller silently ignore the changes in the service
   table.
2. Add configuration and make Dragonflow services report to the Dragonflow
   Northbound Database periodically.
3. Add commands to the Dragonflow DB CLI.

References
==========

https://review.openstack.org/#/c/385719/8/doc/source/specs/support_check_chassis_alive.rst
https://docs.openstack.org/dragonflow/latest/specs/publish_subscribe_abstraction.html
https://specs.openstack.org/openstack/fuel-specs/specs/6.1/neutron-agents-local-reports.html
PypiClean
/HTSQL-2.3.3.tar.gz/HTSQL-2.3.3/src/htsql/tweak/override/introspect.py
from ...core.context import context from ...core.adapter import rank from ...core.introspect import Introspect import threading class UnusedPatternCache(object): def __init__(self): self.patterns = [] self.lock = threading.Lock() def add(self, pattern): with self.lock: self.patterns.append(pattern) class OverrideIntrospect(Introspect): rank(2.0) def __call__(self): addon = context.app.tweak.override catalog = super(OverrideIntrospect, self).__call__() unused = set() if addon.included_tables or addon.excluded_tables: include = addon.included_tables exclude = addon.excluded_tables unused.update(include) unused.update(exclude) for schema in catalog.schemas: schema_exclude = [pattern for pattern in exclude if pattern.matches(schema)] if not (include or schema_exclude): continue for table in reversed(list(schema.tables)): include_matches = [pattern for pattern in include if pattern.matches(table)] exclude_matches = [pattern for pattern in schema_exclude if pattern.matches(table)] if exclude_matches or (include and not include_matches): table.remove() unused.difference_update(include_matches) unused.difference_update(exclude_matches) if addon.included_columns or addon.excluded_columns: include = addon.included_columns exclude = addon.excluded_columns unused.update(include) unused.update(exclude) for schema in catalog.schemas: schema_exclude = [pattern for pattern in exclude if pattern.matches(schema)] if not (include or schema_exclude): continue for table in schema.tables: table_exclude = [pattern for pattern in schema_exclude if pattern.matches(table)] if not (include or table_exclude): continue for column in reversed(list(table.columns)): include_matches = [pattern for pattern in include if pattern.matches(column)] exclude_matches = [pattern for pattern in table_exclude if pattern.matches(column)] if exclude_matches or (include and not include_matches): column.remove() unused.difference_update(include_matches) unused.difference_update(exclude_matches) if addon.not_nulls: unused.update(addon.not_nulls) for schema in catalog.schemas: schema_patterns = [pattern for pattern in addon.not_nulls if pattern.matches(schema)] if not schema_patterns: continue for table in schema.tables: table_patterns = [pattern for pattern in schema_patterns if pattern.matches(table)] if not table_patterns: continue for column in reversed(list(table.columns)): matches = [pattern for pattern in table_patterns if pattern.matches(column)] if matches: column.set_is_nullable(False) unused.difference_update(matches) if addon.unique_keys: unused.update(addon.unique_keys) for schema in catalog.schemas: schema_keys = [pattern for pattern in addon.unique_keys if pattern.matches(schema)] if not schema_keys: continue for table in schema.tables: table_keys = [pattern for pattern in schema_keys if pattern.matches(table)] for pattern in table_keys: columns = pattern.extract(table) if columns is None: continue if pattern.is_primary: for column in columns: column.set_is_nullable(False) if table.primary_key is not None: table.primary_key.set_is_primary(False) table.add_unique_key(columns, pattern.is_primary, pattern.is_partial) unused.discard(pattern) if addon.foreign_keys: unused.update(addon.foreign_keys) for schema in catalog.schemas: schema_keys = [pattern for pattern in addon.foreign_keys if pattern.matches(schema)] if not schema_keys: continue for table in schema.tables: table_keys = [pattern for pattern in schema_keys if pattern.matches(table)] for pattern in table_keys: columns = pattern.extract(table) if columns is None: 
continue targets = [target_table for target_schema in catalog.schemas if pattern.matches_target(target_schema) for target_table in target_schema.tables if pattern.matches_target(target_table) and pattern.extract_target(target_table)] if len(targets) != 1: continue [target_table] = targets target_columns = pattern.extract_target(target_table) table.add_foreign_key(columns, target_table, target_columns, pattern.is_partial) unused.discard(pattern) if addon.unlabeled_tables: unused.update(addon.unlabeled_tables) for schema in catalog.schemas: schema_matches = [pattern for pattern in addon.unlabeled_tables if pattern.matches(schema)] if not schema_matches: continue for table in schema.tables: matches = [pattern for pattern in schema_matches if pattern.matches(table)] unused.difference_update(matches) if addon.unlabeled_columns: unused.update(addon.unlabeled_columns) for schema in catalog.schemas: schema_matches = [pattern for pattern in addon.unlabeled_columns if pattern.matches(schema)] if not schema_matches: continue for table in schema.tables: table_matches = [pattern for pattern in schema_matches if pattern.matches(table)] if not table_matches: continue for column in table.columns: matches = [pattern for pattern in table_matches if pattern.matches(column)] unused.difference_update(matches) for pattern in (addon.included_tables + addon.excluded_tables + addon.included_columns + addon.excluded_columns + addon.not_nulls + addon.unique_keys + addon.foreign_keys + addon.unlabeled_tables + addon.unlabeled_columns): if pattern in unused: addon.unused_pattern_cache.add(str(pattern)) return catalog
PypiClean
/Dick.py-1.7-py3-none-any.whl/amino/client.py
import json import base64 import requests import threading from hashlib import sha1 import names import random import hmac import platform,socket,re,uuid import secmail from bs4 import BeautifulSoup from uuid import UUID from os import urandom from time import timezone, sleep from typing import BinaryIO from binascii import hexlify from time import time as timestamp from locale import getdefaultlocale as locale from .lib.util import exceptions, headers, device, objects, helpers from .socket import Callbacks, SocketHandler from .lib.util import device_gen device = device.DeviceGenerator() class Client(Callbacks, SocketHandler): def __init__(self, deviceId: str = None, proxies: dict = None, certificatePath = None, socket_trace = False, socketDebugging = False): self.api = "https://service.narvii.com/api/v1" self.authenticated = False self.configured = False self.user_agent = device.user_agent if deviceId is not None: self.device_id = deviceId else: self.device_id = device.device_id self.device_id_sig = device.device_id_sig SocketHandler.__init__(self, self, socket_trace=socket_trace, debug=socketDebugging) Callbacks.__init__(self, self) self.proxies = proxies self.certificatePath = certificatePath self.json = None self.sid = None self.userId = None self.secret= None self.account: objects.UserProfile = objects.UserProfile(None) self.profile: objects.UserProfile = objects.UserProfile(None) #print(self.device_id) self.session=requests.Session() self.check_device(self.device_id) self.headerrs=headers.Headers(deviceId=self.device_id).headers def parse_headers(self, data = None,deviceId:str=device_gen()): if data: #print(data) return headers.Headers(data=data, deviceId=deviceId).headers else: return headers.Headers(deviceId=deviceId).headers def join_voice_chat(self, comId: str, chatId: str, joinType: int = 1): """ Joins a Voice Chat **Parameters** - **comId** : ID of the Community - **chatId** : ID of the Chat """ # Made by Light, Ley and Phoenix data = { "o": { "ndcId": int(comId), "threadId": chatId, "joinRole": joinType, "id": "2154531" # Need to change? }, "t": 112 } data = json.dumps(data) self.send(data) def join_video_chat(self, comId: str, chatId: str, joinType: int = 1): """ Joins a Video Chat **Parameters** - **comId** : ID of the Community - **chatId** : ID of the Chat """ # Made by Light, Ley and Phoenix data = { "o": { "ndcId": int(comId), "threadId": chatId, "joinRole": joinType, "channelType": 5, "id": "2154531" # Need to change? }, "t": 108 } data = json.dumps(data) self.send(data) def join_video_chat_as_viewer(self, comId: str, chatId: str): data = { "o": { "ndcId": int(comId), "threadId": chatId, "joinRole": 2, "id": "72446" }, "t": 112 } data = json.dumps(data) self.send(data) def run_vc(self, comId: str, chatId: str, joinType: str): while self.active: data = { "o": { "ndcId": comId, "threadId": chatId, "joinRole": joinType, "id": "2154531" # Need to change? }, "t": 112 } data = json.dumps(data) self.send(data) sleep(1) def start_vc(self, comId: str, chatId: str, joinType: int = 1): data = { "o": { "ndcId": comId, "threadId": chatId, "joinRole": joinType, "id": "2154531" # Need to change? }, "t": 112 } data = json.dumps(data) self.send(data) data = { "o": { "ndcId": comId, "threadId": chatId, "channelType": 1, "id": "2154531" # Need to change? 
}, "t": 108 } data = json.dumps(data) self.send(data) self.active = True threading.Thread(target=self.run_vc, args=[comId, chatId, joinType]) def reputation(self, comId, chatId: str,joinType: int = 1,thread:bool=True): """ Start The self.send(json.dumps(data))ideo Chat **Parameters** - **comId**: ID of the Community (str) - **chatId**: ID of the Chat (str) - **joinType**: Join type to Start voice as.. (int) """ data = { "o": { "ndcId": comId, "threadId": chatId, "joinRole": joinType, "id": "2249844" }, "t": 112 } sleep(2) data = json.dumps(data) self.send(data) data = { "o": { "ndcId": comId, "threadId": chatId, "channelType": 5, "id": "2250161" }, "t": 108 } sleep(2) data = json.dumps(data) self.send(data) data={"o": {"ndcId":comId, "threadId":chatId, "playlist":{"currentItemIndex":0, "currentItemStatus":2, "items":[ {"author":"null", "duration":999999, "isDone":False, "mediaList": [[100,"http://pm1.narvii.com/8304/b8619e9c47f81fbf57259a57db69fb28e9704d99r1-1920-1080v2_00.jpg","null"]], "title":"dick.mp4", "type":1, "url": f"file://dick.py"}]}, "id":"244360"},"t":120} data = json.dumps(data) self.send(data) self.active = True if thread is True: threading.Thread(target=self.run_vc, args=[comId, chatId, joinType]) def start_video_call(self, comId: str, chatId: str, joinType: int = 1): data = { "o": { "ndcId": comId, "threadId": chatId, "joinRole": joinType, "id": "2154531" }, "t": 112 } data = json.dumps(data) self.send(data) data = { "o": { "ndcId": comId, "threadId": chatId, "channelType": 4, "id": "2154531" }, "t": 108 } data = json.dumps(data) self.send(data) self.active = True threading.Thread(target=self.run_vc, args=[comId, chatId, joinType]) def end_vc(self, comId: str, chatId: str, joinType: int = 2): self.active = False data = { "o": { "ndcId": comId, "threadId": chatId, "joinRole": joinType, "id": "2154531" # Need to change? }, "t": 112 } data = json.dumps(data) self.send(data) def send_action(self, actions: list,comId:str ,chatId: str =None, blogId: str = None, quizId: str = None, lastAction: bool = False): # Action List # Browsing if lastAction is True: t = 306 else: t = 304 data = { "o": { "actions": actions, "target": f"ndc://x{comId}/", "ndcId": int(comId), "params": {"topicIds": [45841, 17254, 26542, 42031, 22542, 16371, 6059, 41542, 15852]}, "id": "273887" }, "t": t } if blogId is not None or quizId is not None: data["o"]["target"] = f"ndc://x{comId}/blog/{blogId}" if blogId is not None: data["o"]["params"]["blogType"] = 0 if quizId is not None: data["o"]["params"]["blogType"] = 6 if chatId is not None: data["o"]["target"]=f"ndc://x{comId}/chat-thread/{chatId}" data["o"]["params"]={"membershipStatus":1,"threadType":2} #print(data) sleep(2) return self.send(json.dumps(data)) def login_sid(self, SID: str): """ Login into an account with an SID **Parameters** - **SID** : SID of the account """ uId = helpers.sid_to_uid(SID) self.authenticated = True self.sid = SID self.userId = uId self.account: objects.UserProfile = self.get_user_info(uId) self.profile: objects.UserProfile = self.get_user_info(uId) headers.sid = self.sid self.start() self.run_socket() def login(self, email: str, password: str,proxy:dict=None): """ Login into an account. **Parameters** - **email** : Email of the account. - **password** : Password of the account. 
**Returns** - **Success** : 200 (int) - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ data = json.dumps({ "email": email, "v": 2, "secret": f"0 {password}", "deviceID": self.device_id, "clientType": 100, "action": "normal", "timestamp": int(timestamp() * 1000) }) if proxy: proxy=proxy else: proxy =self.proxies response = requests.post(f"{self.api}/g/s/auth/login", headers=self.parse_headers(data=data), data=data, proxies=proxy, verify=self.certificatePath) self.run_socket() if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: self.authenticated = True self.json = json.loads(response.text) self.sid = self.json["sid"] self.userId = self.json["account"]["uid"] self.account: objects.UserProfile = objects.UserProfile(self.json["account"]).UserProfile self.profile: objects.UserProfile = objects.UserProfile(self.json["userProfile"]).UserProfile headers.sid = self.sid self.start() return response.status_code def register(self, nickname: str, email: str, password: str, verificationCode: str, deviceId: str = device.device_id): """ Register an account. **Parameters** - **nickname** : Nickname of the account. - **email** : Email of the account. - **password** : Password of the account. - **verificationCode** : Verification code. - **deviceId** : The device id being registered to. **Returns** - **Success** : 200 (int) - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ data = json.dumps({ "secret": f"0 {password}", "deviceID": deviceId, "email": email, "clientType": 100, "nickname": nickname, "latitude": 0, "longitude": 0, "address": None, "clientCallbackURL": "narviiapp://relogin", "validationContext": { "data": { "code": verificationCode }, "type": 1, "identity": email }, "type": 1, "identity": email, "timestamp": int(timestamp() * 1000) }) headd=self.parse_headers(deviceId=deviceId,data=data) headd["NDCAUTH"]=None response = requests.post(f"{self.api}/g/s/auth/register", data=data, headers=headd) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return response.status_code def restore(self, email: str, password: str): """ Restore a deleted account. **Parameters** - **email** : Email of the account. - **password** : Password of the account. **Returns** - **Success** : 200 (int) - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ data = json.dumps({ "secret": f"0 {password}", "deviceID": device.device_id, "email": email, "timestamp": int(timestamp() * 1000) }) response = requests.post(f"{self.api}/g/s/account/delete-request/cancel", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return response.status_code def logout(self): """ Logout from an account. **Parameters** - No parameters required. 
        **Returns**
            - **Success** : 200 (int)
            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        data = json.dumps({
            "deviceID": self.device_id,
            "clientType": 100,
            "timestamp": int(timestamp() * 1000)
        })

        response = requests.post(f"{self.api}/g/s/auth/logout", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath)

        if response.status_code != 200:
            return exceptions.CheckException(json.loads(response.text))
        else:
            self.authenticated = False
            self.json = None
            self.sid = None
            self.userId = None
            self.account = None
            self.profile = None
            headers.sid = None
            self.close()
            return response.status_code

    def configure(self, age: int, gender: str):
        """
        Configure the settings of an account.

        **Parameters**
            - **age** : Age of the account. Minimum is 13.
            - **gender** : Gender of the account.
                - ``Male``, ``Female`` or ``Non-Binary``

        **Returns**
            - **Success** : 200 (int)
            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        if gender.lower() == "male": gender = 1
        elif gender.lower() == "female": gender = 2
        elif gender.lower() == "non-binary": gender = 255
        else: raise exceptions.SpecifyType()

        if age <= 12: raise exceptions.AgeTooLow()

        data = json.dumps({
            "age": age,
            "gender": gender,
            "timestamp": int(timestamp() * 1000)
        })

        response = requests.post(f"{self.api}/g/s/persona/profile/basic", data=data, headers=self.parse_headers(data=data), proxies=self.proxies, verify=self.certificatePath)

        if response.status_code != 200:
            return exceptions.CheckException(json.loads(response.text))
        else:
            return response.status_code

    def verify(self, email: str, code: str, dev: str, verifyInfoKey: bool = False, key=None):
        """
        Verify an account.

        **Parameters**
            - **email** : Email of the account.
            - **code** : Verification code.

        **Returns**
            - **Success** : 200 (int)
            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        data = {
            "validationContext": {
                "type": 1,
                "identity": email,
                "data": {"code": code}},
            "deviceID": dev,
            # "timestamp": int(timestamp() * 1000)
        }
        if verifyInfoKey is True:
            data["verifyInfoKey"] = key
        data = json.dumps(data)

        response = requests.post(f"{self.api}/g/s/auth/check-security-validation", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath)
        response = requests.post(f"{self.api}/g/s/auth/verify-account", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath)

        if response.status_code != 200:
            return exceptions.CheckException(json.loads(response.text))
        else:
            return response.status_code

    def change_pass(self, password, new_password):
        data = json.dumps({"secret": f"0 {password}", "deviceID": self.device_id})
        response = requests.post(f"{self.api}/g/s/auth/verify-password", headers=self.parse_headers(data=data), data=data)
        if response.json()['api:statuscode'] == 0:
            dat = json.dumps({"secret": f"0 {password}", "updateSecret": f"0 {new_password}", "validationContext": "", "deviceID": self.device_id})
            res = requests.post(f"{self.api}/g/s/auth/change-password", headers=self.parse_headers(data=dat), data=dat)
            return res.json()
        else:
            return response.json()

    def request_verify_code(self, dev: str, email: str, resetPassword: bool = False, verifyInfoKey: bool = False, key=None):
        """
        Request a verification code to the targeted email.

        **Parameters**
            - **email** : Email of the account.
            - **resetPassword** : If the code should be for Password Reset.
        **Returns**
            - **Success** : 200 (int)

            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        data = {
            "identity": email,
            "type": 1,
            "deviceID": dev
        }
        if verifyInfoKey is True:
            data["verifyInfoKey"] = key
        if resetPassword is True:
            data["level"] = 2
            data["purpose"] = "reset-password"

        data = json.dumps(data)
        headd = self.parse_headers(deviceId=dev, data=data)
        headd["NDCAUTH"] = None
        response = requests.post(f"{self.api}/g/s/auth/request-security-validation", headers=headd, data=data, proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return response.status_code

    def activate_account(self, email: str, code: str):
        """
        Activate an account.

        **Parameters**
            - **email** : Email of the account.
            - **code** : Verification code.

        **Returns**
            - **Success** : 200 (int)

            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        data = json.dumps({
            "type": 1,
            "identity": email,
            "data": {"code": code},
            "deviceID": device.device_id
        })

        response = requests.post(f"{self.api}/g/s/auth/activate-email", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return response.status_code

    def gen_email(self):
        """
        Generate a disposable email address via the secmail service.

        **Returns**
            - The generated email address (str).
        """
        mail = secmail.SecMail()
        email = mail.generate_email()
        return email

    def get_message(self, email):
        """
        Poll the disposable inbox and extract the first link found in each message
        (used to pick up Amino verification URLs).

        **Parameters**
            - **email** : The disposable email address to read.

        **Returns**
            - The last extracted URL (str), or ``"0"`` if none was found.
        """
        url = "0"
        try:
            sleep(3)
            mail = secmail.SecMail()
            inbox = mail.get_messages(email)
            for Id in inbox.id:
                msg = mail.read_message(email=email, id=Id).htmlBody
                bs = BeautifulSoup(msg, 'html.parser')
                # The verification link is the first anchor tag in the message body.
                first_link = bs.find_all('a')[0]
                url = first_link['href'] + '\n'
                print('Verification Url\n')
                print(url)
        except Exception:
            pass
        return url

    # Provided by "𝑰 𝑵 𝑻 𝑬 𝑹 𝑳 𝑼 𝑫 𝑬#4082"
    def delete_account(self, password: str):
        """
        Delete an account.

        **Parameters**
            - **password** : Password of the account.

        **Returns**
            - **Success** : 200 (int)

            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        data = json.dumps({
            "deviceID": device.device_id,
            "secret": f"0 {password}"
        })

        response = requests.post(f"{self.api}/g/s/account/delete-request", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return response.status_code

    def change_password(self, email: str, password: str, code: str):
        """
        Change password of an account.

        **Parameters**
            - **email** : Email of the account.
            - **password** : Password of the account.
            - **code** : Verification code.

        **Returns**
            - **Success** : 200 (int)

            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        data = json.dumps({
            "updateSecret": f"0 {password}",
            "emailValidationContext": {
                "data": {
                    "code": code
                },
                "type": 1,
                "identity": email,
                "level": 2,
                "deviceID": device.device_id
            },
            "phoneNumberValidationContext": None,
            "deviceID": device.device_id
        })

        response = requests.post(f"{self.api}/g/s/auth/reset-password", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return response.status_code

    def check_device(self, deviceId: str):
        """
        Check if the Device ID is valid.

        **Parameters**
            - **deviceId** : ID of the Device.
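
        **Example**
            A minimal usage sketch; ``client`` is assumed to be an instance of this
            class::

                status = client.check_device(deviceId=client.device_id)
                print(status)  # 200 if the device ID is accepted
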
**Returns** - **Success** : 200 (int) - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ data = json.dumps({ "deviceID": deviceId, "bundleID": "com.narvii.amino.master", "clientType": 100, "timezone": -timezone // 1000, "systemPushEnabled": True, "locale": locale()[0], "timestamp": int(timestamp() * 1000) }) response = requests.post(f"{self.api}/g/s/device", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: self.configured = True; return response.status_code def get_account_info(self): response = requests.get(f"{self.api}/g/s/account", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return objects.UserProfile(json.loads(response.text)["account"]).UserProfile def upload_media(self, file: BinaryIO, fileType: str): """ Upload file to the amino servers. **Parameters** - **file** : File to be uploaded. **Returns** - **Success** : Url of the file uploaded to the server. - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ if fileType == "audio": t = "audio/aac" elif fileType == "image": t = "image/jpg" else: raise exceptions.SpecifyType(fileType) data = file.read() response = requests.post(f"{self.api}/g/s/media/upload", data=data, headers=headers.Headers(type=t, data=data, deviceId=self.device_id).headers, proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return json.loads(response.text)["mediaValue"] def handle_socket_message(self, data): return self.resolve(data) def get_eventlog(self): response = requests.get(f"{self.api}/g/s/eventlog/profile?language=en", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return json.loads(response.text) def sub_clients(self, start: int = 0, size: int = 25): """ List of Communities the account is in. **Parameters** - *start* : Where to start the list. - *size* : Size of the list. **Returns** - **Success** : :meth:`Community List <amino.lib.util.objects.CommunityList>` - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ if not self.authenticated: raise exceptions.NotLoggedIn() response = requests.get(f"{self.api}/g/s/community/joined?v=1&start={start}&size={size}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return objects.CommunityList(json.loads(response.text)["communityList"]).CommunityList def sub_clients_profile(self, start: int = 0, size: int = 25): if not self.authenticated: raise exceptions.NotLoggedIn() response = requests.get(f"{self.api}/g/s/community/joined?v=1&start={start}&size={size}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return json.loads(response.text)["userInfoInCommunities"] def get_user_info(self, userId: str): """ Information of an User. **Parameters** - **userId** : ID of the User. 
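
        **Example**
            A minimal usage sketch; ``client`` is assumed to be a logged-in instance of
            this class, the user ID is a hypothetical placeholder, and ``nickname`` is
            assumed to be a field of the returned profile object::

                user = client.get_user_info(userId="<userId>")
                print(user.nickname)
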
**Returns** - **Success** : :meth:`User Object <amino.lib.util.objects.UserProfile>` - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ response = requests.get(f"{self.api}/g/s/user-profile/{userId}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return objects.UserProfile(json.loads(response.text)["userProfile"]).UserProfile def get_chat_threads(self, start: int = 0, size: int = 25): """ List of Chats the account is in. **Parameters** - *start* : Where to start the list. - *size* : Size of the list. **Returns** - **Success** : :meth:`Chat List <amino.lib.util.objects.ThreadList>` - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ response = requests.get(f"{self.api}/g/s/chat/thread?type=joined-me&start={start}&size={size}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return objects.ThreadList(json.loads(response.text)["threadList"]).ThreadList def get_chat_thread(self, chatId: str): """ Get the Chat Object from an Chat ID. **Parameters** - **chatId** : ID of the Chat. **Returns** - **Success** : :meth:`Chat Object <amino.lib.util.objects.Thread>` - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ response = requests.get(f"{self.api}/g/s/chat/thread/{chatId}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return objects.Thread(json.loads(response.text)["thread"]).Thread def get_chat_users(self, chatId: str, start: int = 0, size: int = 25): response = requests.get(f"{self.api}/g/s/chat/thread/{chatId}/member?start={start}&size={size}&type=default&cv=1.2", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return objects.UserProfileList(json.loads(response.text)["memberList"]).UserProfileList def join_chat(self, chatId: str): """ Join an Chat. **Parameters** - **chatId** : ID of the Chat. **Returns** - **Success** : 200 (int) - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ response = requests.post(f"{self.api}/g/s/chat/thread/{chatId}/member/{self.userId}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return response.status_code def random_community(self,search:str,start: int = 0, size: int = 25): response=requests.get(f"{self.api}/g/s/community/search?q={search}&language=en&completeKeyword=1&start={start}&size={size}",headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code == 200: com=[] r=response.json()["communityList"] for listt in r: com.append(listt["ndcId"]) return com else: return response.json() def leave_chat(self, chatId: str): """ Leave an Chat. **Parameters** - **chatId** : ID of the Chat. 
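
        **Example**
            A minimal usage sketch; ``client`` is assumed to be a logged-in instance of
            this class and the chat ID is a hypothetical placeholder::

                client.leave_chat(chatId="<chatId>")
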
        **Returns**
            - **Success** : 200 (int)

            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        response = requests.delete(f"{self.api}/g/s/chat/thread/{chatId}/member/{self.userId}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return response.status_code

    def start_chat(self, userId: [str, list], message: str, title: str = None, content: str = None, isGlobal: bool = False, publishToGlobal: bool = False):
        """
        Start a Chat with a User or List of Users.

        **Parameters**
            - **userId** : ID of the User or List of User IDs.
            - **message** : Starting Message.
            - **title** : Title of Group Chat.
            - **content** : Content of Group Chat.
            - **isGlobal** : If Group Chat is Global.
            - **publishToGlobal** : If Group Chat should show in Global.

        **Returns**
            - **Success** : 200 (int)

            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        if isinstance(userId, str): userIds = [userId]
        elif isinstance(userId, list): userIds = userId
        else: raise exceptions.WrongType()

        data = {
            "title": title,
            "inviteeUids": userIds,
            "initialMessageContent": message,
            "content": content,
            "timestamp": int(timestamp() * 1000)
        }

        if isGlobal is True:
            data["type"] = 2
            data["eventSource"] = "GlobalComposeMenu"
        else:
            data["type"] = 0

        if publishToGlobal is True: data["publishToGlobal"] = 1
        else: data["publishToGlobal"] = 0

        data = json.dumps(data)
        response = requests.post(f"{self.api}/g/s/chat/thread", data=data, headers=self.parse_headers(data=data), proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return response.status_code

    def invite_to_chat(self, userId: [str, list], chatId: str):
        """
        Invite a User or List of Users to a Chat.

        **Parameters**
            - **userId** : ID of the User or List of User IDs.
            - **chatId** : ID of the Chat.

        **Returns**
            - **Success** : 200 (int)

            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        if isinstance(userId, str): userIds = [userId]
        elif isinstance(userId, list): userIds = userId
        else: raise exceptions.WrongType()

        data = json.dumps({
            "uids": userIds,
            "timestamp": int(timestamp() * 1000)
        })

        response = requests.post(f"{self.api}/g/s/chat/thread/{chatId}/member/invite", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return response.status_code

    def kick(self, userId: str, chatId: str, allowRejoin: bool = True):
        # The API expects 1/0 instead of a boolean for allowRejoin.
        allowRejoin = 1 if allowRejoin else 0

        response = requests.delete(f"{self.api}/g/s/chat/thread/{chatId}/member/{userId}?allowRejoin={allowRejoin}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return response.status_code

    def get_chat_messages(self, chatId: str, size: int = 25, pageToken: str = None):
        """
        List of Messages from a Chat.

        **Parameters**
            - **chatId** : ID of the Chat.
            - *size* : Size of the list.
            - *pageToken* : Next Page Token.
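
        **Example**
            A minimal sketch of paging through a chat; ``client`` and the chat ID are
            hypothetical, and ``nextPageToken`` is assumed to be exposed by the
            returned object::

                page = client.get_chat_messages(chatId="<chatId>", size=25)
                older = client.get_chat_messages(chatId="<chatId>", size=25, pageToken=page.nextPageToken)
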
        **Returns**
            - **Success** : :meth:`Message List <amino.lib.util.objects.MessageList>`

            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        if pageToken is not None: url = f"{self.api}/g/s/chat/thread/{chatId}/message?v=2&pagingType=t&pageToken={pageToken}&size={size}"
        else: url = f"{self.api}/g/s/chat/thread/{chatId}/message?v=2&pagingType=t&size={size}"

        response = requests.get(url, headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return objects.GetMessages(json.loads(response.text)).GetMessages

    def get_message_info(self, chatId: str, messageId: str):
        """
        Information of a Message from a Chat.

        **Parameters**
            - **chatId** : ID of the Chat.
            - **messageId** : ID of the Message.

        **Returns**
            - **Success** : :meth:`Message Object <amino.lib.util.objects.Message>`

            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        response = requests.get(f"{self.api}/g/s/chat/thread/{chatId}/message/{messageId}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return objects.Message(json.loads(response.text)["message"]).Message

    def get_community_info(self, comId: str):
        """
        Information of a Community.

        **Parameters**
            - **comId** : ID of the Community.

        **Returns**
            - **Success** : :meth:`Community Object <amino.lib.util.objects.Community>`

            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        response = requests.get(f"{self.api}/g/s-x{comId}/community/info?withInfluencerList=1&withTopicList=true&influencerListOrderStrategy=fansCount", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return response.json()
        else: return objects.Community(json.loads(response.text)["community"]).Community

    def check_community(self, comId: str):
        """
        Get the status code of a Community from its info endpoint.

        **Parameters**
            - **comId** : ID of the Community.

        **Returns**
            - The ``api:statuscode`` field of the response (int).
        """
        response = requests.get(f"{self.api}/g/s-x{comId}/community/info?withInfluencerList=1&withTopicList=true&influencerListOrderStrategy=fansCount", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath)
        return response.json()["api:statuscode"]

    def search_community(self, aminoId: str):
        """
        Search a Community by its Amino ID.

        **Parameters**
            - **aminoId** : Amino ID of the Community.

        **Returns**
            - **Success** : :meth:`Community List <amino.lib.util.objects.CommunityList>`

            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        response = requests.get(f"{self.api}/g/s/search/amino-id-and-link?q={aminoId}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else:
            response = json.loads(response.text)["resultList"]
            if len(response) == 0: raise exceptions.CommunityNotFound(aminoId)
            else: return objects.CommunityList([com["refObject"] for com in response]).CommunityList

    def get_user_following(self, userId: str, start: int = 0, size: int = 25):
        """
        List of Users that the User is Following.

        **Parameters**
            - **userId** : ID of the User.
            - *start* : Where to start the list.
- *size* : Size of the list. **Returns** - **Success** : :meth:`User List <amino.lib.util.objects.UserProfileList>` - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ response = requests.get(f"{self.api}/g/s/user-profile/{userId}/joined?start={start}&size={size}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return objects.UserProfileList(json.loads(response.text)["userProfileList"]).UserProfileList def get_user_followers(self, userId: str, start: int = 0, size: int = 25): """ List of Users that are Following the User. **Parameters** - **userId** : ID of the User. - *start* : Where to start the list. - *size* : Size of the list. **Returns** - **Success** : :meth:`User List <amino.lib.util.objects.UserProfileList>` - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ response = requests.get(f"{self.api}/g/s/user-profile/{userId}/member?start={start}&size={size}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return objects.UserProfileList(json.loads(response.text)["userProfileList"]).UserProfileList def get_user_visitors(self, userId: str, start: int = 0, size: int = 25): """ List of Users that Visited the User. **Parameters** - **userId** : ID of the User. - *start* : Where to start the list. - *size* : Size of the list. **Returns** - **Success** : :meth:`Visitors List <amino.lib.util.objects.VisitorsList>` - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ response = requests.get(f"{self.api}/g/s/user-profile/{userId}/visitors?start={start}&size={size}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return objects.VisitorsList(json.loads(response.text)).VisitorsList def get_blocked_users(self, start: int = 0, size: int = 25): """ List of Users that the User Blocked. **Parameters** - *start* : Where to start the list. - *size* : Size of the list. 
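
        **Example**
            A minimal usage sketch; ``client`` is assumed to be a logged-in instance of
            this class, and ``nickname`` is assumed to be a parallel list of profile
            fields on the returned object::

                blocked = client.get_blocked_users(start=0, size=25)
                print(blocked.nickname)
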
**Returns** - **Success** : :meth:`Users List <amino.lib.util.objects.UserProfileList>` - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ response = requests.get(f"{self.api}/g/s/block?start={start}&size={size}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return objects.UserProfileList(json.loads(response.text)["userProfileList"]).UserProfileList def get_blog_info(self, blogId: str = None, wikiId: str = None, quizId: str = None, fileId: str = None): if blogId or quizId: if quizId is not None: blogId = quizId response = requests.get(f"{self.api}/g/s/blog/{blogId}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return objects.GetBlogInfo(json.loads(response.text)).GetBlogInfo elif wikiId: response = requests.get(f"{self.api}/g/s/item/{wikiId}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return objects.GetWikiInfo(json.loads(response.text)).GetWikiInfo elif fileId: response = requests.get(f"{self.api}/g/s/shared-folder/files/{fileId}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return objects.SharedFolderFile(json.loads(response.text)["file"]).SharedFolderFile else: raise exceptions.SpecifyType() def get_blog_comments(self, blogId: str = None, wikiId: str = None, quizId: str = None, fileId: str = None, sorting: str = "newest", start: int = 0, size: int = 25): if sorting == "newest": sorting = "newest" elif sorting == "oldest": sorting = "oldest" elif sorting == "top": sorting = "vote" else: raise exceptions.WrongType(sorting) if blogId or quizId: if quizId is not None: blogId = quizId response = requests.get(f"{self.api}/g/s/blog/{blogId}/comment?sort={sorting}&start={start}&size={size}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) elif wikiId: response = requests.get(f"{self.api}/g/s/item/{wikiId}/comment?sort={sorting}&start={start}&size={size}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) elif fileId: response = requests.get(f"{self.api}/g/s/shared-folder/files/{fileId}/comment?sort={sorting}&start={start}&size={size}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) else: raise exceptions.SpecifyType() if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return objects.CommentList(json.loads(response.text)["commentList"]).CommentList def get_blocker_users(self, start: int = 0, size: int = 25): """ List of Users that are Blocking the User. **Parameters** - *start* : Where to start the list. - *size* : Size of the list. 
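
        **Example**
            A minimal usage sketch; ``client`` is assumed to be a logged-in instance of
            this class. The method returns plain user IDs::

                for uid in client.get_blocker_users(start=0, size=25): print(uid)
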
**Returns** - **Success** : :meth:`List of User IDs <None>` - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ response = requests.get(f"{self.api}/g/s/block/full-list?start={start}&size={size}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return json.loads(response.text)["blockerUidList"] def get_wall_comments(self, userId: str, sorting: str, start: int = 0, size: int = 25): """ List of Wall Comments of an User. **Parameters** - **userId** : ID of the User. - **sorting** : Order of the Comments. - ``newest``, ``oldest``, ``top`` - *start* : Where to start the list. - *size* : Size of the list. **Returns** - **Success** : :meth:`Comments List <amino.lib.util.objects.CommentList>` - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ if sorting.lower() == "newest": sorting = "newest" elif sorting.lower() == "oldest": sorting = "oldest" elif sorting.lower() == "top": sorting = "vote" else: raise exceptions.WrongType(sorting) response = requests.get(f"{self.api}/g/s/user-profile/{userId}/g-comment?sort={sorting}&start={start}&size={size}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return objects.CommentList(json.loads(response.text)["commentList"]).CommentList def flag(self, reason: str, flagType: int, userId: str = None, blogId: str = None, wikiId: str = None, asGuest: bool = False): """ Flag a User, Blog or Wiki. **Parameters** - **reason** : Reason of the Flag. - **flagType** : Type of the Flag. - **userId** : ID of the User. - **blogId** : ID of the Blog. - **wikiId** : ID of the Wiki. - *asGuest* : Execute as a Guest. **Returns** - **Success** : 200 (int) - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ if reason is None: raise exceptions.ReasonNeeded if flagType is None: raise exceptions.FlagTypeNeeded data = { "flagType": flagType, "message": reason, "timestamp": int(timestamp() * 1000) } if userId: data["objectId"] = userId data["objectType"] = 0 elif blogId: data["objectId"] = blogId data["objectType"] = 1 elif wikiId: data["objectId"] = wikiId data["objectType"] = 2 else: raise exceptions.SpecifyType if asGuest: flg = "g-flag" else: flg = "flag" data = json.dumps(data) response = requests.post(f"{self.api}/g/s/{flg}", data=data, headers=self.parse_headers(data=data), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return response.status_code def send_message(self, chatId: str, message: str = None, messageType: int = 0, file: BinaryIO = None, fileType: str = None, replyTo: str = None, mentionUserIds: list = None, stickerId: str = None, embedId: str = None, embedType: int = None, embedLink: str = None, embedTitle: str = None, embedContent: str = None, embedImage: BinaryIO = None): """ Send a Message to a Chat. **Parameters** - **message** : Message to be sent - **chatId** : ID of the Chat. - **file** : File to be sent. - **fileType** : Type of the file. - ``audio``, ``image``, ``gif`` - **messageType** : Type of the Message. - **mentionUserIds** : List of User IDS to mention. '@' needed in the Message. - **replyTo** : Message ID to reply to. - **stickerId** : Sticker ID to be sent. - **embedTitle** : Title of the Embed. - **embedContent** : Content of the Embed. 
- **embedLink** : Link of the Embed. - **embedImage** : Image of the Embed. - **embedId** : ID of the Embed. **Returns** - **Success** : 200 (int) - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ if message is not None and file is None: message = message.replace("<$", "‎‏").replace("$>", "‬‭") mentions = [] if mentionUserIds: for mention_uid in mentionUserIds: mentions.append({"uid": mention_uid}) if embedImage: embedImage = [[100, self.upload_media(embedImage, "image"), None]] data = { "type": messageType, "content": message, "clientRefId": int(timestamp() / 10 % 1000000000), "attachedObject": { "objectId": embedId, "objectType": embedType, "link": embedLink, "title": embedTitle, "content": embedContent, "mediaList": embedImage }, "extensions": {"mentionedArray": mentions}, "timestamp": int(timestamp() * 1000) } if replyTo: data["replyMessageId"] = replyTo if stickerId: data["content"] = None data["stickerId"] = stickerId data["type"] = 3 if file: data["content"] = None if fileType == "audio": data["type"] = 2 data["mediaType"] = 110 elif fileType == "image": data["mediaType"] = 100 data["mediaUploadValueContentType"] = "image/jpg" data["mediaUhqEnabled"] = True elif fileType == "gif": data["mediaType"] = 100 data["mediaUploadValueContentType"] = "image/gif" data["mediaUhqEnabled"] = True else: raise exceptions.SpecifyType data["mediaUploadValue"] = base64.b64encode(file.read()).decode() data = json.dumps(data) response = requests.post(f"{self.api}/g/s/chat/thread/{chatId}/message", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return response.status_code def delete_message(self, chatId: str, messageId: str, asStaff: bool = False, reason: str = None): """ Delete a Message from a Chat. **Parameters** - **messageId** : ID of the Message. - **chatId** : ID of the Chat. - **asStaff** : If execute as a Staff member (Leader or Curator). - **reason** : Reason of the action to show on the Moderation History. **Returns** - **Success** : 200 (int) - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ data = { "adminOpName": 102, "adminOpNote": {"content": reason}, "timestamp": int(timestamp() * 1000) } data = json.dumps(data) if not asStaff: response = requests.delete(f"{self.api}/g/s/chat/thread/{chatId}/message/{messageId}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) else: response = requests.post(f"{self.api}/g/s/chat/thread/{chatId}/message/{messageId}/admin", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return response.status_code def mark_as_read(self, chatId: str, messageId: str): """ Mark a Message from a Chat as Read. **Parameters** - **messageId** : ID of the Message. - **chatId** : ID of the Chat. 
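
        **Example**
            A minimal usage sketch; ``client`` is assumed to be a logged-in instance of
            this class and both IDs are hypothetical placeholders::

                client.mark_as_read(chatId="<chatId>", messageId="<messageId>")
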
**Returns** - **Success** : 200 (int) - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ data = json.dumps({ "messageId": messageId, "timestamp": int(timestamp() * 1000) }) response = requests.post(f"{self.api}/g/s/chat/thread/{chatId}/mark-as-read", headers=self.parse_headers(), data=data, proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return response.status_code def edit_chat(self, chatId: str, doNotDisturb: bool = None, pinChat: bool = None, title: str = None, icon: str = None, backgroundImage: str = None, content: str = None, announcement: str = None, coHosts: list = None, keywords: list = None, pinAnnouncement: bool = None, publishToGlobal: bool = None, canTip: bool = None, viewOnly: bool = None, canInvite: bool = None, fansOnly: bool = None): """ Send a Message to a Chat. **Parameters** - **chatId** : ID of the Chat. - **title** : Title of the Chat. - **content** : Content of the Chat. - **icon** : Icon of the Chat. - **backgroundImage** : Url of the Background Image of the Chat. - **announcement** : Announcement of the Chat. - **pinAnnouncement** : If the Chat Announcement should Pinned or not. - **coHosts** : List of User IDS to be Co-Host. - **keywords** : List of Keywords of the Chat. - **viewOnly** : If the Chat should be on View Only or not. - **canTip** : If the Chat should be Tippable or not. - **canInvite** : If the Chat should be Invitable or not. - **fansOnly** : If the Chat should be Fans Only or not. - **publishToGlobal** : If the Chat should show on Public Chats or not. - **doNotDisturb** : If the Chat should Do Not Disturb or not. - **pinChat** : If the Chat should Pinned or not. **Returns** - **Success** : 200 (int) - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ data = {"timestamp": int(timestamp() * 1000)} if title: data["title"] = title if content: data["content"] = content if icon: data["icon"] = icon if keywords: data["keywords"] = keywords if announcement: data["extensions"] = {"announcement": announcement} if pinAnnouncement: data["extensions"] = {"pinAnnouncement": pinAnnouncement} if fansOnly: data["extensions"] = {"fansOnly": fansOnly} if publishToGlobal: data["publishToGlobal"] = 0 if not publishToGlobal: data["publishToGlobal"] = 1 res = [] if doNotDisturb is not None: if doNotDisturb: data = json.dumps({"alertOption": 2, "timestamp": int(timestamp() * 1000)}) response = requests.post(f"{self.api}/g/s/chat/thread/{chatId}/member/{self.userId}/alert", data=data, headers=self.parse_headers(data=data), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: res.append(exceptions.CheckException(json.loads(response.text))) else: res.append(response.status_code) if not doNotDisturb: data = json.dumps({"alertOption": 1, "timestamp": int(timestamp() * 1000)}) response = requests.post(f"{self.api}/g/s/chat/thread/{chatId}/member/{self.userId}/alert", data=data, headers=self.parse_headers(data=data), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: res.append(exceptions.CheckException(json.loads(response.text))) else: res.append(response.status_code) if pinChat is not None: if pinChat: response = requests.post(f"{self.api}/g/s/chat/thread/{chatId}/pin", data=data, headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: res.append(exceptions.CheckException(json.loads(response.text))) else: res.append(response.status_code) 
if not pinChat: response = requests.post(f"{self.api}/g/s/chat/thread/{chatId}/unpin", data=data, headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: res.append(exceptions.CheckException(json.loads(response.text))) else: res.append(response.status_code) if backgroundImage is not None: data = json.dumps({"media": [100, backgroundImage, None], "timestamp": int(timestamp() * 1000)}) response = requests.post(f"{self.api}/g/s/chat/thread/{chatId}/member/{self.userId}/background", data=data, headers=self.parse_headers(data=data), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: res.append(exceptions.CheckException(json.loads(response.text))) else: res.append(response.status_code) if coHosts is not None: data = json.dumps({"uidList": coHosts, "timestamp": int(timestamp() * 1000)}) response = requests.post(f"{self.api}/g/s/chat/thread/{chatId}/co-host", data=data, headers=self.parse_headers(data=data), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: res.append(exceptions.CheckException(json.loads(response.text))) else: res.append(response.status_code) if viewOnly is not None: if viewOnly: response = requests.post(f"{self.api}/g/s/chat/thread/{chatId}/view-only/enable", data=data, headers=self.parse_headers(data=data), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: res.append(exceptions.CheckException(json.loads(response.text))) else: res.append(response.status_code) if not viewOnly: response = requests.post(f"{self.api}/g/s/chat/thread/{chatId}/view-only/disable", data=data, headers=self.parse_headers(data=data), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: res.append(exceptions.CheckException(json.loads(response.text))) else: res.append(response.status_code) if canInvite is not None: if canInvite: response = requests.post(f"{self.api}/g/s/chat/thread/{chatId}/members-can-invite/enable", data=data, headers=self.parse_headers(data=data), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: res.append(exceptions.CheckException(json.loads(response.text))) else: res.append(response.status_code) if not canInvite: response = requests.post(f"{self.api}/g/s/chat/thread/{chatId}/members-can-invite/disable", data=data, headers=self.parse_headers(data=data), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: res.append(exceptions.CheckException(json.loads(response.text))) else: res.append(response.status_code) if canTip is not None: if canTip: response = requests.post(f"{self.api}/g/s/chat/thread/{chatId}/tipping-perm-status/enable", data=data, headers=self.parse_headers(data=data), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: res.append(exceptions.CheckException(json.loads(response.text))) else: res.append(response.status_code) if not canTip: response = requests.post(f"{self.api}/g/s/chat/thread/{chatId}/tipping-perm-status/disable", data=data, headers=self.parse_headers(data=data), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: res.append(exceptions.CheckException(json.loads(response.text))) else: res.append(response.status_code) data = json.dumps(data) response = requests.post(f"{self.api}/g/s/chat/thread/{chatId}", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: 
            res.append(exceptions.CheckException(json.loads(response.text)))
        else: res.append(response.status_code)

        return res

    def visit(self, userId: str):
        """
        Visit a User.

        **Parameters**
            - **userId** : ID of the User.

        **Returns**
            - **Success** : 200 (int)

            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        response = requests.get(f"{self.api}/g/s/user-profile/{userId}?action=visit", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return response.status_code

    def send_coins(self, coins: int, blogId: str = None, chatId: str = None, objectId: str = None, transactionId: str = None):
        """
        Send Coins to a Blog, Chat or Object.

        **Parameters**
            - **coins** : Amount of coins to send.
            - **blogId** : ID of the Blog. (for Blogs)
            - **chatId** : ID of the Chat. (for Chats)
            - **objectId** : ID of the Object. (for Objects)
            - **transactionId** : Transaction ID; a random UUID is generated when omitted.

        **Returns**
            - **Success** : 200 (int)

            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        url = None
        if transactionId is None: transactionId = str(UUID(hexlify(urandom(16)).decode('ascii')))

        data = {
            "coins": coins,
            "tippingContext": {"transactionId": transactionId},
            "timestamp": int(timestamp() * 1000)
        }

        if blogId is not None: url = f"{self.api}/g/s/blog/{blogId}/tipping"
        if chatId is not None: url = f"{self.api}/g/s/chat/thread/{chatId}/tipping"
        if objectId is not None:
            data["objectId"] = objectId
            data["objectType"] = 2
            url = f"{self.api}/g/s/tipping"

        if url is None: raise exceptions.SpecifyType()

        data = json.dumps(data)
        response = requests.post(url, headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return response.status_code

    def follow(self, userId: [str, list]):
        """
        Follow a User or Multiple Users.

        **Parameters**
            - **userId** : ID of the User or List of IDs of the Users.

        **Returns**
            - **Success** : 200 (int)

            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        if isinstance(userId, str):
            response = requests.post(f"{self.api}/g/s/user-profile/{userId}/member", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath)
        elif isinstance(userId, list):
            data = json.dumps({"targetUidList": userId, "timestamp": int(timestamp() * 1000)})
            response = requests.post(f"{self.api}/g/s/user-profile/{self.userId}/joined", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath)
        else: raise exceptions.WrongType()

        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return response.status_code

    def unfollow(self, userId: str):
        """
        Unfollow a User.

        **Parameters**
            - **userId** : ID of the User.

        **Returns**
            - **Success** : 200 (int)

            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        response = requests.delete(f"{self.api}/g/s/user-profile/{userId}/member/{self.userId}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return response.status_code

    def block(self, userId: str):
        """
        Block a User.

        **Parameters**
            - **userId** : ID of the User.

        **Returns**
            - **Success** : 200 (int)

            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        response = requests.post(f"{self.api}/g/s/block/{userId}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return response.status_code

    def unblock(self, userId: str):
        """
        Unblock a User.

        **Parameters**
            - **userId** : ID of the User.
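
        **Example**
            A minimal usage sketch; ``client`` is assumed to be a logged-in instance of
            this class and the user ID is a hypothetical placeholder::

                client.unblock(userId="<userId>")
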
**Returns** - **Success** : 200 (int) - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ response = requests.delete(f"{self.api}/g/s/block/{userId}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return response.status_code def join_community(self, comId: str, invitationId: str = None): """ Join a Community. **Parameters** - **comId** : ID of the Community. - **invitationId** : ID of the Invitation Code. **Returns** - **Success** : 200 (int) - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ data = {"timestamp": int(timestamp() * 1000)} if invitationId: data["invitationId"] = invitationId data = json.dumps(data) response = requests.post(f"{self.api}/x{comId}/s/community/join", data=data, headers=self.parse_headers(data=data), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return response.status_code def request_join_community(self, comId: str, message: str = None): """ Request to join a Community. **Parameters** - **comId** : ID of the Community. - **message** : Message to be sent. **Returns** - **Success** : 200 (int) - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ data = json.dumps({"message": message, "timestamp": int(timestamp() * 1000)}) response = requests.post(f"{self.api}/x{comId}/s/community/membership-request", data=data, headers=self.parse_headers(data=data), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return response.status_code def leave_community(self, comId: str): """ Leave a Community. **Parameters** - **comId** : ID of the Community. **Returns** - **Success** : 200 (int) - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ response = requests.post(f"{self.api}/x{comId}/s/community/leave", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return response.status_code def flag_community(self, comId: str, reason: str, flagType: int, isGuest: bool = False): """ Flag a Community. **Parameters** - **comId** : ID of the Community. - **reason** : Reason of the Flag. - **flagType** : Type of Flag. **Returns** - **Success** : 200 (int) - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ if reason is None: raise exceptions.ReasonNeeded if flagType is None: raise exceptions.FlagTypeNeeded data = json.dumps({ "objectId": comId, "objectType": 16, "flagType": flagType, "message": reason, "timestamp": int(timestamp() * 1000) }) if isGuest: flg = "g-flag" else: flg = "flag" response = requests.post(f"{self.api}/x{comId}/s/{flg}", data=data, headers=self.parse_headers(data=data), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return response.status_code def edit_profile(self, nickname: str = None, content: str = None, icon: BinaryIO = None, backgroundColor: str = None, backgroundImage: str = None, defaultBubbleId: str = None): """ Edit account's Profile. **Parameters** - **nickname** : Nickname of the Profile. - **content** : Biography of the Profile. - **icon** : Icon of the Profile. - **backgroundImage** : Url of the Background Picture of the Profile. 
            - **backgroundColor** : Hexadecimal Background Color of the Profile.
            - **defaultBubbleId** : Chat bubble ID.

        **Returns**
            - **Success** : 200 (int)

            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        data = {
            "address": None,
            "latitude": 0,
            "longitude": 0,
            "mediaList": None,
            "eventSource": "UserProfileView",
            "timestamp": int(timestamp() * 1000)
        }

        if nickname: data["nickname"] = nickname
        if icon: data["icon"] = self.upload_media(icon, "image")
        if content: data["content"] = content

        # Build "extensions" incrementally so that multiple options can be combined
        # (assigning a fresh dict for each option would overwrite the previous ones).
        extensions = {}
        if backgroundColor: extensions.setdefault("style", {})["backgroundColor"] = backgroundColor
        if backgroundImage: extensions.setdefault("style", {})["backgroundMediaList"] = [[100, backgroundImage, None, None, None]]
        if defaultBubbleId: extensions["defaultBubbleId"] = defaultBubbleId
        if extensions: data["extensions"] = extensions

        data = json.dumps(data)
        response = requests.post(f"{self.api}/g/s/user-profile/{self.userId}", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return response.status_code

    def set_privacy_status(self, isAnonymous: bool = False, getNotifications: bool = False):
        """
        Edit account's Privacy Status.

        **Parameters**
            - **isAnonymous** : If visibility should be Anonymous or not.
            - **getNotifications** : If account should get new Visitors Notifications.

        **Returns**
            - **Success** : 200 (int)

            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        data = {"timestamp": int(timestamp() * 1000)}

        if not isAnonymous: data["privacyMode"] = 1
        if isAnonymous: data["privacyMode"] = 2
        # notificationStatus: 1 = notifications on, 2 = notifications off.
        if not getNotifications: data["notificationStatus"] = 2
        if getNotifications: data["notificationStatus"] = 1

        data = json.dumps(data)
        response = requests.post(f"{self.api}/g/s/account/visit-settings", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return response.status_code

    def set_amino_id(self, aminoId: str):
        """
        Edit account's Amino ID.

        **Parameters**
            - **aminoId** : Amino ID of the Account.

        **Returns**
            - **Success** : 200 (int)

            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        data = json.dumps({"aminoId": aminoId, "timestamp": int(timestamp() * 1000)})
        response = requests.post(f"{self.api}/g/s/account/change-amino-id", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return response.status_code

    def get_linked_communities(self, userId: str):
        """
        Get a List of Linked Communities of a User.

        **Parameters**
            - **userId** : ID of the User.

        **Returns**
            - **Success** : :meth:`Community List <amino.lib.util.objects.CommunityList>`

            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        response = requests.get(f"{self.api}/g/s/user-profile/{userId}/linked-community", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return objects.CommunityList(json.loads(response.text)["linkedCommunityList"]).CommunityList

    def get_unlinked_communities(self, userId: str):
        """
        Get a List of Unlinked Communities of a User.

        **Parameters**
            - **userId** : ID of the User.
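
        **Example**
            A minimal usage sketch; ``client`` is assumed to be a logged-in instance of
            this class and the user ID is a hypothetical placeholder::

                unlinked = client.get_unlinked_communities(userId="<userId>")
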
**Returns** - **Success** : :meth:`Community List <amino.lib.util.objects.CommunityList>` - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ response = requests.get(f"{self.api}/g/s/user-profile/{userId}/linked-community", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return objects.CommunityList(json.loads(response.text)["unlinkedCommunityList"]).CommunityList def reorder_linked_communities(self, comIds: list): """ Reorder List of Linked Communities. **Parameters** - **comIds** : IDS of the Communities. **Returns** - **Success** : 200 (int) - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ data = json.dumps({"ndcIds": comIds, "timestamp": int(timestamp() * 1000)}) response = requests.post(f"{self.api}/g/s/user-profile/{self.userId}/linked-community/reorder", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return response.status_code def add_linked_community(self, comId: str): """ Add a Linked Community on your profile. **Parameters** - **comId** : ID of the Community. **Returns** - **Success** : 200 (int) - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ response = requests.post(f"{self.api}/g/s/user-profile/{self.userId}/linked-community/{comId}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return response.status_code def remove_linked_community(self, comId: str): """ Remove a Linked Community on your profile. **Parameters** - **comId** : ID of the Community. **Returns** - **Success** : 200 (int) - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ response = requests.delete(f"{self.api}/g/s/user-profile/{self.userId}/linked-community/{comId}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return response.status_code def comment(self, message: str, userId: str = None, blogId: str = None, wikiId: str = None, replyTo: str = None): """ Comment on a User's Wall, Blog or Wiki. **Parameters** - **message** : Message to be sent. - **userId** : ID of the User. (for Walls) - **blogId** : ID of the Blog. (for Blogs) - **wikiId** : ID of the Wiki. (for Wikis) - **replyTo** : ID of the Comment to Reply to. 
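
        **Example**
            A minimal sketch of commenting on a user's wall; ``client`` and the user ID
            are hypothetical placeholders::

                client.comment(message="Nice profile!", userId="<userId>")
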
**Returns** - **Success** : 200 (int) - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ if message is None: raise exceptions.MessageNeeded data = { "content": message, "stickerId": None, "type": 0, "timestamp": int(timestamp() * 1000) } if replyTo: data["respondTo"] = replyTo if userId: data["eventSource"] = "UserProfileView" data = json.dumps(data) response = requests.post(f"{self.api}/g/s/user-profile/{userId}/g-comment", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath) elif blogId: data["eventSource"] = "PostDetailView" data = json.dumps(data) response = requests.post(f"{self.api}/g/s/blog/{blogId}/g-comment", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath) elif wikiId: data["eventSource"] = "PostDetailView" data = json.dumps(data) response = requests.post(f"{self.api}/g/s/item/{wikiId}/g-comment", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath) else: raise exceptions.SpecifyType if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return response.status_code def delete_comment(self, commentId: str, userId: str = None, blogId: str = None, wikiId: str = None): """ Delete a Comment on a User's Wall, Blog or Wiki. **Parameters** - **commentId** : ID of the Comment. - **userId** : ID of the User. (for Walls) - **blogId** : ID of the Blog. (for Blogs) - **wikiId** : ID of the Wiki. (for Wikis) **Returns** - **Success** : 200 (int) - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ if userId: response = requests.delete(f"{self.api}/g/s/user-profile/{userId}/g-comment/{commentId}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) elif blogId: response = requests.delete(f"{self.api}/g/s/blog/{blogId}/g-comment/{commentId}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) elif wikiId: response = requests.delete(f"{self.api}/g/s/item/{wikiId}/g-comment/{commentId}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) else: raise exceptions.SpecifyType if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return response.status_code def like_blog(self, blogId: [str, list] = None, wikiId: str = None): """ Like a Blog, Multiple Blogs or a Wiki. **Parameters** - **blogId** : ID of the Blog or List of IDs of the Blogs. (for Blogs) - **wikiId** : ID of the Wiki. 
(for Wikis) **Returns** - **Success** : 200 (int) - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ data = { "value": 4, "timestamp": int(timestamp() * 1000) } if blogId: if isinstance(blogId, str): data["eventSource"] = "UserProfileView" data = json.dumps(data) response = requests.post(f"{self.api}/g/s/blog/{blogId}/g-vote?cv=1.2", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath) elif isinstance(blogId, list): data["targetIdList"] = blogId data = json.dumps(data) response = requests.post(f"{self.api}/g/s/feed/g-vote", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath) else: raise exceptions.WrongType(type(blogId)) elif wikiId: data["eventSource"] = "PostDetailView" data = json.dumps(data) response = requests.post(f"{self.api}/g/s/item/{wikiId}/g-vote?cv=1.2", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath) else: raise exceptions.SpecifyType() if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return response.status_code def unlike_blog(self, blogId: str = None, wikiId: str = None): """ Remove a like from a Blog or Wiki. **Parameters** - **blogId** : ID of the Blog. (for Blogs) - **wikiId** : ID of the Wiki. (for Wikis) **Returns** - **Success** : 200 (int) - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ if blogId: response = requests.delete(f"{self.api}/g/s/blog/{blogId}/g-vote?eventSource=UserProfileView", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) elif wikiId: response = requests.delete(f"{self.api}/g/s/item/{wikiId}/g-vote?eventSource=PostDetailView", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) else: raise exceptions.SpecifyType if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return response.status_code def like_comment(self, commentId: str, userId: str = None, blogId: str = None, wikiId: str = None): """ Like a Comment on a User's Wall, Blog or Wiki. **Parameters** - **commentId** : ID of the Comment. - **userId** : ID of the User. (for Walls) - **blogId** : ID of the Blog. (for Blogs) - **wikiId** : ID of the Wiki. 
(for Wikis) **Returns** - **Success** : 200 (int) - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ data = { "value": 4, "timestamp": int(timestamp() * 1000) } if userId: data["eventSource"] = "UserProfileView" data = json.dumps(data) response = requests.post(f"{self.api}/g/s/user-profile/{userId}/comment/{commentId}/g-vote?cv=1.2&value=1", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath) elif blogId: data["eventSource"] = "PostDetailView" data = json.dumps(data) response = requests.post(f"{self.api}/g/s/blog/{blogId}/comment/{commentId}/g-vote?cv=1.2&value=1", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath) elif wikiId: data["eventSource"] = "PostDetailView" data = json.dumps(data) response = requests.post(f"{self.api}/g/s/item/{wikiId}/comment/{commentId}/g-vote?cv=1.2&value=1", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath) else: raise exceptions.SpecifyType if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return response.status_code def unlike_comment(self, commentId: str, userId: str = None, blogId: str = None, wikiId: str = None): """ Remove a like from a Comment on a User's Wall, Blog or Wiki. **Parameters** - **commentId** : ID of the Comment. - **userId** : ID of the User. (for Walls) - **blogId** : ID of the Blog. (for Blogs) - **wikiId** : ID of the Wiki. (for Wikis) **Returns** - **Success** : 200 (int) - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ if userId: response = requests.delete(f"{self.api}/g/s/user-profile/{userId}/comment/{commentId}/g-vote?eventSource=UserProfileView", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) elif blogId: response = requests.delete(f"{self.api}/g/s/blog/{blogId}/comment/{commentId}/g-vote?eventSource=PostDetailView", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) elif wikiId: response = requests.delete(f"{self.api}/g/s/item/{wikiId}/comment/{commentId}/g-vote?eventSource=PostDetailView", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) else: raise exceptions.SpecifyType if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return response.status_code def get_membership_info(self): """ Get Information about your Amino+ Membership. **Parameters** - No parameters required. **Returns** - **Success** : :meth:`Membership Object <amino.lib.util.objects.Membership>` - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ response = requests.get(f"{self.api}/g/s/membership?force=true", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return objects.Membership(json.loads(response.text)).Membership def get_ta_announcements(self, language: str = "en", start: int = 0, size: int = 25): """ Get the list of Team Amino's Announcement Blogs. **Parameters** - **language** : Language of the Blogs. - ``en``, ``es``, ``pt``, ``ar``, ``ru``, ``fr``, ``de`` - *start* : Where to start the list. - *size* : Size of the list. 
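
        **Example**
            A minimal usage sketch; ``client`` is assumed to be a logged-in instance of
            this class::

                blogs = client.get_ta_announcements(language="en", size=10)
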
**Returns** - **Success** : :meth:`Blogs List <amino.lib.util.objects.BlogList>` - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ if language not in self.get_supported_languages(): raise exceptions.UnsupportedLanguage(language) response = requests.get(f"{self.api}/g/s/announcement?language={language}&start={start}&size={size}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return objects.BlogList(json.loads(response.text)["blogList"]).BlogList def get_wallet_info(self): """ Get Information about the account's Wallet. **Parameters** - No parameters required. **Returns** - **Success** : :meth:`Wallet Object <amino.lib.util.objects.WalletInfo>` - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ response = requests.get(f"{self.api}/g/s/wallet", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return objects.WalletInfo(json.loads(response.text)["wallet"]).WalletInfo def get_wallet_history(self, start: int = 0, size: int = 25): """ Get the Wallet's History Information. **Parameters** - *start* : Where to start the list. - *size* : Size of the list. **Returns** - **Success** : :meth:`Wallet Object <amino.lib.util.objects.WalletInfo>` - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ response = requests.get(f"{self.api}/g/s/wallet/coin/history?start={start}&size={size}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return objects.WalletHistory(json.loads(response.text)["coinHistoryList"]).WalletHistory def get_from_deviceid(self, deviceId: str): """ Get the User ID from an Device ID. **Parameters** - **deviceID** : ID of the Device. **Returns** - **Success** : :meth:`User ID <amino.lib.util.objects.UserProfile.userId>` - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ response = requests.get(f"{self.api}/g/s/auid?deviceId={deviceId}") if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return json.loads(response.text)["auid"] def get_from_code(self, code: str): """ Get the Object Information from the Amino URL Code. **Parameters** - **code** : Code from the Amino URL. - ``http://aminoapps.com/p/EXAMPLE``, the ``code`` is 'EXAMPLE'. **Returns** - **Success** : :meth:`From Code Object <amino.lib.util.objects.FromCode>` - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>` """ response = requests.get(f"{self.api}/g/s/link-resolution?q={code}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath) if response.status_code != 200: return exceptions.CheckException(json.loads(response.text)) else: return objects.FromCode(json.loads(response.text)["linkInfoV2"]).FromCode def get_from_id(self, objectId: str, objectType: int, comId: str = None): """ Get the Object Information from the Object ID and Type. **Parameters** - **objectID** : ID of the Object. User ID, Blog ID, etc. - **objectType** : Type of the Object. - *comId* : ID of the Community. Use if the Object is in a Community. 
        **Returns**
            - **Success** : :meth:`From Code Object <amino.lib.util.objects.FromCode>`

            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        data = json.dumps({
            "objectId": objectId,
            "targetCode": 1,
            "objectType": objectType,
            "timestamp": int(timestamp() * 1000)
        })

        if comId: response = requests.post(f"{self.api}/g/s-x{comId}/link-resolution", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath)
        else: response = requests.post(f"{self.api}/g/s/link-resolution", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return objects.FromCode(json.loads(response.text)["linkInfoV2"]).FromCode

    def get_supported_languages(self):
        """
        Get the List of Supported Languages by Amino.

        **Parameters**
            - No parameters required.

        **Returns**
            - **Success** : :meth:`List of Supported Languages <List>`

            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        response = requests.get(f"{self.api}/g/s/community-collection/supported-languages?start=0&size=100", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return json.loads(response.text)["supportedLanguages"]

    def devicee(self):
        """Fetch a freshly generated Device ID from an external generator service."""
        return requests.get("https://ka-generator.herokuapp.com/device").text

    def claim_new_user_coupon(self):
        """
        Claim the New User Coupon available when a new account is created.

        **Parameters**
            - No parameters required.

        **Returns**
            - **Success** : 200 (int)

            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        response = requests.post(f"{self.api}/g/s/coupon/new-user-coupon/claim", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return response.status_code

    def get_subscriptions(self, start: int = 0, size: int = 25):
        """
        Get Information about the account's Subscriptions.

        **Parameters**
            - *start* : Where to start the list.
            - *size* : Size of the list.

        **Returns**
            - **Success** : :meth:`List <List>`

            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        response = requests.get(f"{self.api}/g/s/store/subscription?objectType=122&start={start}&size={size}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return json.loads(response.text)["storeSubscriptionItemList"]

    def get_all_users(self, start: int = 0, size: int = 25):
        """
        Get the list of Amino users.

        **Parameters**
            - *start* : Where to start the list.
            - *size* : Size of the list.
        **Returns**
            - **Success** : :meth:`User Profile Count List Object <amino.lib.util.objects.UserProfileCountList>`

            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        response = requests.get(f"{self.api}/g/s/user-profile?type=recent&start={start}&size={size}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return objects.UserProfileCountList(json.loads(response.text)).UserProfileCountList

    def accept_host(self, chatId: str, requestId: str):
        """Accept a pending organizer (host) transfer request in a Chat."""
        data = json.dumps({})

        response = requests.post(f"{self.api}/g/s/chat/thread/{chatId}/transfer-organizer/{requestId}/accept", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return response.status_code

    def accept_organizer(self, chatId: str, requestId: str):
        """Alias of :meth:`accept_host`."""
        return self.accept_host(chatId, requestId)

    # Contributed by 'https://github.com/LynxN1'
    def link_identify(self, code: str):
        """Resolve an invite code to its community information."""
        response = requests.get(f"{self.api}/g/s/community/link-identify?q=http%3A%2F%2Faminoapps.com%2Finvite%2F{code}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath)
        return json.loads(response.text)

    def invite_to_vc(self, chatId: str, userId: str):
        """
        Invite a User to a Voice Chat.

        **Parameters**
            - **chatId** - ID of the Chat
            - **userId** - ID of the User

        **Returns**
            - **Success** : 200 (int)

            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        data = json.dumps({
            "uid": userId
        })

        response = requests.post(f"{self.api}/g/s/chat/thread/{chatId}/vvchat-presenter/invite", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return response.status_code

    def wallet_config(self, level: int):
        """
        Change the ads configuration.

        **Parameters**
            - **level** - Level of the ads.
                - ``1``, ``2``

        **Returns**
            - **Success** : 200 (int)

            - **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
        """
        data = json.dumps({
            "adsLevel": level,
            "timestamp": int(timestamp() * 1000)
        })

        response = requests.post(f"{self.api}/g/s/wallet/ads/config", headers=self.parse_headers(data=data), data=data, proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return response.status_code

    def get_avatar_frames(self, start: int = 0, size: int = 25):
        """Get the list of available avatar frames."""
        response = requests.get(f"{self.api}/g/s/avatar-frame?start={start}&size={size}", headers=self.parse_headers(), proxies=self.proxies, verify=self.certificatePath)
        if response.status_code != 200: return exceptions.CheckException(json.loads(response.text))
        else: return objects.AvatarFrameList(json.loads(response.text)["avatarFrameList"]).AvatarFrameList
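
Taken together, the voting, wallet, and link-resolution helpers above form a small client surface. The sketch below shows how they might be called; the `Client` class name and `login()` signature are assumptions inferred from the `amino` package paths in the docstrings, not something this excerpt defines.

import amino

# Assumed entry point; this excerpt only shows the request helpers.
client = amino.Client()
client.login(email="user@example.com", password="hunter2")

client.like_blog(blogId="0000-0000")      # returns 200 on success
client.unlike_blog(blogId="0000-0000")

wallet = client.get_wallet_info()         # WalletInfo object
history = client.get_wallet_history(start=0, size=10)

# Resolve an aminoapps.com/p/EXAMPLE share code.
info = client.get_from_code("EXAMPLE")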
PypiClean
/Kr0nOs-3.4.1.tar.gz/Kr0nOs-3.4.1/kronbot/core/bank.py
from __future__ import annotations import asyncio import datetime from functools import wraps from typing import TYPE_CHECKING, List, Optional, Union import discord from kronbot.core.utils.chat_formatting import humanize_number from . import Config, commands, errors from .errors import BankPruneError from .i18n import Translator if TYPE_CHECKING: from .bot import Kron _ = Translator("Bank API", __file__) __all__ = [ "Account", "get_balance", "set_balance", "withdraw_credits", "deposit_credits", "can_spend", "transfer_credits", "wipe_bank", "get_account", "is_global", "set_global", "get_bank_name", "set_bank_name", "get_currency_name", "set_currency_name", "get_default_balance", "set_default_balance", "get_max_balance", "set_max_balance", "cost", "AbortPurchase", "bank_prune", ] _MAX_BALANCE = 2 ** 63 - 1 _DEFAULT_GLOBAL = { "is_global": False, "bank_name": "Twentysix bank", "currency": "credits", "default_balance": 100, "max_balance": _MAX_BALANCE, } _DEFAULT_GUILD = { "bank_name": "Twentysix bank", "currency": "credits", "default_balance": 100, "max_balance": _MAX_BALANCE, } _DEFAULT_MEMBER = {"name": "", "balance": 0, "created_at": 0} _DEFAULT_USER = _DEFAULT_MEMBER _conf: Config = None def _init(): global _conf _conf = Config.get_conf(None, 384734293238749, cog_name="Bank", force_registration=True) _conf.register_global(**_DEFAULT_GLOBAL) _conf.register_guild(**_DEFAULT_GUILD) _conf.register_member(**_DEFAULT_MEMBER) _conf.register_user(**_DEFAULT_USER) class Account: """A single account. This class should ONLY be instantiated by the bank itself.""" def __init__(self, name: str, balance: int, created_at: datetime.datetime): self.name = name self.balance = balance self.created_at = created_at def _encoded_current_time() -> int: """Get the current UTC time as a timestamp. Returns ------- int The current UTC timestamp. """ now = datetime.datetime.utcnow() return _encode_time(now) def _encode_time(time: datetime.datetime) -> int: """Convert a datetime object to a serializable int. Parameters ---------- time : datetime.datetime The datetime to convert. Returns ------- int The timestamp of the datetime object. """ ret = int(time.timestamp()) return ret def _decode_time(time: int) -> datetime.datetime: """Convert a timestamp to a datetime object. Parameters ---------- time : int The timestamp to decode. Returns ------- datetime.datetime The datetime object from the timestamp. """ return datetime.datetime.utcfromtimestamp(time) async def get_balance(member: discord.Member) -> int: """Get the current balance of a member. Parameters ---------- member : discord.Member The member whose balance to check. Returns ------- int The member's balance """ acc = await get_account(member) return acc.balance async def can_spend(member: discord.Member, amount: int) -> bool: """Determine if a member can spend the given amount. Parameters ---------- member : discord.Member The member wanting to spend. amount : int The amount the member wants to spend. Returns ------- bool :code:`True` if the member has a sufficient balance to spend the amount, else :code:`False`. """ if _invalid_amount(amount): return False return await get_balance(member) >= amount async def set_balance(member: Union[discord.Member, discord.User], amount: int) -> int: """Set an account balance. Parameters ---------- member : Union[discord.Member, discord.User] The member whose balance to set. amount : int The amount to set the balance to. Returns ------- int New account balance. 
Raises ------ ValueError If attempting to set the balance to a negative number. RuntimeError If the bank is guild-specific and a discord.User object is provided. BalanceTooHigh If attempting to set the balance to a value greater than ``bank._MAX_BALANCE``. """ if amount < 0: raise ValueError("Not allowed to have negative balance.") guild = getattr(member, "guild", None) max_bal = await get_max_balance(guild) if amount > max_bal: currency = await get_currency_name(guild) raise errors.BalanceTooHigh( user=member.display_name, max_balance=max_bal, currency_name=currency ) if await is_global(): group = _conf.user(member) else: group = _conf.member(member) await group.balance.set(amount) if await group.created_at() == 0: time = _encoded_current_time() await group.created_at.set(time) if await group.name() == "": await group.name.set(member.display_name) return amount def _invalid_amount(amount: int) -> bool: return amount < 0 async def withdraw_credits(member: discord.Member, amount: int) -> int: """Remove a certain amount of credits from an account. Parameters ---------- member : discord.Member The member to withdraw credits from. amount : int The amount to withdraw. Returns ------- int New account balance. Raises ------ ValueError If the withdrawal amount is invalid or if the account has insufficient funds. TypeError If the withdrawal amount is not an `int`. """ if not isinstance(amount, int): raise TypeError("Withdrawal amount must be of type int, not {}.".format(type(amount))) if _invalid_amount(amount): raise ValueError( "Invalid withdrawal amount {} < 0".format( humanize_number(amount, override_locale="en_US") ) ) bal = await get_balance(member) if amount > bal: raise ValueError( "Insufficient funds {} > {}".format( humanize_number(amount, override_locale="en_US"), humanize_number(bal, override_locale="en_US"), ) ) return await set_balance(member, bal - amount) async def deposit_credits(member: discord.Member, amount: int) -> int: """Add a given amount of credits to an account. Parameters ---------- member : discord.Member The member to deposit credits to. amount : int The amount to deposit. Returns ------- int The new balance. Raises ------ ValueError If the deposit amount is invalid. TypeError If the deposit amount is not an `int`. """ if not isinstance(amount, int): raise TypeError("Deposit amount must be of type int, not {}.".format(type(amount))) if _invalid_amount(amount): raise ValueError( "Invalid deposit amount {} <= 0".format( humanize_number(amount, override_locale="en_US") ) ) bal = await get_balance(member) return await set_balance(member, amount + bal) async def transfer_credits( from_: Union[discord.Member, discord.User], to: Union[discord.Member, discord.User], amount: int, ): """Transfer a given amount of credits from one account to another. Parameters ---------- from_: Union[discord.Member, discord.User] The member to transfer from. to : Union[discord.Member, discord.User] The member to transfer to. amount : int The amount to transfer. Returns ------- int The new balance of the member gaining credits. Raises ------ ValueError If the amount is invalid or if ``from_`` has insufficient funds. TypeError If the amount is not an `int`. RuntimeError If the bank is guild-specific and a discord.User object is provided. BalanceTooHigh If the balance after the transfer would be greater than ``bank._MAX_BALANCE``. 
""" if not isinstance(amount, int): raise TypeError("Transfer amount must be of type int, not {}.".format(type(amount))) if _invalid_amount(amount): raise ValueError( "Invalid transfer amount {} <= 0".format( humanize_number(amount, override_locale="en_US") ) ) guild = getattr(to, "guild", None) max_bal = await get_max_balance(guild) if await get_balance(to) + amount > max_bal: currency = await get_currency_name(guild) raise errors.BalanceTooHigh( user=to.display_name, max_balance=max_bal, currency_name=currency ) await withdraw_credits(from_, amount) return await deposit_credits(to, amount) async def wipe_bank(guild: Optional[discord.Guild] = None) -> None: """Delete all accounts from the bank. Parameters ---------- guild : discord.Guild The guild to clear accounts for. If unsupplied and the bank is per-server, all accounts in every guild will be wiped. """ if await is_global(): await _conf.clear_all_users() else: await _conf.clear_all_members(guild) async def bank_prune(bot: Kron, guild: discord.Guild = None, user_id: int = None) -> None: """Prune bank accounts from the bank. Parameters ---------- bot : Kron The bot. guild : discord.Guild The guild to prune. This is required if the bank is set to local. user_id : int The id of the user whose account will be pruned. If supplied this will prune only this user's bank account otherwise it will prune all invalid users from the bank. Raises ------ BankPruneError If guild is :code:`None` and the bank is Local. """ global_bank = await is_global() if global_bank: _guilds = [g for g in bot.guilds if not g.unavailable and g.large and not g.chunked] _uguilds = [g for g in bot.guilds if g.unavailable] group = _conf._get_base_group(_conf.USER) else: if guild is None: raise BankPruneError("'guild' can't be None when pruning a local bank") _guilds = [guild] if not guild.unavailable and guild.large else [] _uguilds = [guild] if guild.unavailable else [] group = _conf._get_base_group(_conf.MEMBER, str(guild.id)) if user_id is None: await bot.request_offline_members(*_guilds) accounts = await group.all() tmp = accounts.copy() members = bot.get_all_members() if global_bank else guild.members user_list = {str(m.id) for m in members if m.guild not in _uguilds} async with group.all() as bank_data: # FIXME: use-config-bulk-update if user_id is None: for acc in tmp: if acc not in user_list: del bank_data[acc] else: user_id = str(user_id) if user_id in bank_data: del bank_data[user_id] async def get_leaderboard(positions: int = None, guild: discord.Guild = None) -> List[tuple]: """ Gets the bank's leaderboard Parameters ---------- positions : `int` The number of positions to get guild : discord.Guild The guild to get the leaderboard of. 
If the bank is global and this is provided, get only guild members on the leaderboard Returns ------- `list` of `tuple` The sorted leaderboard in the form of :code:`(user_id, raw_account)` Raises ------ TypeError If the bank is guild-specific and no guild was specified """ if await is_global(): raw_accounts = await _conf.all_users() if guild is not None: tmp = raw_accounts.copy() for acc in tmp: if not guild.get_member(acc): del raw_accounts[acc] else: if guild is None: raise TypeError("Expected a guild, got NoneType object instead!") raw_accounts = await _conf.all_members(guild) sorted_acc = sorted(raw_accounts.items(), key=lambda x: x[1]["balance"], reverse=True) if positions is None: return sorted_acc else: return sorted_acc[:positions] async def get_leaderboard_position( member: Union[discord.User, discord.Member] ) -> Union[int, None]: """ Get the leaderboard position for the specified user Parameters ---------- member : `discord.User` or `discord.Member` The user to get the leaderboard position of Returns ------- `int` The position of the user on the leaderboard Raises ------ TypeError If the bank is currently guild-specific and a `discord.User` object was passed in """ if await is_global(): guild = None else: guild = member.guild if hasattr(member, "guild") else None try: leaderboard = await get_leaderboard(None, guild) except TypeError: raise else: pos = discord.utils.find(lambda x: x[1][0] == member.id, enumerate(leaderboard, 1)) if pos is None: return None else: return pos[0] async def get_account(member: Union[discord.Member, discord.User]) -> Account: """Get the appropriate account for the given user or member. A member is required if the bank is currently guild specific. Parameters ---------- member : `discord.User` or `discord.Member` The user whose account to get. Returns ------- Account The user's account. """ if await is_global(): all_accounts = await _conf.all_users() else: all_accounts = await _conf.all_members(member.guild) if member.id not in all_accounts: acc_data = {"name": member.display_name, "created_at": _DEFAULT_MEMBER["created_at"]} try: acc_data["balance"] = await get_default_balance(member.guild) except AttributeError: acc_data["balance"] = await get_default_balance() else: acc_data = all_accounts[member.id] acc_data["created_at"] = _decode_time(acc_data["created_at"]) return Account(**acc_data) async def is_global() -> bool: """Determine if the bank is currently global. Returns ------- bool :code:`True` if the bank is global, otherwise :code:`False`. """ return await _conf.is_global() async def set_global(global_: bool) -> bool: """Set global status of the bank. .. important:: All accounts are reset when you switch! Parameters ---------- global_ : bool :code:`True` will set bank to global mode. Returns ------- bool New bank mode, :code:`True` is global. Raises ------ RuntimeError If bank is becoming global and a `discord.Member` was not provided. """ if (await is_global()) is global_: return global_ if await is_global(): await _conf.clear_all_users() else: await _conf.clear_all_members() await _conf.is_global.set(global_) return global_ async def get_bank_name(guild: discord.Guild = None) -> str: """Get the current bank name. Parameters ---------- guild : `discord.Guild`, optional The guild to get the bank name for (required if bank is guild-specific). Returns ------- str The bank's name. Raises ------ RuntimeError If the bank is guild-specific and guild was not provided. 
""" if await is_global(): return await _conf.bank_name() elif guild is not None: return await _conf.guild(guild).bank_name() else: raise RuntimeError("Guild parameter is required and missing.") async def set_bank_name(name: str, guild: discord.Guild = None) -> str: """Set the bank name. Parameters ---------- name : str The new name for the bank. guild : `discord.Guild`, optional The guild to set the bank name for (required if bank is guild-specific). Returns ------- str The new name for the bank. Raises ------ RuntimeError If the bank is guild-specific and guild was not provided. """ if await is_global(): await _conf.bank_name.set(name) elif guild is not None: await _conf.guild(guild).bank_name.set(name) else: raise RuntimeError("Guild must be provided if setting the name of a guild-specific bank.") return name async def get_currency_name(guild: discord.Guild = None) -> str: """Get the currency name of the bank. Parameters ---------- guild : `discord.Guild`, optional The guild to get the currency name for (required if bank is guild-specific). Returns ------- str The currency name. Raises ------ RuntimeError If the bank is guild-specific and guild was not provided. """ if await is_global(): return await _conf.currency() elif guild is not None: return await _conf.guild(guild).currency() else: raise RuntimeError("Guild must be provided.") async def set_currency_name(name: str, guild: discord.Guild = None) -> str: """Set the currency name for the bank. Parameters ---------- name : str The new name for the currency. guild : `discord.Guild`, optional The guild to set the currency name for (required if bank is guild-specific). Returns ------- str The new name for the currency. Raises ------ RuntimeError If the bank is guild-specific and guild was not provided. """ if await is_global(): await _conf.currency.set(name) elif guild is not None: await _conf.guild(guild).currency.set(name) else: raise RuntimeError( "Guild must be provided if setting the currency name of a guild-specific bank." ) return name async def get_max_balance(guild: discord.Guild = None) -> int: """Get the max balance for the bank. Parameters ---------- guild : `discord.Guild`, optional The guild to get the max balance for (required if bank is guild-specific). Returns ------- int The maximum allowed balance. Raises ------ RuntimeError If the bank is guild-specific and guild was not provided. """ if await is_global(): return await _conf.max_balance() elif guild is not None: return await _conf.guild(guild).max_balance() else: raise RuntimeError("Guild must be provided.") async def set_max_balance(amount: int, guild: discord.Guild = None) -> int: """Set the maximum balance for the bank. Parameters ---------- amount : int The new maximum balance. guild : `discord.Guild`, optional The guild to set the max balance for (required if bank is guild-specific). Returns ------- int The new maximum balance. Raises ------ RuntimeError If the bank is guild-specific and guild was not provided. ValueError If the amount is less than 0 or higher than 2 ** 63 - 1. """ if not (0 < amount <= _MAX_BALANCE): raise ValueError( "Amount must be greater than zero and less than {max}.".format( max=humanize_number(_MAX_BALANCE, override_locale="en_US") ) ) if await is_global(): await _conf.max_balance.set(amount) elif guild is not None: await _conf.guild(guild).max_balance.set(amount) else: raise RuntimeError( "Guild must be provided if setting the maximum balance of a guild-specific bank." 
) return amount async def get_default_balance(guild: discord.Guild = None) -> int: """Get the current default balance amount. Parameters ---------- guild : `discord.Guild`, optional The guild to get the default balance for (required if bank is guild-specific). Returns ------- int The bank's default balance. Raises ------ RuntimeError If the bank is guild-specific and guild was not provided. """ if await is_global(): return await _conf.default_balance() elif guild is not None: return await _conf.guild(guild).default_balance() else: raise RuntimeError("Guild is missing and required!") async def set_default_balance(amount: int, guild: discord.Guild = None) -> int: """Set the default balance amount. Parameters ---------- amount : int The new default balance. guild : `discord.Guild`, optional The guild to set the default balance for (required if bank is guild-specific). Returns ------- int The new default balance. Raises ------ RuntimeError If the bank is guild-specific and guild was not provided. ValueError If the amount is less than 0 or higher than the max allowed balance. """ amount = int(amount) max_bal = await get_max_balance(guild) if not (0 <= amount <= max_bal): raise ValueError( "Amount must be greater than or equal zero and less than or equal {max}.".format( max=humanize_number(max_bal, override_locale="en_US") ) ) if await is_global(): await _conf.default_balance.set(amount) elif guild is not None: await _conf.guild(guild).default_balance.set(amount) else: raise RuntimeError("Guild is missing and required.") return amount class AbortPurchase(Exception): pass def cost(amount: int): """ Decorates a coroutine-function or command to have a cost. If the command raises an exception, the cost will be refunded. You can intentionally refund by raising `AbortPurchase` (this error will be consumed and not show to users) Other exceptions will propagate and will be handled by Kron's (and/or any other configured) error handling. """ if not isinstance(amount, int) or amount < 0: raise ValueError("This decorator requires an integer cost greater than or equal to zero") def deco(coro_or_command): is_command = isinstance(coro_or_command, commands.Command) if not is_command and not asyncio.iscoroutinefunction(coro_or_command): raise TypeError("@bank.cost() can only be used on commands or `async def` functions") coro = coro_or_command.callback if is_command else coro_or_command @wraps(coro) async def wrapped(*args, **kwargs): context: commands.Context = None for arg in args: if isinstance(arg, commands.Context): context = arg break if not context.guild and not await is_global(): raise commands.UserFeedbackCheckFailure( _("Can't pay for this command in DM without a global bank.") ) try: await withdraw_credits(context.author, amount) except Exception: credits_name = await get_currency_name(context.guild) raise commands.UserFeedbackCheckFailure( _("You need at least {cost} {currency} to use this command.").format( cost=humanize_number(amount), currency=credits_name ) ) else: try: return await coro(*args, **kwargs) except AbortPurchase: await deposit_credits(context.author, amount) except Exception: await deposit_credits(context.author, amount) raise if not is_command: return wrapped else: wrapped.__module__ = coro_or_command.callback.__module__ coro_or_command.callback = wrapped return coro_or_command return deco
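
The `cost()` decorator above is the piece most easily misused, so a usage sketch may help: it withdraws the fee before the command body runs and refunds it if the body raises. The cog and command below are illustrative only; `kronbot.core.commands` is assumed to re-export the usual `Cog`/`command` helpers, as bank.py's own relative import suggests.

import random

from kronbot.core import bank, commands


class Casino(commands.Cog):
    """Illustrative cog; not part of bank.py."""

    @bank.cost(10)                    # withdraws 10 credits up front
    @commands.command()
    async def spin(self, ctx: commands.Context):
        if random.random() < 0.5:
            # Consumed by the decorator; the 10 credits are refunded
            # and no error is shown to the user.
            raise bank.AbortPurchase
        await bank.deposit_credits(ctx.author, 20)
        await ctx.send("You won 20 credits!")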
PypiClean
/Async_Server-0.0.1-py3-none-any.whl/server/core.py
import threading
import logging
import select
import socket
import json
import hmac
import binascii
import os

from common.metaclasses import ServerMaker
from common.descryptors import Port
from common.variables import *
from common.utils import send_message, get_message
from common.decos import login_required

# Load the logger
logger = logging.getLogger('server')


class MessageProcessor(threading.Thread):
    '''
    Main server class. Accepts connections and dictionary packets
    from clients, and processes incoming messages.
    Runs as a separate thread.
    '''
    port = Port()

    def __init__(self, listen_address, listen_port, database):
        # Connection parameters
        self.addr = listen_address
        self.port = listen_port

        # Server database
        self.database = database

        # Socket the server operates on
        self.sock = None

        # List of connected clients.
        self.clients = []

        # Sockets
        self.listen_sockets = None
        self.error_sockets = None

        # Flag that keeps the main loop running
        self.running = True

        # Dictionary mapping usernames to their sockets.
        self.names = dict()

        # Parent constructor
        super().__init__()

    def run(self):
        '''Main loop of the thread.'''
        # Initialize the socket
        self.init_socket()

        # Main server loop
        while self.running:
            # Wait for a connection; on timeout, catch the exception.
            try:
                client, client_address = self.sock.accept()
            except OSError:
                pass
            else:
                logger.info(f'Connection established with host {client_address}')
                client.settimeout(5)
                self.clients.append(client)

            recv_data_lst = []
            send_data_lst = []
            err_lst = []
            # Check for waiting clients
            try:
                if self.clients:
                    recv_data_lst, self.listen_sockets, self.error_sockets = select.select(
                        self.clients, self.clients, [], 0)
            except OSError as err:
                logger.error(f'Socket error: {err.errno}')

            # Receive messages; on error, drop the client.
            if recv_data_lst:
                for client_with_message in recv_data_lst:
                    try:
                        self.process_client_message(
                            get_message(client_with_message), client_with_message)
                    except (OSError, json.JSONDecodeError, TypeError):
                        self.remove_client(client_with_message)

    def remove_client(self, client):
        '''
        Handle a client whose connection has been lost.
        Finds the client and removes it from the lists and the database.
        '''
        logger.info(f'Client {client.getpeername()} disconnected from the server.')
        for name in self.names:
            if self.names[name] == client:
                self.database.user_logout(name)
                del self.names[name]
                break
        self.clients.remove(client)
        client.close()

    def init_socket(self):
        '''Initialize the server socket.'''
        logger.info(
            f'Server started; listening port: {self.port}, listening address: {self.addr}. If no address is given, connections from any address are accepted.')
        # Prepare the socket
        transport = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        transport.bind((self.addr, self.port))
        transport.settimeout(0.5)

        # Start listening on the socket.
        self.sock = transport
        self.sock.listen(MAX_CONNECTIONS)

    def process_message(self, message):
        '''
        Send a message to a client.
        '''
        if message[DESTINATION] in self.names and self.names[message[DESTINATION]] in self.listen_sockets:
            try:
                send_message(self.names[message[DESTINATION]], message)
                logger.info(
                    f'Message sent to user {message[DESTINATION]} from user {message[SENDER]}.')
            except OSError:
                self.remove_client(message[DESTINATION])
        elif message[DESTINATION] in self.names and self.names[message[DESTINATION]] not in self.listen_sockets:
            logger.error(
                f'Connection with client {message[DESTINATION]} was lost. Connection closed, delivery impossible.')
            self.remove_client(self.names[message[DESTINATION]])
        else:
            logger.error(
                f'User {message[DESTINATION]} is not registered on the server; the message cannot be sent.')

    @login_required
    def process_client_message(self, message, client):
        '''Handle incoming messages.'''
        logger.debug(f'Parsing message from client: {message}')
        # If this is a presence message, accept it and reply
        if ACTION in message and message[ACTION] == PRESENCE and TIME in message and USER in message:
            # For a presence message, run the authorization routine.
            self.autorize_user(message, client)
        # If this is a text message, forward it to the recipient.
        elif ACTION in message and message[ACTION] == MESSAGE and DESTINATION in message and TIME in message \
                and SENDER in message and MESSAGE_TEXT in message and self.names[message[SENDER]] == client:
            if message[DESTINATION] in self.names:
                self.database.process_message(
                    message[SENDER], message[DESTINATION])
                self.process_message(message)
                try:
                    send_message(client, RESPONSE_200)
                except OSError:
                    self.remove_client(client)
            else:
                response = RESPONSE_400
                response[ERROR] = 'The user is not registered on the server.'
                try:
                    send_message(client, response)
                except OSError:
                    pass
            return
        # If the client is logging out
        elif ACTION in message and message[ACTION] == EXIT and ACCOUNT_NAME in message \
                and self.names[message[ACCOUNT_NAME]] == client:
            self.remove_client(client)
        # If this is a contact-list request
        elif ACTION in message and message[ACTION] == GET_CONTACTS and USER in message and \
                self.names[message[USER]] == client:
            response = RESPONSE_202
            response[LIST_INFO] = self.database.get_contacts(message[USER])
            try:
                send_message(client, response)
            except OSError:
                self.remove_client(client)
        # If this is an add-contact request
        elif ACTION in message and message[ACTION] == ADD_CONTACT and ACCOUNT_NAME in message and USER in message \
                and self.names[message[USER]] == client:
            self.database.add_contact(message[USER], message[ACCOUNT_NAME])
            try:
                send_message(client, RESPONSE_200)
            except OSError:
                self.remove_client(client)
        # If this is a remove-contact request
        elif ACTION in message and message[ACTION] == REMOVE_CONTACT and ACCOUNT_NAME in message and USER in message \
                and self.names[message[USER]] == client:
            self.database.remove_contact(message[USER], message[ACCOUNT_NAME])
            try:
                send_message(client, RESPONSE_200)
            except OSError:
                self.remove_client(client)
        # If this is a known-users request
        elif ACTION in message and message[ACTION] == USERS_REQUEST and ACCOUNT_NAME in message \
                and self.names[message[ACCOUNT_NAME]] == client:
            response = RESPONSE_202
            response[LIST_INFO] = [user[0]
                                   for user in self.database.users_list()]
            try:
                send_message(client, response)
            except OSError:
                self.remove_client(client)
        # If this is a request for a user's public key
        elif ACTION in message and message[ACTION] == PUBLIC_KEY_REQUEST and ACCOUNT_NAME in message:
            response = RESPONSE_511
            response[DATA] = self.database.get_pubkey(message[ACCOUNT_NAME])
            # The key may not exist yet (the user has never logged in);
            # send 400 in that case
            if response[DATA]:
                try:
                    send_message(client, response)
                except OSError:
                    self.remove_client(client)
            else:
                response = RESPONSE_400
                response[ERROR] = 'There is no public key for this user'
                try:
                    send_message(client, response)
                except OSError:
                    self.remove_client(client)
        # Otherwise return Bad Request
        else:
            response = RESPONSE_400
            response[ERROR] = 'Malformed request.'
            try:
                send_message(client, response)
            except OSError:
                self.remove_client(client)

    def autorize_user(self, message, sock):
        '''Implement user authorization.'''
        # If the username is already taken, return 400
        if message[USER][ACCOUNT_NAME] in self.names.keys():
            response = RESPONSE_400
            response[ERROR] = 'The username is already taken.'
            try:
                send_message(sock, response)
            except OSError:
                pass
            self.clients.remove(sock)
            sock.close()
        # Check that the user is registered on the server.
        elif not self.database.check_user(message[USER][ACCOUNT_NAME]):
            response = RESPONSE_400
            response[ERROR] = 'The user is not registered.'
            try:
                send_message(sock, response)
            except OSError:
                pass
            self.clients.remove(sock)
            sock.close()
        else:
            # Otherwise reply 511 and run the authorization handshake
            # Template dictionary
            message_auth = RESPONSE_511
            # Random bytes in hex representation
            random_str = binascii.hexlify(os.urandom(64))
            # Bytes cannot go into the dict; decode them (json.dumps -> TypeError)
            message_auth[DATA] = random_str.decode('ascii')
            # Hash the password together with the random string and keep
            # the server-side digest.
            # Note: digestmod is mandatory on Python 3.8+; 'MD5' matches the
            # implicit default of the older Python versions this code targeted.
            hash = hmac.new(
                self.database.get_hash(
                    message[USER][ACCOUNT_NAME]),
                random_str,
                'MD5')
            digest = hash.digest()
            try:
                # Exchange with the client
                send_message(sock, message_auth)
                ans = get_message(sock)
            except OSError:
                sock.close()
                return
            client_digest = binascii.a2b_base64(ans[DATA])
            # If the client's reply is valid, save it to the list of
            # active users.
            if RESPONSE in ans and ans[RESPONSE] == 511 and hmac.compare_digest(
                    digest, client_digest):
                self.names[message[USER][ACCOUNT_NAME]] = sock
                client_ip, client_port = sock.getpeername()
                try:
                    send_message(sock, RESPONSE_200)
                except OSError:
                    self.remove_client(message[USER][ACCOUNT_NAME])
                # Add the user to the active list and, if the public key
                # has changed, store the new one
                self.database.user_login(
                    message[USER][ACCOUNT_NAME],
                    client_ip,
                    client_port,
                    message[USER][PUBLIC_KEY])
            else:
                response = RESPONSE_400
                response[ERROR] = 'Wrong password.'
                try:
                    send_message(sock, response)
                except OSError:
                    pass
                self.clients.remove(sock)
                sock.close()

    def service_update_lists(self):
        '''Send the 205 service message to clients.'''
        for client in self.names:
            try:
                send_message(self.names[client], RESPONSE_205)
            except OSError:
                self.remove_client(self.names[client])
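
For context, the client's half of the 511 handshake in `autorize_user` looks roughly like the sketch below. The PBKDF2 parameters (sha512, salt derived from the account name, 10000 iterations) are assumptions: they must match whatever `database.get_hash()` stored server-side, and the `'MD5'` digestmod must match the server's.

import binascii
import hashlib
import hmac

from common.variables import DATA, RESPONSE


def answer_auth_challenge(server_message: dict, account_name: str, password: str) -> dict:
    """Build the client's reply to the server's 511 challenge (sketch only)."""
    # Assumed key derivation; must mirror database.get_hash() on the server.
    passwd_hash = hashlib.pbkdf2_hmac(
        'sha512',
        password.encode('utf-8'),
        account_name.lower().encode('utf-8'),
        10000)
    random_str = server_message[DATA].encode('ascii')
    # Same digestmod as the server's hmac.new(..., 'MD5') above.
    digest = hmac.new(binascii.hexlify(passwd_hash), random_str, 'MD5').digest()
    return {RESPONSE: 511, DATA: binascii.b2a_base64(digest).decode('ascii')}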
PypiClean
/FreePyBX-1.0-RC1.tar.gz/FreePyBX-1.0-RC1/freepybx/public/js/dojox/grid/_FocusManager.js
define("dojox/grid/_FocusManager",["dojo/_base/array","dojo/_base/lang","dojo/_base/declare","dojo/_base/connect","dojo/_base/event","dojo/_base/sniff","dojo/query","./util","dojo/_base/html"],function(_1,_2,_3,_4,_5,_6,_7,_8,_9){ return _3("dojox.grid._FocusManager",null,{constructor:function(_a){ this.grid=_a; this.cell=null; this.rowIndex=-1; this._connects=[]; this._headerConnects=[]; this.headerMenu=this.grid.headerMenu; this._connects.push(_4.connect(this.grid.domNode,"onfocus",this,"doFocus")); this._connects.push(_4.connect(this.grid.domNode,"onblur",this,"doBlur")); this._connects.push(_4.connect(this.grid.domNode,"mousedown",this,"_mouseDown")); this._connects.push(_4.connect(this.grid.domNode,"mouseup",this,"_mouseUp")); this._connects.push(_4.connect(this.grid.domNode,"oncontextmenu",this,"doContextMenu")); this._connects.push(_4.connect(this.grid.lastFocusNode,"onfocus",this,"doLastNodeFocus")); this._connects.push(_4.connect(this.grid.lastFocusNode,"onblur",this,"doLastNodeBlur")); this._connects.push(_4.connect(this.grid,"_onFetchComplete",this,"_delayedCellFocus")); this._connects.push(_4.connect(this.grid,"postrender",this,"_delayedHeaderFocus")); },destroy:function(){ _1.forEach(this._connects,_4.disconnect); _1.forEach(this._headerConnects,_4.disconnect); delete this.grid; delete this.cell; },_colHeadNode:null,_colHeadFocusIdx:null,_contextMenuBindNode:null,tabbingOut:false,focusClass:"dojoxGridCellFocus",focusView:null,initFocusView:function(){ this.focusView=this.grid.views.getFirstScrollingView()||this.focusView||this.grid.views.views[0]; this._initColumnHeaders(); },isFocusCell:function(_b,_c){ return (this.cell==_b)&&(this.rowIndex==_c); },isLastFocusCell:function(){ if(this.cell){ return (this.rowIndex==this.grid.rowCount-1)&&(this.cell.index==this.grid.layout.cellCount-1); } return false; },isFirstFocusCell:function(){ if(this.cell){ return (this.rowIndex===0)&&(this.cell.index===0); } return false; },isNoFocusCell:function(){ return (this.rowIndex<0)||!this.cell; },isNavHeader:function(){ return (!!this._colHeadNode); },getHeaderIndex:function(){ if(this._colHeadNode){ return _1.indexOf(this._findHeaderCells(),this._colHeadNode); }else{ return -1; } },_focusifyCellNode:function(_d){ var n=this.cell&&this.cell.getNode(this.rowIndex); if(n){ _9.toggleClass(n,this.focusClass,_d); if(_d){ var sl=this.scrollIntoView(); try{ if(!this.grid.edit.isEditing()){ _8.fire(n,"focus"); if(sl){ this.cell.view.scrollboxNode.scrollLeft=sl; } } } catch(e){ } } } },_delayedCellFocus:function(){ if(this.isNavHeader()||!this.grid.focused){ return; } var n=this.cell&&this.cell.getNode(this.rowIndex); if(n){ try{ if(!this.grid.edit.isEditing()){ _9.toggleClass(n,this.focusClass,true); if(this._colHeadNode){ this.blurHeader(); } _8.fire(n,"focus"); } } catch(e){ } } },_delayedHeaderFocus:function(){ if(this.isNavHeader()){ this.focusHeader(); this.grid.domNode.focus(); } },_initColumnHeaders:function(){ _1.forEach(this._headerConnects,_4.disconnect); this._headerConnects=[]; var _e=this._findHeaderCells(); for(var i=0;i<_e.length;i++){ this._headerConnects.push(_4.connect(_e[i],"onfocus",this,"doColHeaderFocus")); this._headerConnects.push(_4.connect(_e[i],"onblur",this,"doColHeaderBlur")); } },_findHeaderCells:function(){ var _f=_7("th",this.grid.viewsHeaderNode); var _10=[]; for(var i=0;i<_f.length;i++){ var _11=_f[i]; var _12=_9.hasAttr(_11,"tabIndex"); var _13=_9.attr(_11,"tabIndex"); if(_12&&_13<0){ _10.push(_11); } } return _10; },_setActiveColHeader:function(_14,_15,_16){ 
this.grid.domNode.setAttribute("aria-activedescendant",_14.id); if(_16!=null&&_16>=0&&_16!=_15){ _9.toggleClass(this._findHeaderCells()[_16],this.focusClass,false); } _9.toggleClass(_14,this.focusClass,true); this._colHeadNode=_14; this._colHeadFocusIdx=_15; this._scrollHeader(this._colHeadFocusIdx); },scrollIntoView:function(){ var _17=(this.cell?this._scrollInfo(this.cell):null); if(!_17||!_17.s){ return null; } var rt=this.grid.scroller.findScrollTop(this.rowIndex); if(_17.n&&_17.sr){ if(_17.n.offsetLeft+_17.n.offsetWidth>_17.sr.l+_17.sr.w){ _17.s.scrollLeft=_17.n.offsetLeft+_17.n.offsetWidth-_17.sr.w; }else{ if(_17.n.offsetLeft<_17.sr.l){ _17.s.scrollLeft=_17.n.offsetLeft; } } } if(_17.r&&_17.sr){ if(rt+_17.r.offsetHeight>_17.sr.t+_17.sr.h){ this.grid.setScrollTop(rt+_17.r.offsetHeight-_17.sr.h); }else{ if(rt<_17.sr.t){ this.grid.setScrollTop(rt); } } } return _17.s.scrollLeft; },_scrollInfo:function(_18,_19){ if(_18){ var cl=_18,sbn=cl.view.scrollboxNode,_1a={w:sbn.clientWidth,l:sbn.scrollLeft,t:sbn.scrollTop,h:sbn.clientHeight},rn=cl.view.getRowNode(this.rowIndex); return {c:cl,s:sbn,sr:_1a,n:(_19?_19:_18.getNode(this.rowIndex)),r:rn}; } return null; },_scrollHeader:function(_1b){ var _1c=null; if(this._colHeadNode){ var _1d=this.grid.getCell(_1b); if(!_1d){ return; } _1c=this._scrollInfo(_1d,_1d.getNode(0)); } if(_1c&&_1c.s&&_1c.sr&&_1c.n){ var _1e=_1c.sr.l+_1c.sr.w; if(_1c.n.offsetLeft+_1c.n.offsetWidth>_1e){ _1c.s.scrollLeft=_1c.n.offsetLeft+_1c.n.offsetWidth-_1c.sr.w; }else{ if(_1c.n.offsetLeft<_1c.sr.l){ _1c.s.scrollLeft=_1c.n.offsetLeft; }else{ if(_6("ie")<=7&&_1d&&_1d.view.headerNode){ _1d.view.headerNode.scrollLeft=_1c.s.scrollLeft; } } } } },_isHeaderHidden:function(){ var _1f=this.focusView; if(!_1f){ for(var i=0,_20;(_20=this.grid.views.views[i]);i++){ if(_20.headerNode){ _1f=_20; break; } } } return (_1f&&_9.getComputedStyle(_1f.headerNode).display=="none"); },colSizeAdjust:function(e,_21,_22){ var _23=this._findHeaderCells(); var _24=this.focusView; if(!_24){ for(var i=0,_25;(_25=this.grid.views.views[i]);i++){ if(_25.header.tableMap.map){ _24=_25; break; } } } var _26=_23[_21]; if(!_24||(_21==_23.length-1&&_21===0)){ return; } _24.content.baseDecorateEvent(e); e.cellNode=_26; e.cellIndex=_24.content.getCellNodeIndex(e.cellNode); e.cell=(e.cellIndex>=0?this.grid.getCell(e.cellIndex):null); if(_24.header.canResize(e)){ var _27={l:_22}; var _28=_24.header.colResizeSetup(e,false); _24.header.doResizeColumn(_28,null,_27); _24.update(); } },styleRow:function(_29){ return; },setFocusIndex:function(_2a,_2b){ this.setFocusCell(this.grid.getCell(_2b),_2a); },setFocusCell:function(_2c,_2d){ if(_2c&&!this.isFocusCell(_2c,_2d)){ this.tabbingOut=false; if(this._colHeadNode){ this.blurHeader(); } this._colHeadNode=this._colHeadFocusIdx=null; this.focusGridView(); this._focusifyCellNode(false); this.cell=_2c; this.rowIndex=_2d; this._focusifyCellNode(true); } if(_6("opera")){ setTimeout(_2.hitch(this.grid,"onCellFocus",this.cell,this.rowIndex),1); }else{ this.grid.onCellFocus(this.cell,this.rowIndex); } },next:function(){ if(this.cell){ var row=this.rowIndex,col=this.cell.index+1,cc=this.grid.layout.cellCount-1,rc=this.grid.rowCount-1; if(col>cc){ col=0; row++; } if(row>rc){ col=cc; row=rc; } if(this.grid.edit.isEditing()){ var _2e=this.grid.getCell(col); if(!this.isLastFocusCell()&&(!_2e.editable||this.grid.canEdit&&!this.grid.canEdit(_2e,row))){ this.cell=_2e; this.rowIndex=row; this.next(); return; } } this.setFocusIndex(row,col); } },previous:function(){ if(this.cell){ var 
row=(this.rowIndex||0),col=(this.cell.index||0)-1; if(col<0){ col=this.grid.layout.cellCount-1; row--; } if(row<0){ row=0; col=0; } if(this.grid.edit.isEditing()){ var _2f=this.grid.getCell(col); if(!this.isFirstFocusCell()&&!_2f.editable){ this.cell=_2f; this.rowIndex=row; this.previous(); return; } } this.setFocusIndex(row,col); } },move:function(_30,_31){ var _32=_31<0?-1:1; if(this.isNavHeader()){ var _33=this._findHeaderCells(); var _34=currentIdx=_1.indexOf(_33,this._colHeadNode); currentIdx+=_31; while(currentIdx>=0&&currentIdx<_33.length&&_33[currentIdx].style.display=="none"){ currentIdx+=_32; } if((currentIdx>=0)&&(currentIdx<_33.length)){ this._setActiveColHeader(_33[currentIdx],currentIdx,_34); } }else{ if(this.cell){ var sc=this.grid.scroller,r=this.rowIndex,rc=this.grid.rowCount-1,row=Math.min(rc,Math.max(0,r+_30)); if(_30){ if(_30>0){ if(row>sc.getLastPageRow(sc.page)){ this.grid.setScrollTop(this.grid.scrollTop+sc.findScrollTop(row)-sc.findScrollTop(r)); } }else{ if(_30<0){ if(row<=sc.getPageRow(sc.page)){ this.grid.setScrollTop(this.grid.scrollTop-sc.findScrollTop(r)-sc.findScrollTop(row)); } } } } var cc=this.grid.layout.cellCount-1,i=this.cell.index,col=Math.min(cc,Math.max(0,i+_31)); var _35=this.grid.getCell(col); while(col>=0&&col<cc&&_35&&_35.hidden===true){ col+=_32; _35=this.grid.getCell(col); } if(!_35||_35.hidden===true){ col=i; } var n=_35.getNode(row); if(!n&&_30){ if((row+_30)>=0&&(row+_30)<=rc){ this.move(_30>0?++_30:--_30,_31); } return; }else{ if((!n||_9.style(n,"display")==="none")&&_31){ if((col+_30)>=0&&(col+_30)<=cc){ this.move(_30,_31>0?++_31:--_31); } return; } } this.setFocusIndex(row,col); if(_30){ this.grid.updateRow(r); } } } },previousKey:function(e){ if(this.grid.edit.isEditing()){ _5.stop(e); this.previous(); }else{ if(!this.isNavHeader()&&!this._isHeaderHidden()){ this.grid.domNode.focus(); _5.stop(e); }else{ this.tabOut(this.grid.domNode); if(this._colHeadFocusIdx!=null){ _9.toggleClass(this._findHeaderCells()[this._colHeadFocusIdx],this.focusClass,false); this._colHeadFocusIdx=null; } this._focusifyCellNode(false); } } },nextKey:function(e){ var _36=(this.grid.rowCount===0); if(e.target===this.grid.domNode&&this._colHeadFocusIdx==null){ this.focusHeader(); _5.stop(e); }else{ if(this.isNavHeader()){ this.blurHeader(); if(!this.findAndFocusGridCell()){ this.tabOut(this.grid.lastFocusNode); } this._colHeadNode=this._colHeadFocusIdx=null; }else{ if(this.grid.edit.isEditing()){ _5.stop(e); this.next(); }else{ this.tabOut(this.grid.lastFocusNode); } } } },tabOut:function(_37){ this.tabbingOut=true; _37.focus(); },focusGridView:function(){ _8.fire(this.focusView,"focus"); },focusGrid:function(_38){ this.focusGridView(); this._focusifyCellNode(true); },findAndFocusGridCell:function(){ var _39=true; var _3a=(this.grid.rowCount===0); if(this.isNoFocusCell()&&!_3a){ var _3b=0; var _3c=this.grid.getCell(_3b); if(_3c.hidden){ _3b=this.isNavHeader()?this._colHeadFocusIdx:0; } this.setFocusIndex(0,_3b); }else{ if(this.cell&&!_3a){ if(this.focusView&&!this.focusView.rowNodes[this.rowIndex]){ this.grid.scrollToRow(this.rowIndex); } this.focusGrid(); }else{ _39=false; } } this._colHeadNode=this._colHeadFocusIdx=null; return _39; },focusHeader:function(){ var _3d=this._findHeaderCells(); var _3e=this._colHeadFocusIdx; if(this._isHeaderHidden()){ this.findAndFocusGridCell(); }else{ if(!this._colHeadFocusIdx){ if(this.isNoFocusCell()){ this._colHeadFocusIdx=0; }else{ this._colHeadFocusIdx=this.cell.index; } } } this._colHeadNode=_3d[this._colHeadFocusIdx]; 
while(this._colHeadNode&&this._colHeadFocusIdx>=0&&this._colHeadFocusIdx<_3d.length&&this._colHeadNode.style.display=="none"){ this._colHeadFocusIdx++; this._colHeadNode=_3d[this._colHeadFocusIdx]; } if(this._colHeadNode&&this._colHeadNode.style.display!="none"){ if(this.headerMenu&&this._contextMenuBindNode!=this.grid.domNode){ this.headerMenu.unBindDomNode(this.grid.viewsHeaderNode); this.headerMenu.bindDomNode(this.grid.domNode); this._contextMenuBindNode=this.grid.domNode; } this._setActiveColHeader(this._colHeadNode,this._colHeadFocusIdx,_3e); this._scrollHeader(this._colHeadFocusIdx); this._focusifyCellNode(false); }else{ this.findAndFocusGridCell(); } },blurHeader:function(){ _9.removeClass(this._colHeadNode,this.focusClass); _9.removeAttr(this.grid.domNode,"aria-activedescendant"); if(this.headerMenu&&this._contextMenuBindNode==this.grid.domNode){ var _3f=this.grid.viewsHeaderNode; this.headerMenu.unBindDomNode(this.grid.domNode); this.headerMenu.bindDomNode(_3f); this._contextMenuBindNode=_3f; } },doFocus:function(e){ if(e&&e.target!=e.currentTarget){ _5.stop(e); return; } if(this._clickFocus){ return; } if(!this.tabbingOut){ this.focusHeader(); } this.tabbingOut=false; _5.stop(e); },doBlur:function(e){ _5.stop(e); },doContextMenu:function(e){ if(!this.headerMenu){ _5.stop(e); } },doLastNodeFocus:function(e){ if(this.tabbingOut){ this._focusifyCellNode(false); }else{ if(this.grid.rowCount>0){ if(this.isNoFocusCell()){ this.setFocusIndex(0,0); } this._focusifyCellNode(true); }else{ this.focusHeader(); } } this.tabbingOut=false; _5.stop(e); },doLastNodeBlur:function(e){ _5.stop(e); },doColHeaderFocus:function(e){ this._setActiveColHeader(e.target,_9.attr(e.target,"idx"),this._colHeadFocusIdx); this._scrollHeader(this.getHeaderIndex()); _5.stop(e); },doColHeaderBlur:function(e){ _9.toggleClass(e.target,this.focusClass,false); },_mouseDown:function(e){ this._clickFocus=dojo.some(this.grid.views.views,function(v){ return v.scrollboxNode===e.target; }); },_mouseUp:function(e){ this._clickFocus=false; }}); });
PypiClean
/Files.com-1.0.1051-py3-none-any.whl/files_sdk/models/file_comment.py
import builtins import datetime from files_sdk.api import Api from files_sdk.list_obj import ListObj from files_sdk.exceptions import InvalidParameterError, MissingParameterError, NotImplementedError class FileComment: default_attributes = { 'id': None, # int64 - File Comment ID 'body': None, # string - Comment body. 'reactions': None, # array - Reactions to this comment. 'path': None, # string - File path. } def __init__(self, attributes=None, options=None): if not isinstance(attributes, dict): attributes = {} if not isinstance(options, dict): options = {} self.set_attributes(attributes) self.options = options def set_attributes(self, attributes): for (attribute, default_value) in FileComment.default_attributes.items(): setattr(self, attribute, attributes.get(attribute, default_value)) def get_attributes(self): return {k: getattr(self, k, None) for k in FileComment.default_attributes if getattr(self, k, None) is not None} # Parameters: # body (required) - string - Comment body. def update(self, params = None): if not isinstance(params, dict): params = {} if hasattr(self, "id") and self.id: params['id'] = self.id else: raise MissingParameterError("Current object doesn't have a id") if "id" not in params: raise MissingParameterError("Parameter missing: id") if "body" not in params: raise MissingParameterError("Parameter missing: body") if "id" in params and not isinstance(params["id"], int): raise InvalidParameterError("Bad parameter: id must be an int") if "body" in params and not isinstance(params["body"], str): raise InvalidParameterError("Bad parameter: body must be an str") response, _options = Api.send_request("PATCH", "/file_comments/{id}".format(id=params['id']), params, self.options) return response.data def delete(self, params = None): if not isinstance(params, dict): params = {} if hasattr(self, "id") and self.id: params['id'] = self.id else: raise MissingParameterError("Current object doesn't have a id") if "id" not in params: raise MissingParameterError("Parameter missing: id") if "id" in params and not isinstance(params["id"], int): raise InvalidParameterError("Bad parameter: id must be an int") response, _options = Api.send_request("DELETE", "/file_comments/{id}".format(id=params['id']), params, self.options) return response.data def destroy(self, params = None): self.delete(params) def save(self): if hasattr(self, "id") and self.id: self.update(self.get_attributes()) else: new_obj = create(self.get_attributes(), self.options) self.set_attributes(new_obj.get_attributes()) # Parameters: # cursor - string - Used for pagination. When a list request has more records available, cursors are provided in the response headers `X-Files-Cursor-Next` and `X-Files-Cursor-Prev`. Send one of those cursor value here to resume an existing list from the next available record. Note: many of our SDKs have iterator methods that will automatically handle cursor-based pagination. # per_page - int64 - Number of records to show per page. (Max: 10,000, 1,000 or less is recommended). # path (required) - string - Path to operate on. 
def list_for(path, params = None, options = None): if not isinstance(params, dict): params = {} if not isinstance(options, dict): options = {} params["path"] = path if "cursor" in params and not isinstance(params["cursor"], str): raise InvalidParameterError("Bad parameter: cursor must be an str") if "per_page" in params and not isinstance(params["per_page"], int): raise InvalidParameterError("Bad parameter: per_page must be an int") if "path" in params and not isinstance(params["path"], str): raise InvalidParameterError("Bad parameter: path must be an str") if "path" not in params: raise MissingParameterError("Parameter missing: path") return ListObj(FileComment,"GET", "/file_comments/files/{path}".format(path=params['path']), params, options) # Parameters: # body (required) - string - Comment body. # path (required) - string - File path. def create(params = None, options = None): if not isinstance(params, dict): params = {} if not isinstance(options, dict): options = {} if "body" in params and not isinstance(params["body"], str): raise InvalidParameterError("Bad parameter: body must be an str") if "path" in params and not isinstance(params["path"], str): raise InvalidParameterError("Bad parameter: path must be an str") if "body" not in params: raise MissingParameterError("Parameter missing: body") if "path" not in params: raise MissingParameterError("Parameter missing: path") response, options = Api.send_request("POST", "/file_comments", params, options) return FileComment(response.data, options) # Parameters: # body (required) - string - Comment body. def update(id, params = None, options = None): if not isinstance(params, dict): params = {} if not isinstance(options, dict): options = {} params["id"] = id if "id" in params and not isinstance(params["id"], int): raise InvalidParameterError("Bad parameter: id must be an int") if "body" in params and not isinstance(params["body"], str): raise InvalidParameterError("Bad parameter: body must be an str") if "id" not in params: raise MissingParameterError("Parameter missing: id") if "body" not in params: raise MissingParameterError("Parameter missing: body") response, options = Api.send_request("PATCH", "/file_comments/{id}".format(id=params['id']), params, options) return FileComment(response.data, options) def delete(id, params = None, options = None): if not isinstance(params, dict): params = {} if not isinstance(options, dict): options = {} params["id"] = id if "id" in params and not isinstance(params["id"], int): raise InvalidParameterError("Bad parameter: id must be an int") if "id" not in params: raise MissingParameterError("Parameter missing: id") response, _options = Api.send_request("DELETE", "/file_comments/{id}".format(id=params['id']), params, options) return response.data def destroy(id, params = None, options = None): delete(id, params, options) def new(*args, **kwargs): return FileComment(*args, **kwargs)
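
A short usage sketch for the model above. `files_sdk.set_api_key()` is the SDK's standard way to authenticate; the path and body values are placeholders.

import files_sdk
from files_sdk.models.file_comment import create, list_for

files_sdk.set_api_key("YOUR_API_KEY")  # placeholder key

# Create a comment on a file, then page through the comments on it.
comment = create({"path": "folder/report.txt", "body": "Looks good."})
for existing in list_for("folder/report.txt"):
    print(existing.id, existing.body)

# The instance helper PATCHes /file_comments/{id} using the stored id.
comment.body = "Looks great."
comment.save()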
PypiClean
/Blogstrap-0.8.0.tar.gz/Blogstrap-0.8.0/blogstrap/blogstrap.py
import argparse
import os

import flask
import mimerender
import six

if six.PY2:
    from exceptions import IOError
    import sys
    reload(sys)  # noqa
    sys.setdefaultencoding('utf-8')
    import builder
    import context
    import utils
else:
    import blogstrap.builder as builder
    import blogstrap.context as context
    import blogstrap.utils as utils


class ArticleNotFound(IOError):
    pass


class ArticleHidden(Exception):
    pass


class ArticleReader(object):
    def __init__(self, path):
        try:
            with open(path) as article_file:
                text = "".join(article_file.readlines())
                text_dict = utils.parse_metadata(text)
                self.content = text_dict['content']
                self.metadata = text_dict['metadata']
        except IOError:
            raise ArticleNotFound(path)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        pass


class DefaultConfig(object):
    AUTHOR = "Blogstrap"
    DESCRIPTION = "Powered By Blogstrap"
    DEBUG = True
    BLOGROOT = "."
    BLOGTITLE = "Powered by Blogstrap"
    DEFAULT_LANG = "en"
    NAVBAR_LINKS = []
    STATIC_DIR = "images"
    TOC_BLACKLIST = []


# Registering markdown as a valid MIME.
# More info: https://tools.ietf.org/html/rfc7763
mimerender.register_mime('markdown', ('text/markdown',))
mimerender = mimerender.FlaskMimeRender()


def create_app(config_file=None):
    app = flask.Flask(__name__, static_url_path="/framework/static")
    app.config.from_object(DefaultConfig)
    if config_file:
        app.config.from_pyfile(config_file)

    # default static files directory
    staticdir = app.config.get('STATIC_DIR').rstrip("/")
    staticdir_route = os.path.basename(staticdir)

    def _render(template, message=None):
        ctx = context.context(app, message)
        result = flask.render_template(template, **ctx)
        for key, value in ctx.items():
            result = result.replace("{{ %s }}" % key, str(value))
        return result

    def render_html(message):
        return _render("index.html", message)

    def render_html_exception(exception):
        return _render("404.html")

    def render_markdown(message):
        return _render("index.md", message)

    def render_md_exception(exception):
        return _render("404.md")

    @app.route("/")
    def nothing():
        if 'HOMEPAGE' in app.config:
            return serve_blog(blogpost=app.config['HOMEPAGE'])
        # no homepage defined: return HTTP 204 No Content
        return ('', 204)

    @app.route("/%s/<image>" % staticdir_route)
    def serve_static(image):
        full_directory = os.path.join(os.getcwd(), staticdir)
        if os.path.exists(os.path.join(full_directory, image)):
            return flask.send_from_directory(full_directory, image)
        else:
            # return 404 explicitly; falling through would return None,
            # which Flask rejects as a view result
            flask.abort(404)

    @app.route("/<blogpost>", strict_slashes=False)
    @mimerender.map_exceptions(
        mapping=(
            (ArticleNotFound, '404 Article Not Found'),
            (ArticleHidden, '404 Article Hidden'),
        ),
        default='markdown',
        markdown=render_md_exception,
        html=render_html_exception,
    )
    @mimerender(
        default='markdown',
        html=render_html,
        markdown=render_markdown)
    def serve_blog(blogpost):
        if blogpost.startswith("."):
            raise ArticleHidden()
        root_directory = app.config['BLOGROOT']
        blogpost = "/".join((root_directory, blogpost))
        accept_header = flask.request.headers.get('Accept', [])
        suffix = ""
        if "text/html" in accept_header:
            if os.path.exists(blogpost + ".html"):
                suffix = ".html"
        else:
            if os.path.exists(blogpost + ".md"):
                suffix = ".md"
        blogpost += suffix

        with ArticleReader(blogpost) as article:
            return {
                'message': {
                    'content': article.content,
                    'metadata': article.metadata,
                }
            }

    return app


def build_parser():
    """Builds the argument parser."""
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(help='Blogstrap commands')
    init_parser = subparsers.add_parser(
        'init', help='Initialize the Blogstrap directory')
init_parser.set_defaults(func=init) init_parser.add_argument('-t', '--target', dest='target', type=str, default='.', help='Target folder to generate files in') init_parser.add_argument('--no-homepage', action='store_true', default=False, help='if specified, no homepage will be created') run_parser = subparsers.add_parser( 'run', help="Run the Flask development server") run_parser.set_defaults(func=run) run_parser.add_argument('-c', '--config', dest='config', type=str, default=None, help='path to a config file') return parser def main(): args = build_parser().parse_args() args.func(args) def init(args): builder.build(args) def run(args): # identify which config file to use first config = args.config if config is not None: # make sure any relative path is resolved relative to the # current working dir if not os.path.isabs(config): config = os.path.join(os.getcwd(), config) else: # if no config file are defined on the cli, try to look for one # in the default location ".blogstrap/blogstrap.conf" default_config_path = os.path.join(os.getcwd(), ".blogstrap/blogstrap.conf") if os.path.exists(default_config_path): config = default_config_path application = create_app(config) application.run()
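

# Example configuration (a sketch, not shipped with Blogstrap): create_app()
# loads a plain Python file via Flask's from_pyfile, so any DefaultConfig key
# can be overridden. Saved as e.g. .blogstrap/blogstrap.conf it is picked up
# by `blogstrap run` automatically; all values below are illustrative.
#
#     AUTHOR = "Jane Doe"
#     BLOGTITLE = "Notes"
#     BLOGROOT = "/var/blog/articles"
#     HOMEPAGE = "welcome"      # served at "/"; without it "/" answers 204
#     STATIC_DIR = "images"
#     DEBUG = False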
PypiClean
/MegEngine-1.13.1-cp37-cp37m-macosx_10_14_x86_64.whl/megengine/data/dataset/vision/cityscapes.py
import json
import os

import cv2
import numpy as np

from .meta_vision import VisionDataset


class Cityscapes(VisionDataset):
    r"""`Cityscapes <http://www.cityscapes-dataset.com/>`_ Dataset."""

    supported_order = (
        "image",
        "mask",
        "info",
    )

    def __init__(self, root, image_set, mode, *, order=None):
        super().__init__(root, order=order, supported_order=self.supported_order)

        city_root = self.root
        if not os.path.isdir(city_root):
            raise RuntimeError("Dataset not found or corrupted.")

        self.mode = mode
        self.images_dir = os.path.join(city_root, "leftImg8bit", image_set)
        self.masks_dir = os.path.join(city_root, self.mode, image_set)
        self.images, self.masks = [], []
        # self.target_type = ["instance", "semantic", "polygon", "color"]

        # for semantic segmentation
        if mode == "gtFine":
            valid_modes = ("train", "test", "val")
        else:
            valid_modes = ("train", "train_extra", "val")

        for city in os.listdir(self.images_dir):
            img_dir = os.path.join(self.images_dir, city)
            mask_dir = os.path.join(self.masks_dir, city)
            for file_name in os.listdir(img_dir):
                mask_name = "{}_{}".format(
                    file_name.split("_leftImg8bit")[0],
                    self._get_target_suffix(self.mode, "semantic"),
                )
                self.images.append(os.path.join(img_dir, file_name))
                self.masks.append(os.path.join(mask_dir, mask_name))

    def __getitem__(self, index):
        target = []
        # initialize so the "info" branch works even when "image" is not in
        # self.order (previously this raised NameError)
        image = None
        for k in self.order:
            if k == "image":
                image = cv2.imread(self.images[index], cv2.IMREAD_COLOR)
                target.append(image)
            elif k == "mask":
                mask = cv2.imread(self.masks[index], cv2.IMREAD_GRAYSCALE)
                mask = self._trans_mask(mask)
                mask = mask[:, :, np.newaxis]
                target.append(mask)
            elif k == "info":
                if image is None:
                    image = cv2.imread(self.images[index], cv2.IMREAD_COLOR)
                info = [image.shape[0], image.shape[1], self.images[index]]
                target.append(info)
            else:
                raise NotImplementedError

        return tuple(target)

    def __len__(self):
        return len(self.images)

    def _trans_mask(self, mask):
        trans_labels = [
            7,
            8,
            11,
            12,
            13,
            17,
            19,
            20,
            21,
            22,
            23,
            24,
            25,
            26,
            27,
            28,
            31,
            32,
            33,
        ]
        label = np.ones(mask.shape) * 255
        for i, tl in enumerate(trans_labels):
            label[mask == tl] = i
        return label.astype(np.uint8)

    def _get_target_suffix(self, mode, target_type):
        if target_type == "instance":
            return "{}_instanceIds.png".format(mode)
        elif target_type == "semantic":
            return "{}_labelIds.png".format(mode)
        elif target_type == "color":
            return "{}_color.png".format(mode)
        else:
            return "{}_polygons.json".format(mode)

    def _load_json(self, path):
        with open(path, "r") as file:
            data = json.load(file)
        return data

    class_names = (
        "road",
        "sidewalk",
        "building",
        "wall",
        "fence",
        "pole",
        "traffic light",
        "traffic sign",
        "vegetation",
        "terrain",
        "sky",
        "person",
        "rider",
        "car",
        "truck",
        "bus",
        "train",
        "motorcycle",
        "bicycle",
    )
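

# Usage sketch (assumes a local Cityscapes checkout laid out as
# <root>/leftImg8bit/<split>/<city>/ and <root>/gtFine/<split>/<city>/;
# the path below is illustrative):
if __name__ == "__main__":
    dataset = Cityscapes(
        "/data/cityscapes", image_set="val", mode="gtFine",
        order=("image", "mask", "info"),
    )
    image, mask, info = dataset[0]
    # mask holds train ids 0..18 plus 255 for ignored labels (see _trans_mask)
    print(len(dataset), image.shape, mask.shape, info)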
PypiClean
/MultiSim-0.10.0.tar.gz/MultiSim-0.10.0/multisim/parts/pipe_with_valve.py
import numpy as np from .pipe import Pipe from ..precomp_funs import pipe1D_branched_diff class PipeWith3wValve(Pipe): """ type: Single Pipe Can be added to the simulation environment by using the following method: .add_part(Pipe, name, volume=..., grid_points=..., outer_diameter=..., shell_thickness=...) Part creation parameters: ------------------------- Pipe : class name This parameter can\'t be changed and needs to be passed exactly like this. name : string Pipe identifier as string. Needs to be unique. length : integer, float Thermal energy storage volume in [m^3]. grid_points : integer Number of grid points to discretize the pipe with. For part initialization the following additional parameters need to be passed: .init_part(insulation_thickness=..., insulation_lambda=...) Part initialization parameters: ------------------------------- insulation_thickness : float in [m] insulation_lambda : float in [W/(m*K)] """ def __init__(self, name, master_cls, **kwargs): self.constr_type = 'PipeWith3wValve' # define construction type # since this part is a subclass of Pipe, initialize Pipe: super().__init__( name, master_cls, **kwargs, constr_type=self.constr_type ) # preallocate mass flow grids: self.dm = np.zeros_like(self.T) self._dm_top = np.zeros_like(self.T) self._dm_bot = np.zeros_like(self.T) # set dm char so that all ports face inwards (more convenient when # adding new ports): self.dm_char = tuple(('in', 'in')) # add valve to specified location in pipe: err_str = ( self._base_err + self._arg_err.format('valve_location') + 'The location of the 3 way valve in the pipe has to be given with ' '`valve_location=X`, where X is an integer cell index in the ' 'range of the shape of the pipe.\n' 'The valve location index specifies the position of the B-port of ' 'the valve, while the A- and AB-port are specified by the pipe\'s ' 'add part algorithm.' ) assert ( 'valve_location' in kwargs and isinstance(kwargs['valve_location'], int) and 0 <= kwargs['valve_location'] < self.num_gp ), err_str self._valve_B_loc = kwargs['valve_location'] # if ports can be added to this part. set to true for one single port: self.can_add_ports = True # add B port: kwargs['new_ports'] = {'B': [self._valve_B_loc, 'index']} self._add_ports(**kwargs) self.can_add_ports = False # disable adding new ports # give specific port names: self.port_names = tuple(('A', 'B', 'AB')) # set massflow characteristics for ports: in means that an # inflowing massflow has a positive sign, out means that an # outflowing massflow is pos. 
# self.dm_char = tuple(('in', 'in', 'out')) # replaced by all-inflowing ports to deal with all kinds of port setups self.dm_char = tuple(('in', 'in', 'in')) # preallocate massflow calculation factor array: self.pf_arr = np.array( [0.5, 0.5, 1], dtype=np.float64 # port A # port B ) # port AB # make dict for easy lookup of portfactors with memory views: self.port_factors = dict( { 'A': self.pf_arr[0:1], 'B': self.pf_arr[1:2], 'AB': self.pf_arr[2:3], } ) # construct all port shape dependent vars: dummy_var = list(self.port_names) for i in range(self.port_num): dummy_var[i] = self.name + ';' + dummy_var[i] self._own_ports = tuple(dummy_var) # preallocate port values: self._port_vals = np.zeros(self.port_num) # preallocate grids for port connection parameters: # cross section area of wall of connected pipe, fluid cross section # area of, gridspacing and lambda of wall of connected pipe Tpshp = self._T_port.shape self._A_wll_conn_p = np.zeros(Tpshp) self._A_fld_conn_p = np.zeros(Tpshp) self._A_p_wll_mean = np.zeros(Tpshp) self._A_p_fld_mean = np.zeros(Tpshp) self._port_gsp = np.zeros(Tpshp) self._lam_wll_conn_p = np.full_like(self._T_port, 1e-2) self._lam_port_fld = np.zeros(Tpshp) self._lam_fld_own_p = np.zeros(Tpshp) self._UA_port_fld = np.zeros(Tpshp) self._UA_port_wll = np.zeros(Tpshp) # if the topology construction method has to stop when it reaches the # part to solve more ports from other sides before completely solving # the massflow of it. This will be set to false as soon as only one # port to solve is remaining: self.break_topology = True # count how many ports are still open to be solved by topology. If # break topology is True, this is used to set it to False if 1 is # reached. self._cnt_open_prts = self.port_num # determine if part has the capability to affect massflow (dm) by # diverting flow through ports or adding flow through ports: self.affect_dm = True # if the massflow (dm) has the same value in all cells of the part # (respectively in each flow channel for parts with multiple flows): self.dm_invariant = False # if the part CAN BE controlled by the control algorithm: self.is_actuator = True self._actuator_CV = self.pf_arr[:] # set array to be controlled self._actuator_CV_name = 'port_opening' self._unit = '[%]' # if the part HAS TO BE controlled by the control algorithm: self.control_req = True # if the part needs a special control algorithm (for parts with 2 or # more controllable inlets/outlets/...): self.actuator_special = True # initialize bool if control specified: self.ctrl_defined = False # IMPORTANT: THIS VARIABLE **MUST NOT BE INHERITED BY SUB-CLASSES**!! # If sub-classes are inherited from this part, this bool checker AND # the following variables MUST BE OVERWRITTEN! # ist the diff function fully njitted AND are all input-variables # stored in a container? self._diff_fully_njit = False # self._diff_njit = pipe1D_diff # handle to njitted diff function # input args are created in simenv _create_diff_inputs method def init_part(self, start_portA_opening, **kwargs): # since this part is a subclass of Pipe, call init_part of Pipe: super().init_part(**kwargs) # set starting valve opening: err_str = ( self._base_err + self._arg_err.format('start_portA_opening') + 'The initial valve port A opening has to be set in the range of ' '`0 <= start_portA_opening <= 1`.' 
) assert ( isinstance(start_portA_opening, (int, float)) and 0 <= start_portA_opening <= 1 ), err_str self.pf_arr[0] = start_portA_opening self.pf_arr[1] = 1 - self.pf_arr[0] self._pf_arr_init = self.pf_arr.copy() # bkp for re-initializing # initialize the actuator self._initialize_actuator(**kwargs) # expand const var to other ports: self._actuator_CV[1] = 1 - self._actuator_CV[0] # add massflow grid argument to input args at correct position: self._input_arg_names_sorted.insert(6, 'dm') def _get_flow_routine( self, port, parent_port=None, subnet=False, **kwargs ): """ Returns the massflow calculation routine for the port of the current part to the topology construction. The massflow calculation routine has to look like: routine = (memory_view_to_target_port, operation_id, memory_view_to_port1, memory_view_to_port2, ...) with target_port being the port which has to be calculated and port1 and port2 being the other/source ports which **don't** have to be calculated with this routine! These source ports **must be given** when the routine is called. Parameters: ----------- port : string Port name of the port which shall be calculated (target port). """ # get topology connection conditions (target index, source part/port # identifiers, source index and algebraic sign for passed massflow): trgt_idx, src_part, src_port, src_idx, alg_sign = self._get_topo_cond( port, parent_port ) # 3wValve, no ports solved yet or massflow given from other part if self._cnt_open_prts == 3: # if valve is getting the massflow from another part, it can simply # be copied from it: operation id 0 (positive) or - 1 (negative) if alg_sign == 'positive': operation_id = 0 else: operation_id = -1 # add operation instructions to tuple (memory view to target # massflow array cell, operation id and memory view source port's # massflow array cells) op_routine = ( self._dm_io.reshape(-1)[trgt_idx], operation_id, self._models.parts[src_part]._dm_io.reshape(-1)[src_idx], ) else: # get massflow calculation routine for the case that port # A or B need to be solved using the massflow from port AB # and valve opening (stored in port factors array). # operation id of a 3w valve for this case is ALWAYS -3, since # AB must be given and A or B can be calculated by multiplying # the respective port opening factor with AB. (no )negative # of product needed, since AB positive massflow sign is # not contrary to A and B if port in ('A', 'B') and 'AB' in self._solved_ports: operation_id = -3 # before: 3 # get source index for massflow cell of port AB. If AB is # already solved, this will always be the third (last) cell of # dm_io: src_idx_ab = slice(2, 3) # add operation instructions to tuple (memory view to target # massflow array cell, operation id, memory view to the # source port's massflow array cell and memory view to the # TARGET PORT'S port factor array cell): op_routine = ( self._dm_io.reshape(-1)[trgt_idx], operation_id, self._dm_io.reshape(-1)[src_idx_ab], self.pf_arr[trgt_idx], ) elif port == 'AB' and ( 'A' in self._solved_ports or 'B' in self._solved_ports ): # if the requested port is AB AND either A OR B have already # been solved. 
# operation ID is now -4 --> negative division of the third # op_routine element by the fourth: # (op_routine[3]/op_routine[4]) operation_id = -4 # get solved port (start with looking for A): if 'A' in self._solved_ports: src_idx = slice(0, 1) # port A src index is always cell 0 else: # elif 'B' is in solved ports: # port B src index is always cell 1 (CAUTION: This is only # true for the dm_io and pf_arr arrays, NOT for temp.!) src_idx = slice(1, 2) # add operation instructions to tuple (memory view to target # massflow array cell, operation id, memory view to the # source port's massflow array cell and memory view to the # TARGET PORT'S port factor array cell): op_routine = ( self._dm_io.reshape(-1)[trgt_idx], operation_id, self._dm_io.reshape(-1)[src_idx], self.pf_arr[src_idx], ) elif port in ('A', 'B') and 'AB' not in self._solved_ports: # this can only be solved by multiplying the other port (A if # port=B, else vice-versa) with the target port port factor # and dividing by the source port port factor. -> ID 5 operation_id = 5 # get solved port (start with looking for A): if 'A' in self._solved_ports: src_idx = slice(0, 1) # port A src index is always cell 0 else: # elif 'B' is in solved ports: # port B src index is always cell 1 (CAUTION: This is only # true for the dm_io and pf_arr arrays, NOT for temp.!) src_idx = slice(1, 2) # add operation instructions to tuple (memory view to target # massflow array cell, operation id, memory view to the # source port's massflow array cell, memory view to the # TARGET PORT'S port factor array cell AND memory view to the # source port's port factor array cell): # resulting calculation routine: # target = source * trgt_pf / src_pf op_routine = ( self._dm_io.reshape(-1)[trgt_idx], operation_id, self._dm_io.reshape(-1)[src_idx], self.pf_arr[trgt_idx], self.pf_arr[src_idx], ) # update solved ports list and counter stop break: self._solved_ports.append(port) self._cnt_open_prts = self.port_num - len(self._solved_ports) # update break topology: # self.break_topology = True if self._cnt_open_prts > 0 else False self.break_topology = False # remove part from hydr_comps if completely solved: if self._cnt_open_prts == 0: self._models._hydr_comps.remove(self.name) # save topology parameters to dict for easy information lookup: net = 'Subnet' if subnet else 'Flownet' operation_routine = ( 'Negative (of sum) of source' if operation_id == -1 else 'Sum' if operation_id == 1 else 'Pass on value' if operation_id == 0 else 'Multiplication with port factor' if operation_id == 3 else 'Division by port factor' if operation_id == 4 else 'Mult/Div with other ports' if operation_id == 5 else 'Error' ) src_part = src_part if src_part is not None else self.name source_ports = ( tuple(('AB', 'pf_arr[' + port + ']')) if operation_id == 3 else src_port if operation_id == 0 else src_port if operation_id == -1 else tuple(set(self.port_names) - set(port)) ) # add port dict for current port and fill it: if port not in self.info_topology: self.info_topology[port] = dict() self.info_topology[port].update( { 'Net': net, 'Massflow': self._dm_io.reshape(-1)[trgt_idx], 'Calculation routine': operation_routine, 'Source part': src_part, 'Source port(s)': source_ports, 'Connected part': ( self._models.port_links[self.name + ';' + port].split(';')[ 0 ] ), 'Connected port': ( self._models.port_links[self.name + ';' + port].split(';')[ 1 ] ), 'Parent pump/part': kwargs['parent_pump'], 'Pump side': kwargs['pump_side'], } ) return op_routine def _process_cv(self, ctrl_inst): # 3w_valve_direct 
control update method. # n1 value (port A) with clipping to ]llim,ulim[: self.pf_arr[0] = ( self._llim if ctrl_inst.cv < self._llim else self._ulim if ctrl_inst.cv > self._ulim else ctrl_inst.cv ) # n2 value (port B): self.pf_arr[1] = 1 - self.pf_arr[0] def get_diff(self, timestep): """ This function just calls a jitted calculation function. For a pipe with a valve this is the same as the branched pipe's differential function. """ pipe1D_branched_diff(*self._input_args, timestep) return self.dT_total
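

# Numeric sketch of the massflow bookkeeping above (standalone, values are
# illustrative, sign conventions of the solver ignored): with port factors
# pf = [pf_A, pf_B, 1] and pf_A + pf_B = 1, the operation ids resolve to
# A = pf_A * AB (id -3), AB = A / pf_A (id -4) and B = A * pf_B / pf_A (id 5).
if __name__ == "__main__":
    import numpy as np

    pf = np.array([0.3, 0.7, 1.0])  # 30% of the AB flow leaves through port A
    dm_ab = 2.0                     # kg/s arriving at port AB
    dm_a = dm_ab * pf[0]            # id -3: 0.6 kg/s
    dm_b = dm_a * pf[1] / pf[0]     # id 5:  1.4 kg/s
    assert abs(dm_a / pf[0] - dm_ab) < 1e-12  # id -4 recovers AB
    print(dm_a, dm_b, dm_a + dm_b)  # the two branch flows sum back to AB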
PypiClean
/Jord-0.1.5.tar.gz/Jord-0.1.5/jord/shapely_utilities/projection.py
from typing import Sequence, Tuple, Optional from shapely.geometry import Polygon, Point, LineString from shapely.geometry.base import BaseGeometry from warg import pairs __all__ = [ "project_point_to_object", "project_point_to_line_points", "project_point_to_line", "nearest_geometry", ] def project_point_to_object(point: Point, geometry: BaseGeometry) -> Point: """Find the nearest point in geometry, measured from given point. :param point: a shapely Point :param geometry: a shapely geometry object (LineString, Polygon) :return: a shapely Point that lies on geometry closest to point """ nearest_point = None min_dist = float("inf") if isinstance(geometry, Polygon): for seg_start, seg_end in pairs(list(geometry.exterior.coords)): line_start = Point(seg_start) line_end = Point(seg_end) intersection_point = project_point_to_line_points( point, line_start, line_end ) cur_dist = point.distance(intersection_point) if cur_dist < min_dist: min_dist = cur_dist nearest_point = intersection_point elif isinstance(geometry, LineString): for seg_start, seg_end in pairs(list(geometry.coords)): line_start = Point(seg_start) line_end = Point(seg_end) intersection_point = project_point_to_line_points( point, line_start, line_end ) cur_dist = point.distance(intersection_point) if cur_dist < min_dist: min_dist = cur_dist nearest_point = intersection_point else: raise NotImplementedError( "project_point_to_object not implemented for" + " geometry type '" + geometry.type + "'." ) return nearest_point def project_point_to_line_points( point: Point, line_start: Point, line_end: Point ) -> Point: """Find the nearest point on a straight line, measured from given point. Source: http://gis.stackexchange.com/a/438/19627 :param point: a shapely Point object :param line_start: the line starting point as a shapely Point :param line_end: the line end point as a shapely Point :return: a shapely Point that lies on the straight line closest to point """ line_magnitude = line_start.distance(line_end) u = ( (point.x - line_start.x) * (line_end.x - line_start.x) + (point.y - line_start.y) * (line_end.y - line_start.y) ) / (line_magnitude**2) # closest point does not fall within the line segment, # take the shorter distance to an endpoint if u < 0.00001 or u > 1: ix = point.distance(line_start) iy = point.distance(line_end) if ix > iy: return line_end else: return line_start ix = line_start.x + u * (line_end.x - line_start.x) iy = line_start.y + u * (line_end.y - line_start.y) return Point([ix, iy]) def project_point_to_line(point: Point, line: LineString) -> Point: line_coords = line.coords # assert line_coords == 2 return project_point_to_line_points(point, *[Point(*xy) for xy in line_coords]) def line_line_intersection(line: LineString, other: LineString) -> Optional[Point]: """ p = p1_start r = (p1_end - p1_start) q = p2_start s = (p2_end - p2_start) t = np.cross(q - p, s) / (np.cross(r, s)) # This is the intersection point i = p + t * r :param line: :param other: :return: """ import sympy.geometry l1 = sympy.geometry.Line(*[sympy.geometry.Point(*xy) for xy in line.coords]) l2 = sympy.geometry.Line(*[sympy.geometry.Point(*xy) for xy in other.coords]) l1_l2_intersection = l1.intersection( l2 ) # These are two infinite lines defined by two points on the line if len(l1_l2_intersection) == 1: if isinstance(l1_l2_intersection[0], sympy.geometry.Line2D): # Same return return Point(*l1_l2_intersection[0]) def nearest_geometry( geometries: Sequence[BaseGeometry], point: Point ) -> Tuple[BaseGeometry, float, int]: """Find the nearest 
geometry among a list, measured from fixed point. :param geometries: a list of shapely geometry objects :param point: a shapely Point :return: Tuple (geom, min_dist, min_index) of the geometry with minimum distance to point, its distance min_dist and the list index of geom, so that geom = geometries[min_index]. """ min_dist, min_index = min( (point.distance(geom), k) for (k, geom) in enumerate(geometries) ) return geometries[min_index], min_dist, min_index if __name__ == "__main__": print( line_line_intersection( LineString([[0, 0], [1, 1]]), LineString([[1, 0], [0, 1]]) ) ) print( line_line_intersection( LineString([[0, 0], [1, 1]]), LineString([[6, 0], [5, 1]]) ) ) print( line_line_intersection( LineString([[0, 0], [1, 1]]), LineString([[0, 0], [1, 1]]) ) ) print( line_line_intersection( LineString([[0, 0], [1, 1]]), LineString([[1, 0], [2, 1]]) ) )
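
if __name__ == "__main__":
    # Further usage sketch (illustrative geometries): project a point onto a
    # polygon boundary and pick the closest of several candidate shapes.
    square = Polygon([(0, 0), (2, 0), (2, 2), (0, 2)])
    p = Point(3, 1)
    print(project_point_to_object(p, square))  # expected POINT (2 1)
    geom, dist, idx = nearest_geometry(
        [LineString([(5, 0), (5, 2)]), square.exterior, Point(0, 0)], p
    )
    print(geom, dist, idx)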
PypiClean
/Enarksh-0.9.0.tar.gz/Enarksh-0.9.0/enarksh/xml_reader/XmlReader.py
import logging import os from lxml import etree import enarksh from enarksh.xml_reader.Host import Host from enarksh.xml_reader.node import create_node class XmlReader: # ------------------------------------------------------------------------------------------------------------------ @staticmethod def parse_schedule(xml, filename): """ Parses a schedule definition in XML. :param str xml: The XML with a schedule definition :param str filename: :rtype: enarksh.xml_reader.node.ScheduleNode """ with open(os.path.join(enarksh.HOME, 'etc/enarksh.xsd'), 'rb') as f: xsd = f.read() etree.clear_error_log() schema_root = etree.XML(xsd) schema = etree.XMLSchema(schema_root) parser = etree.XMLParser(schema=schema, encoding='utf8') try: root = etree.fromstring(bytes(xml, 'utf8'), parser) # Root element must be a schedule. if root.tag != 'Schedule': raise Exception("Root element must be 'Schedule' but '{0!s}' was found.".format(root.tag)) schedule = create_node('Schedule') schedule.read_xml(root) error = schedule.validate() if error: raise Exception( "File '{0!s}' is not a valid schedule configuration file.\n{1!s}".format(filename, error)) # Set recursion and dependency levels. schedule.set_levels() except etree.XMLSyntaxError as exception: log = logging.getLogger('enarksh') log.error(exception.error_log.filter_from_level(etree.ErrorLevels.WARNING)) raise exception return schedule # ------------------------------------------------------------------------------------------------------------------ @staticmethod def parse_dynamic_worker(xml, parent): """ Parses a schedule definition in XML. :param str xml: The XML with a schedule definition :param parent: :rtype: enarksh.xml_reader.node.CompoundJobNode """ with open(os.path.join(enarksh.HOME, 'etc/enarksh.xsd'), 'rb') as f: xsd = f.read() schema_root = etree.XML(xsd) schema = etree.XMLSchema(schema_root) parser = etree.XMLParser(schema=schema, encoding='utf8') root = etree.fromstring(bytes(xml, 'utf8'), parser) # Root element must be a dynamic inner worker. if root.tag != 'DynamicInnerWorker': raise Exception("Root element must be 'DynamicInnerWorker' but '{0!s}' was found.".format(root.tag)) worker = create_node('DynamicInnerWorker') worker.read_xml(root) error = worker.validate(parent) if error: raise Exception("XML message is not a valid dynamic worker configuration.\n{0!s}".format(error)) # Set recursion and dependency levels. worker.set_levels() return worker # ------------------------------------------------------------------------------------------------------------------ @staticmethod def parse_host(filename): """ Parses a host definition in XML. :param str filename: The XML file with a host definition :rtype: enarksh.xml_reader.Host.Host """ with open(filename, 'rt', encoding='utf-8') as f: xml = f.read() with open(os.path.join(enarksh.HOME, 'etc/enarksh.xsd'), 'rb') as f: xsd = f.read() schema_root = etree.XML(xsd) schema = etree.XMLSchema(schema_root) parser = etree.XMLParser(schema=schema, encoding='utf8') root = etree.fromstring(bytes(xml, 'utf8'), parser) # Root element must be a schedule. if root.tag != 'Host': raise Exception("Root element must be 'Host' but '{0!s}' was found.".format(root.tag)) host = Host() host.read_xml(root) error = host.validate() if error: raise Exception("File '{0!s}' is not a valid host configuration file.\n{1!s}".format(filename, error)) return host # ----------------------------------------------------------------------------------------------------------------------
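

# Usage sketch (file paths are illustrative; enarksh.HOME and the bundled
# etc/enarksh.xsd must be in place, as the parsers above require):
if __name__ == "__main__":
    with open("/etc/enarksh/schedule.xml", "rt", encoding="utf-8") as handle:
        schedule = XmlReader.parse_schedule(
            handle.read(), "/etc/enarksh/schedule.xml")
    print(schedule)

    host = XmlReader.parse_host("/etc/enarksh/host.xml")
    print(host)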
PypiClean
/Firefly%20III%20API%20Python%20Client-1.5.6.post2.tar.gz/Firefly III API Python Client-1.5.6.post2/firefly_iii_client/model/transaction_link_array.py
import re # noqa: F401 import sys # noqa: F401 from firefly_iii_client.model_utils import ( # noqa: F401 ApiTypeError, ModelComposed, ModelNormal, ModelSimple, cached_property, change_keys_js_to_python, convert_js_args_to_python_args, date, datetime, file_type, none_type, validate_get_composed_info, ) from ..model_utils import OpenApiModel from firefly_iii_client.exceptions import ApiAttributeError def lazy_import(): from firefly_iii_client.model.meta import Meta from firefly_iii_client.model.page_link import PageLink from firefly_iii_client.model.transaction_link_read import TransactionLinkRead globals()['Meta'] = Meta globals()['PageLink'] = PageLink globals()['TransactionLinkRead'] = TransactionLinkRead class TransactionLinkArray(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. Attributes: allowed_values (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict with a capitalized key describing the allowed value and an allowed value. These dicts store the allowed enum values. attribute_map (dict): The key is attribute name and the value is json key in definition. discriminator_value_class_map (dict): A dict to go from the discriminator variable value to the discriminator class name. validations (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict that stores validations for max_length, min_length, max_items, min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, inclusive_minimum, and regex. additional_properties_type (tuple): A tuple of classes accepted as additional properties values. """ allowed_values = { } validations = { } @cached_property def additional_properties_type(): """ This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ lazy_import() return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 _nullable = False @cached_property def openapi_types(): """ This must be a method because a model may have properties that are of type self, this must run after the class is loaded Returns openapi_types (dict): The key is attribute name and the value is attribute type. """ lazy_import() return { 'data': ([TransactionLinkRead],), # noqa: E501 'links': (PageLink,), # noqa: E501 'meta': (Meta,), # noqa: E501 } @cached_property def discriminator(): return None attribute_map = { 'data': 'data', # noqa: E501 'links': 'links', # noqa: E501 'meta': 'meta', # noqa: E501 } read_only_vars = { } _composed_schemas = {} @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, data, links, meta, *args, **kwargs): # noqa: E501 """TransactionLinkArray - a model defined in OpenAPI Args: data ([TransactionLinkRead]): links (PageLink): meta (Meta): Keyword Args: _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. Defaults to True _path_to_item (tuple/list): This is a list of keys or values to drill down to the model in received_data when deserializing a response _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _configuration (Configuration): the instance to use when deserializing a file_type parameter. 
If passed, type conversion is attempted If omitted no type conversion is done. _visited_composed_classes (tuple): This stores a tuple of classes that we have traveled through so that if we see that class again we will not use its discriminator again. When traveling through a discriminator, the composed schema that is is traveled through is added to this set. For example if Animal has a discriminator petType and we pass in "Dog", and the class Dog allOf includes Animal, we move through Animal once using the discriminator, and pick Dog. Then in Dog, we will make an instance of the Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) """ _check_type = kwargs.pop('_check_type', True) _spec_property_naming = kwargs.pop('_spec_property_naming', False) _path_to_item = kwargs.pop('_path_to_item', ()) _configuration = kwargs.pop('_configuration', None) _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise ApiTypeError( "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( args, self.__class__.__name__, ), path_to_item=_path_to_item, valid_classes=(self.__class__,), ) self._data_store = {} self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) self.data = data self.links = links self.meta = meta for var_name, var_value in kwargs.items(): if var_name not in self.attribute_map and \ self._configuration is not None and \ self._configuration.discard_unknown_keys and \ self.additional_properties_type is None: # discard variable. continue setattr(self, var_name, var_value) return self required_properties = set([ '_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', ]) @convert_js_args_to_python_args def __init__(self, data, links, meta, *args, **kwargs): # noqa: E501 """TransactionLinkArray - a model defined in OpenAPI Args: data ([TransactionLinkRead]): links (PageLink): meta (Meta): Keyword Args: _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. Defaults to True _path_to_item (tuple/list): This is a list of keys or values to drill down to the model in received_data when deserializing a response _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _configuration (Configuration): the instance to use when deserializing a file_type parameter. If passed, type conversion is attempted If omitted no type conversion is done. _visited_composed_classes (tuple): This stores a tuple of classes that we have traveled through so that if we see that class again we will not use its discriminator again. When traveling through a discriminator, the composed schema that is is traveled through is added to this set. For example if Animal has a discriminator petType and we pass in "Dog", and the class Dog allOf includes Animal, we move through Animal once using the discriminator, and pick Dog. 
Then in Dog, we will make an instance of the Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) """ _check_type = kwargs.pop('_check_type', True) _spec_property_naming = kwargs.pop('_spec_property_naming', False) _path_to_item = kwargs.pop('_path_to_item', ()) _configuration = kwargs.pop('_configuration', None) _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) if args: raise ApiTypeError( "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( args, self.__class__.__name__, ), path_to_item=_path_to_item, valid_classes=(self.__class__,), ) self._data_store = {} self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) self.data = data self.links = links self.meta = meta for var_name, var_value in kwargs.items(): if var_name not in self.attribute_map and \ self._configuration is not None and \ self._configuration.discard_unknown_keys and \ self.additional_properties_type is None: # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " f"class with read only attributes.")
PypiClean
/CellStar-2.0.3-py3-none-any.whl/cellstar/core/polar_transform.py
import math import threading import numpy as np from cellstar.utils.calc_util import to_int from cellstar.utils.calc_util import sub2ind from cellstar.utils.image_util import image_dilate_with_element, get_circle_kernel class PolarTransform(object): """ Object wrapping polar transform calculations and cached properties @type N: int @ivar N: number of points for polar transform calculation @type distance: float @ivar distance: maximal distance from cell center to cell border @type step: float @ivar step: length of step for active contour along its axis in pixels @type steps: int @ivar steps: number of steps considered for active contour along single axis @type max_r: int @ivar max_r: maximal radius of active contour @type R: numpy.array @ivar R: consecutive radii values for single axis of active contour @type center: int @ivar center: Polar transform center coordinate @type edge: int @ivar edge: dimension of polar transform @type halfedge: int @ivar halfedge: half of polar transform dimension @type t: numpy.array @ivar t: angles (in radians) of consecutive rays casted from 'center' @type x: numpy.array @ivar x: cartesian x-coordinates of points in polar coordinates system coordinates ordered by radius of polar points --> x[r,a] = P(r,a).x @type y: numpy.array @ivar y: cartesian y-coordinates of points in polar coordinates system coordinates ordered by radius of polar points --> y[r,a] = P(r,a).y @type dot_voronoi: numpy.array @ivar dot_voronoi - voronoi - "gravity field" of contour points dot_voronoi[x,y] = id(closest_contour_point(x,y)) @type to_polar: dict @ivar to_polar - dictionary of lists for each point: to_polar[index(P(R,a)] - list of point id in voronoi of contour points {P(r,a)| 0 < r < R} to_polar[index(P(R,a)] = [gravity_field(dot_voronoi, p) for p in {P(r,a) | 0 < r < R}] to_polar[index(P(R,a)] = [index(x,y) for x,y in range((0,0),(edge,edge)) if dot_voronoi[x,y] == gravity_index(p) for p in {P(r,a) | 0 < r < R}] """ __singleton_lock = threading.Lock() __singleton_instances = {} @classmethod def instance(cls, avg_cell_diameter, points, step, max_size): init_params = avg_cell_diameter, points, step, max_size if not cls.__singleton_instances.get(init_params, False): with cls.__singleton_lock: if not cls.__singleton_instances.get(init_params, False): cls.__singleton_instances[init_params] = cls(avg_cell_diameter, points, step, max_size) return cls.__singleton_instances.get(init_params, None) def __init__(self, avg_cell_diameter, points_number, step, max_size): self.N = points_number self.distance = max_size * avg_cell_diameter self.step = max(step * avg_cell_diameter, 0.2) self.steps = 1 + int(round((self.distance + 2) / self.step)) self.max_r = min(1 + int(round(self.distance / self.step)), self.steps - 1) self.R = None self.center = None self.edge = None self.half_edge = None self.x = None self.y = None self.dot_voronoi = None self.to_polar = {} self._calculate_polar_transform() def _calculate_polar_transform(self): self.R = np.arange(1, self.steps + 1).reshape((1, self.steps)).transpose() * self.step # rays angle from cell center self.t = np.linspace(0, 2 * math.pi, self.N + 1) self.t = self.t[:-1] # sinus and cosinus of rays angle repeated steps-times # function value for angles and every radius (for a given angle same for every radius) sin_t = np.kron(np.ones((len(self.R), 1)), np.sin(self.t)) cos_t = np.kron(np.ones((len(self.R), 1)), np.cos(self.t)) # N-times repeated vector of subsequent radiuses RR = np.kron(np.ones((1, len(self.t))), self.R) # From polar definition: # x 
- matrix of xs for angle alpha and radius R
        # y - matrix of ys for angle alpha and radius R
        self.x = RR * cos_t
        self.y = RR * sin_t

        self.half_edge = math.ceil(self.R[-1] + 2)
        self.center = to_int(self.half_edge + 1)
        self.edge = to_int(self.center + self.half_edge)

        # clear black image [edge x edge]
        self.dot_voronoi = np.zeros((self.edge, self.edge), dtype=int)

        px = self.center + self.x
        py = self.center + self.y

        # create list of coordinates (x,y) on the checked contour
        index = np.column_stack(((py - .5).astype(int).T.flat, (px - .5).astype(int).T.flat))
        # create list of subsequent id for above points
        cont = np.arange(1, px.size + 1)

        # mark on 'dot_voronoi' every point using unique id
        self.dot_voronoi[tuple(index.T)] = cont

        # in every iteration smooth 'dot_voronoi' marking gravity field of given points
        for i in range(0, int(self.center)):
            ndv = image_dilate_with_element(self.dot_voronoi, 3)
            mask = np.logical_and((self.dot_voronoi == 0), (ndv != 0))
            mask = mask.nonzero()
            self.dot_voronoi[mask] = ndv[mask]

        # apply circle mask on 'dot_voronoi'
        circ_mask = get_circle_kernel(self.half_edge)
        self.dot_voronoi[np.logical_not(circ_mask)] = 0

        self.dot_voronoi[self.center - 1, self.center - 1] = 0

        # for every angle
        for a in range(self.t.size):
            # create new clear mask
            mask = np.zeros((self.edge, self.edge), dtype=bool)
            # for point
            for r in range(self.R.size):
                # find index of point P(r,a)
                idx = sub2ind(px.shape[0], (r, a))
                val = idx + 1
                # find places which belong to that index in 'dot_voronoi'
                indices = np.array(list(zip(*np.nonzero(self.dot_voronoi == val))))
                # set mask to 1 in above places
                if len(indices) > 0:
                    mask[tuple(indices.T)] = 1
            # to_polar[idx] is a list of coordinates (x,y) from points on the
            # mask, keyed by the outermost point P(R_max, a) of this ray;
            # materialise the pairs: Python 3's map() is lazy and single-use
            self.to_polar[idx] = list(zip(*np.nonzero(mask)))
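

# Usage sketch (parameter values are illustrative): instance() caches one
# transform per parameter tuple, so repeated lookups return the same object.
if __name__ == "__main__":
    pt_a = PolarTransform.instance(
        avg_cell_diameter=30.0, points=16, step=0.067, max_size=1.0)
    pt_b = PolarTransform.instance(
        avg_cell_diameter=30.0, points=16, step=0.067, max_size=1.0)
    assert pt_a is pt_b  # same parameters -> same cached singleton
    print(pt_a.N, pt_a.steps, pt_a.x.shape)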
PypiClean
/FamcyDev-0.3.71-py3-none-any.whl/Famcy/node_modules/bootstrap-fileinput/js/locales/nl.js
(function ($) { "use strict"; $.fn.fileinputLocales['nl'] = { fileSingle: 'bestand', filePlural: 'bestanden', browseLabel: 'Zoek &hellip;', removeLabel: 'Verwijder', removeTitle: 'Verwijder geselecteerde bestanden', cancelLabel: 'Annuleren', cancelTitle: 'Annuleer upload', pauseLabel: 'Pause', pauseTitle: 'Pause ongoing upload', uploadLabel: 'Upload', uploadTitle: 'Upload geselecteerde bestanden', msgNo: 'Nee', msgNoFilesSelected: '', msgPaused: 'Paused', msgCancelled: 'Geannuleerd', msgPlaceholder: 'Selecteer {files} ...', msgZoomModalHeading: 'Gedetailleerd voorbeeld', msgFileRequired: 'U moet een bestand kiezen om te uploaden.', msgSizeTooSmall: 'Bestand "{name}" (<b>{size} KB</b>) is te klein en moet groter zijn dan <b>{minSize} KB</b>.', msgSizeTooLarge: 'Bestand "{name}" (<b>{size} KB</b>) is groter dan de toegestane <b>{maxSize} KB</b>.', msgFilesTooLess: 'U moet minstens <b>{n}</b> {files} selecteren om te uploaden.', msgFilesTooMany: 'Aantal geselecteerde bestanden <b>({n})</b> is meer dan de toegestane <b>{m}</b>.', msgTotalFilesTooMany: 'You can upload a maximum of <b>{m}</b> files (<b>{n}</b> files detected).', msgFileNotFound: 'Bestand "{name}" niet gevonden!', msgFileSecured: 'Bestand kan niet gelezen worden in verband met beveiligings redenen "{name}".', msgFileNotReadable: 'Bestand "{name}" is niet leesbaar.', msgFilePreviewAborted: 'Bestand weergaven geannuleerd voor "{name}".', msgFilePreviewError: 'Er is een fout opgetreden met het lezen van "{name}".', msgInvalidFileName: 'Ongeldige of niet ondersteunde karakters in bestandsnaam "{name}".', msgInvalidFileType: 'Geen geldig bestand "{name}". Alleen "{types}" zijn toegestaan.', msgInvalidFileExtension: 'Geen geldige extensie "{name}". Alleen "{extensions}" zijn toegestaan.', msgFileTypes: { 'image': 'afbeelding', 'html': 'HTML', 'text': 'tekst', 'video': 'video', 'audio': 'geluid', 'flash': 'flash', 'pdf': 'PDF', 'object': 'object' }, msgUploadAborted: 'Het uploaden van bestanden is afgebroken', msgUploadThreshold: 'Verwerken &hellip;', msgUploadBegin: 'Initialiseren &hellip;', msgUploadEnd: 'Gedaan', msgUploadResume: 'Resuming upload &hellip;', msgUploadEmpty: 'Geen geldige data beschikbaar voor upload.', msgUploadError: 'Upload Error', msgDeleteError: 'Delete Error', msgProgressError: 'Error', msgValidationError: 'Bevestiging fout', msgLoading: 'Bestanden laden {index} van de {files} &hellip;', msgProgress: 'Bestanden laden {index} van de {files} - {name} - {percent}% compleet.', msgSelected: '{n} {files} geselecteerd', msgProcessing: 'Processing ...', msgFoldersNotAllowed: 'Drag & drop alleen bestanden! {n} overgeslagen map(pen).', msgImageWidthSmall: 'Breedte van het foto-bestand "{name}" moet minstens {size} px zijn.', msgImageHeightSmall: 'Hoogte van het foto-bestand "{name}" moet minstens {size} px zijn.', msgImageWidthLarge: 'Breedte van het foto-bestand "{name}" kan niet hoger zijn dan {size} px.', msgImageHeightLarge: 'Hoogte van het foto bestand "{name}" kan niet hoger zijn dan {size} px.', msgImageResizeError: 'Kon de foto afmetingen niet lezen om te verkleinen.', msgImageResizeException: 'Fout bij het verkleinen van de foto.<pre>{errors}</pre>', msgAjaxError: 'Er ging iets mis met de {operation} actie. Gelieve later opnieuw te proberen!', msgAjaxProgressError: '{operation} mislukt', msgDuplicateFile: 'File "{name}" of same size "{size} KB" has already been selected earlier. Skipping duplicate selection.', msgResumableUploadRetriesExceeded: 'Upload aborted beyond <b>{max}</b> retries for file <b>{file}</b>! 
Error Details: <pre>{error}</pre>', msgPendingTime: '{time} remaining', msgCalculatingTime: 'calculating time remaining', ajaxOperations: { deleteThumb: 'bestand verwijderen', uploadThumb: 'bestand uploaden', uploadBatch: 'alle bestanden uploaden', uploadExtra: 'form data upload' }, dropZoneTitle: 'Drag & drop bestanden hier &hellip;', dropZoneClickTitle: '<br>(of klik hier om {files} te selecteren)', fileActionSettings: { removeTitle: 'Verwijder bestand', uploadTitle: 'bestand uploaden', uploadRetryTitle: 'Opnieuw uploaden', downloadTitle: 'Download file', zoomTitle: 'Bekijk details', dragTitle: 'Verplaatsen / herindelen', indicatorNewTitle: 'Nog niet geupload', indicatorSuccessTitle: 'geupload', indicatorErrorTitle: 'fout uploaden', indicatorPausedTitle: 'Upload Paused', indicatorLoadingTitle: 'uploaden &hellip;' }, previewZoomButtonTitles: { prev: 'Toon vorig bestand', next: 'Toon volgend bestand', toggleheader: 'Toggle header', fullscreen: 'Toggle volledig scherm', borderless: 'Toggle randloze modus', close: 'Sluit gedetailleerde weergave' } }; })(window.jQuery);
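
// Usage sketch (markup and ids are illustrative): load this file after the
// main fileinput.js and select the locale via the plugin's `language` option.
//
//     <script src="js/fileinput.js"></script>
//     <script src="js/locales/nl.js"></script>
//     <script>
//         $("#upload").fileinput({language: "nl"});
//     </script>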
PypiClean
/NlpToolkit-Classification-1.0.16.tar.gz/NlpToolkit-Classification-1.0.16/Classification/Model/KnnModel.py
from functools import cmp_to_key from io import TextIOWrapper from Classification.DistanceMetric.DistanceMetric import DistanceMetric from Classification.DistanceMetric.EuclidianDistance import EuclidianDistance from Classification.Instance.CompositeInstance import CompositeInstance from Classification.Instance.Instance import Instance from Classification.InstanceList.InstanceList import InstanceList from Classification.Model.KnnInstance import KnnInstance from Classification.Model.Model import Model class KnnModel(Model): __data: InstanceList __k: int __distance_metric: DistanceMetric def constructor1(self, data: InstanceList, k: int, distanceMetric: DistanceMetric): """ Constructor that sets the data InstanceList, k value and the DistanceMetric. PARAMETERS ---------- data : InstanceList InstanceList input. k : int K value. distanceMetric : DistanceMetric DistanceMetric input. """ self.__data = data self.__k = k self.__distance_metric = distanceMetric def constructor2(self, fileName: str): self.__distance_metric = EuclidianDistance() inputFile = open(fileName, 'r') self.__k = int(inputFile.readline().strip()) self.__data = self.loadInstanceList(inputFile) inputFile.close() def loadInstanceList(self, inputFile: TextIOWrapper) -> InstanceList: types = inputFile.readline().strip().split(" ") instance_count = int(inputFile.readline().strip()) instance_list = InstanceList() for i in range(instance_count): instance_list.add(self.loadInstance(inputFile.readline().strip(), types)) return instance_list def __init__(self, data: object, k: int = None, distanceMetric: DistanceMetric = None): if isinstance(data, InstanceList): self.constructor1(data, k, distanceMetric) elif isinstance(data, str): self.constructor2(data) def predict(self, instance: Instance) -> str: """ The predict method takes an Instance as an input and finds the nearest neighbors of given instance. Then it returns the first possible class label as the predicted class. PARAMETERS ---------- instance : Instance Instance to make prediction. RETURNS ------- str The first possible class label as the predicted class. """ nearest_neighbors = self.nearestNeighbors(instance) if isinstance(instance, CompositeInstance) and nearest_neighbors.size() == 0: predicted_class = instance.getPossibleClassLabels()[0] else: predicted_class = Model.getMaximum(nearest_neighbors.getClassLabels()) return predicted_class def predictProbability(self, instance: Instance) -> dict: nearest_neighbors = self.nearestNeighbors(instance) return nearest_neighbors.classDistribution().getProbabilityDistribution() def makeComparator(self): def compare(instanceA: KnnInstance, instanceB: KnnInstance): if instanceA.distance < instanceB.distance: return -1 elif instanceA.distance > instanceB.distance: return 1 else: return 0 return compare def nearestNeighbors(self, instance: Instance) -> InstanceList: """ The nearestNeighbors method takes an Instance as an input. First it gets the possible class labels, then loops through the data InstanceList and creates new list of KnnInstances and adds the corresponding data with the distance between data and given instance. After sorting this newly created list, it loops k times and returns the first k instances as an InstanceList. PARAMETERS ---------- instance : Instance Instance to find nearest neighbors RETURNS ------- InstanceList The first k instances which are nearest to the given instance as an InstanceList. 
""" result = InstanceList() instances = [] possible_class_labels = [] if isinstance(instance, CompositeInstance): possible_class_labels = instance.getPossibleClassLabels() for i in range(self.__data.size()): if not isinstance(instance, CompositeInstance) or self.__data.get( i).getClassLabel() in possible_class_labels: instances.append(KnnInstance(self.__data.get(i), self.__distance_metric.distance(self.__data.get(i), instance))) instances.sort(key=cmp_to_key(self.makeComparator())) for i in range(min(self.__k, len(instances))): result.add(instances[i].instance) return result
PypiClean
/ARGs_OAP-2.3.2.tar.gz/ARGs_OAP-2.3.2/ARGs_OAP/bin/bbmap/repair.sh
usage(){ echo " Written by Brian Bushnell Last modified November 9, 2016 Description: Re-pairs reads that became disordered or had some mates eliminated. Please read bbmap/docs/guides/RepairGuide.txt for more information. Usage: repair.sh in=<input file> out=<pair output> outs=<singleton output> Input may be fasta, fastq, or sam, compressed or uncompressed. Parameters: in=<file> The 'in=' flag is needed if the input file is not the first parameter. 'in=stdin' will pipe from standard in. in2=<file> Use this if 2nd read of pairs are in a different file. out=<file> The 'out=' flag is needed if the output file is not the second parameter. 'out=stdout' will pipe to standard out. out2=<file> Use this to write 2nd read of pairs to a different file. outs=<file> (outsingle) Write singleton reads here. overwrite=t (ow) Set to false to force the program to abort rather than overwrite an existing file. showspeed=t (ss) Set to 'f' to suppress display of processing speed. ziplevel=2 (zl) Set to 1 (lowest) through 9 (max) to change compression level; lower compression is faster. fint=f (fixinterleaving) Fixes corrupted interleaved files using read names. Only use on files with broken interleaving - correctly interleaved files from which some reads were removed. repair=t (rp) Fixes arbitrarily corrupted paired reads by using read names. Uses much more memory than 'fint' mode. ain=f (allowidenticalnames) When detecting pair names, allows identical names, instead of requiring /1 and /2 or 1: and 2: Java Parameters: -Xmx This will set Java's memory usage, overriding autodetection. -Xmx20g will specify 20 gigs of RAM, and -Xmx200m will specify 200 megs. The max is typically 85% of physical memory. -eoom This flag will cause the process to exit if an out-of-memory exception occurs. Requires Java 8u92+. -da Disable assertions. Please contact Brian Bushnell at [email protected] if you encounter any problems. " } #This block allows symlinked shellscripts to correctly set classpath. pushd . > /dev/null DIR="${BASH_SOURCE[0]}" while [ -h "$DIR" ]; do cd "$(dirname "$DIR")" DIR="$(readlink "$(basename "$DIR")")" done cd "$(dirname "$DIR")" DIR="$(pwd)/" popd > /dev/null #DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/" CP="$DIR""current/" z="-Xmx4g" z2="-Xms4g" set=0 if [ -z "$1" ] || [[ $1 == -h ]] || [[ $1 == --help ]]; then usage exit fi calcXmx () { source "$DIR""/calcmem.sh" setEnvironment parseXmx "$@" if [[ $set == 1 ]]; then return fi freeRam 4000m 84 z="-Xmx${RAM}m" z2="-Xms${RAM}m" } calcXmx "$@" repair() { local CMD="java $EA $EOOM $z -cp $CP jgi.SplitPairsAndSingles rp $@" echo $CMD >&2 eval $CMD } repair "$@"
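
# Example invocation (file names are illustrative): re-pair two fastq files
# that lost their ordering and collect mate-less reads separately.
#   repair.sh in=reads_1.fq in2=reads_2.fq out=fixed_1.fq out2=fixed_2.fq outs=singletons.fq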
PypiClean
/ElectronCounting-1.0.0.tar.gz/ElectronCounting-1.0.0/CountingNN/archive/transform.py
import math from typing import List, Tuple, Dict, Optional import torch import torch.nn.functional as F import torchvision from torch import Tensor class ImageList: """ Structure that holds a list of images (of possibly varying sizes) as a single tensor. This works by padding the images to the same size, and storing in a field the original sizes of each image Args: tensors (tensor): Tensor containing images. image_sizes (list[tuple[int, int]]): List of Tuples each containing size of images. """ def __init__(self, tensors: Tensor, image_sizes: List[Tuple[int, int]]) -> None: self.tensors = tensors self.image_sizes = image_sizes def to(self, device: torch.device) -> "ImageList": cast_tensor = self.tensors.to(device) return ImageList(cast_tensor, self.image_sizes) class Transformer: def __init__(self, min_size, max_size, image_mean, image_std,size_divisible): self.min_size = min_size self.max_size = max_size self.image_mean = image_mean self.image_std = image_std self.size_divisible = size_divisible def __call__(self, images: List[Tensor], targets: Optional[List[Dict[str, Tensor]]] = None ) -> Tuple[ImageList, Optional[List[Dict[str, Tensor]]]]: images = [img for img in images] if targets is not None: targets = [{k: v for k,v in t.items()} for t in targets] image_sizes = [img.shape[-2:] for img in images] images = self.batch_images(images, size_divisible=self.size_divisible) image_sizes_list: List[Tuple[int, int]] = [] for image_size in image_sizes: torch._assert( len(image_size) == 2, f"Input tensors expected to have in the last two elements H and W, instead got {image_size}", ) image_sizes_list.append((image_size[0], image_size[1])) image_list = ImageList(images, image_sizes_list) return image_list, targets def normalize(self, image): if image.shape[0] == 1: image = image.repeat(3, 1, 1) dtype, device = image.dtype, image.device mean = torch.tensor(self.image_mean, dtype=dtype, device=device) std = torch.tensor(self.image_std, dtype=dtype, device=device) return (image - mean[:, None, None]) / std[:, None, None] def resize(self, image, target): ori_image_shape = image.shape[-2:] min_size = float(min(image.shape[-2:])) max_size = float(max(image.shape[-2:])) scale_factor = min(self.min_size / min_size, self.max_size / max_size) size = [round(s * scale_factor) for s in ori_image_shape] image = F.interpolate(image[None], size=size, mode='bilinear', align_corners=False)[0] if target is None: return image, target box = target['boxes'] box[:, [0, 2]] = box[:, [0, 2]] * image.shape[-1] / ori_image_shape[1] box[:, [1, 3]] = box[:, [1, 3]] * image.shape[-2] / ori_image_shape[0] target['boxes'] = box if 'masks' in target: mask = target['masks'] mask = F.interpolate(mask[None].float(), size=size)[0].byte() target['masks'] = mask return image, target # def batched_image(self, image, stride=32): # size = image.shape[-2:] # max_size = tuple(math.ceil(s / stride) * stride for s in size) # # batch_shape = (1,) + max_size # batched_img = image.new_full(batch_shape, 0) # batched_img[:, :image.shape[-2], :image.shape[-1]] = image # # return batched_img[None] # _onnx_batch_images() is an implementation of # batch_images() that is supported by ONNX tracing. 
    @torch.jit.unused
    def _onnx_batch_images(self, images: List[Tensor], size_divisible: int = 32) -> Tensor:
        max_size = []
        for i in range(images[0].dim()):
            max_size_i = torch.max(torch.stack([img.shape[i] for img in images]).to(torch.float32)).to(torch.int64)
            max_size.append(max_size_i)
        stride = size_divisible
        max_size[1] = (torch.ceil((max_size[1].to(torch.float32)) / stride) * stride).to(torch.int64)
        max_size[2] = (torch.ceil((max_size[2].to(torch.float32)) / stride) * stride).to(torch.int64)
        max_size = tuple(max_size)

        # work around for
        # pad_img[: img.shape[0], : img.shape[1], : img.shape[2]].copy_(img)
        # which is not yet supported in onnx
        padded_imgs = []
        for img in images:
            padding = [(s1 - s2) for s1, s2 in zip(max_size, tuple(img.shape))]
            padded_img = torch.nn.functional.pad(img, (0, padding[2], 0, padding[1], 0, padding[0]))
            padded_imgs.append(padded_img)

        return torch.stack(padded_imgs)

    def max_by_axis(self, the_list: List[List[int]]) -> List[int]:
        maxes = the_list[0]
        for sublist in the_list[1:]:
            for index, item in enumerate(sublist):
                maxes[index] = max(maxes[index], item)
        return maxes

    def batch_images(self, images: List[Tensor], size_divisible: int = 32) -> Tensor:
        if torchvision._is_tracing():
            # batch_images() does not export well to ONNX
            # call _onnx_batch_images() instead
            return self._onnx_batch_images(images, size_divisible)

        max_size = self.max_by_axis([list(img.shape) for img in images])
        stride = float(size_divisible)
        max_size = list(max_size)
        max_size[1] = int(math.ceil(float(max_size[1]) / stride) * stride)
        max_size[2] = int(math.ceil(float(max_size[2]) / stride) * stride)

        batch_shape = [len(images)] + max_size
        batched_imgs = images[0].new_full(batch_shape, 0)
        for i in range(batched_imgs.shape[0]):
            img = images[i]
            batched_imgs[i, : img.shape[0], : img.shape[1], : img.shape[2]].copy_(img)

        return batched_imgs

    # def postprocess(self, result, image_shape, ori_image_shape):
    #     box = result['boxes']
    #     box[:, [0, 2]] = box[:, [0, 2]] * ori_image_shape[1] / image_shape[1]
    #     box[:, [1, 3]] = box[:, [1, 3]] * ori_image_shape[0] / image_shape[0]
    #     result['boxes'] = box
    #
    #     if 'masks' in result:
    #         mask = result['masks']
    #         mask = paste_masks_in_image(mask, box, 1, ori_image_shape)
    #         result['masks'] = mask
    #
    #     return result

    def postprocess(
            self,
            result: List[Dict[str, Tensor]],
            image_shapes: List[Tuple[int, int]],
            original_image_sizes: List[Tuple[int, int]],
    ) -> List[Dict[str, Tensor]]:
        # Transformer has no training flag of its own, so default to eval-mode
        # behaviour unless a caller has explicitly set one.
        if getattr(self, "training", False):
            return result
        for i, (pred, im_s, o_im_s) in enumerate(zip(result, image_shapes, original_image_sizes)):
            boxes = pred["boxes"]
            result[i]["boxes"] = boxes
            if "masks" in pred:
                masks = pred["masks"]
                # A mask padding of 1 pixel matches the earlier (commented-out)
                # version of this method; the padding argument was missing here.
                masks = paste_masks_in_image(masks, boxes, 1, o_im_s)
                result[i]["masks"] = masks

        return result


def expand_detection(mask, box, padding):
    # Pad each mask by `padding` pixels on every side and scale the boxes by the
    # same ratio, so the padded masks still map onto the right image region.
    M = mask.shape[-1]
    scale = (M + 2 * padding) / M
    padded_mask = torch.nn.functional.pad(mask, (padding,) * 4)

    w_half = (box[:, 2] - box[:, 0]) * 0.5
    h_half = (box[:, 3] - box[:, 1]) * 0.5
    x_c = (box[:, 2] + box[:, 0]) * 0.5
    y_c = (box[:, 3] + box[:, 1]) * 0.5

    w_half = w_half * scale
    h_half = h_half * scale

    box_exp = torch.zeros_like(box)
    box_exp[:, 0] = x_c - w_half
    box_exp[:, 2] = x_c + w_half
    box_exp[:, 1] = y_c - h_half
    box_exp[:, 3] = y_c + h_half
    return padded_mask, box_exp.to(torch.int64)


def paste_masks_in_image(mask, box, padding, image_shape):
    # Resize each mask to its (expanded) box and paste it into a full-size canvas.
    mask, box = expand_detection(mask, box, padding)

    N = mask.shape[0]
    size = (N,) + tuple(image_shape)
    im_mask = torch.zeros(size, dtype=mask.dtype, device=mask.device)
    for m, b, im in zip(mask, box, im_mask):
        b = b.tolist()

        w = max(b[2] - b[0], 1)
        h = max(b[3] - b[1], 1)

        m = F.interpolate(m[None, None], size=(h, w), mode='bilinear', align_corners=False)[0][0]

        x1 = max(b[0], 0)
        y1 = max(b[1], 0)
        x2 = min(b[2], image_shape[1])
        y2 = min(b[3], image_shape[0])

        im[y1:y2, x1:x2] = m[(y1 - b[1]):(y2 - b[1]), (x1 - b[0]):(x2 - b[0])]
    return im_mask
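

# A minimal usage sketch (not part of the original module): pads two unevenly
# sized images into one batch. Shapes and normalization constants below are
# illustrative only.
if __name__ == "__main__":
    transformer = Transformer(min_size=800, max_size=1333,
                              image_mean=[0.485, 0.456, 0.406],
                              image_std=[0.229, 0.224, 0.225],
                              size_divisible=32)
    images = [torch.rand(3, 300, 400), torch.rand(3, 250, 375)]
    image_list, _ = transformer(images)
    # Both images are zero-padded to a common size divisible by 32.
    print(image_list.tensors.shape)   # torch.Size([2, 3, 320, 416])
    print(image_list.image_sizes)     # [(300, 400), (250, 375)], the original sizes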
PypiClean
/Nasdaq%20Data%20Link-1.0.4.tar.gz/Nasdaq Data Link-1.0.4/nasdaqdatalink/connection.py
import re import requests import urllib from urllib3.util.retry import Retry from requests.adapters import HTTPAdapter from .util import Util from .version import VERSION from .api_config import ApiConfig from nasdaqdatalink.errors.data_link_error import ( DataLinkError, LimitExceededError, InternalServerError, AuthenticationError, ForbiddenError, InvalidRequestError, NotFoundError, ServiceUnavailableError) class Connection: @classmethod def request(cls, http_verb, url, **options): if 'headers' in options: headers = options['headers'] else: headers = {} accept_value = 'application/json' if ApiConfig.api_version: accept_value += ", application/vnd.data.nasdaq+json;version=%s" % ApiConfig.api_version headers = Util.merge_to_dicts({'accept': accept_value, 'request-source': 'python', 'request-source-version': VERSION}, headers) if ApiConfig.api_key: headers = Util.merge_to_dicts({'x-api-token': ApiConfig.api_key}, headers) options['headers'] = headers abs_url = '%s/%s' % (ApiConfig.api_base, url) return cls.execute_request(http_verb, abs_url, **options) @classmethod def execute_request(cls, http_verb, url, **options): session = cls.get_session() try: response = session.request(method=http_verb, url=url, verify=ApiConfig.verify_ssl, **options) if response.status_code < 200 or response.status_code >= 300: cls.handle_api_error(response) else: return response except requests.exceptions.RequestException as e: if e.response: cls.handle_api_error(e.response) raise e @classmethod def get_session(cls): session = requests.Session() adapter = HTTPAdapter(max_retries=cls.get_retries()) session.mount(ApiConfig.api_protocol, adapter) proxies = urllib.request.getproxies() if proxies is not None: session.proxies.update(proxies) return session @classmethod def get_retries(cls): if not ApiConfig.use_retries: return Retry(total=0) Retry.BACKOFF_MAX = ApiConfig.max_wait_between_retries retries = Retry(total=ApiConfig.number_of_retries, connect=ApiConfig.number_of_retries, read=ApiConfig.number_of_retries, status_forcelist=ApiConfig.retry_status_codes, backoff_factor=ApiConfig.retry_backoff_factor, raise_on_status=False) return retries @classmethod def parse(cls, response): try: return response.json() except ValueError: raise DataLinkError(http_status=response.status_code, http_body=response.text) @classmethod def handle_api_error(cls, resp): error_body = cls.parse(resp) # if our app does not form a proper data_link_error response # throw generic error if 'quandl_error' not in error_body: raise DataLinkError(http_status=resp.status_code, http_body=resp.text) code = error_body['quandl_error']['code'] message = error_body['quandl_error']['message'] prog = re.compile('^QE([a-zA-Z])x') if prog.match(code): code_letter = prog.match(code).group(1) d_klass = { 'L': LimitExceededError, 'M': InternalServerError, 'A': AuthenticationError, 'P': ForbiddenError, 'S': InvalidRequestError, 'C': NotFoundError, 'X': ServiceUnavailableError } klass = d_klass.get(code_letter, DataLinkError) raise klass(message, resp.status_code, resp.text, resp.headers, code)
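
# Usage sketch (comments only; the API key and the endpoint path below are
# placeholders, not part of this module):
#
#   from nasdaqdatalink.api_config import ApiConfig
#   ApiConfig.api_key = 'YOUR_API_KEY'
#   response = Connection.request('get', 'datasets/WIKI/AAPL.json')
#   payload = Connection.parse(response)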
PypiClean
/Hector_Observations_Pipeline-1.4-py3-none-any.whl/hop/hexabundle_allocation/hector/magnets/rectangular.py
from ...general_operations.geometry_shapes.rectangle import rectangle
from ...hector.constants import rectangle_magnet_length, rectangle_magnet_width, robot_arm_width
from math import sin, cos, pi
from ...hector.magnets.pickup_areas import inward, outward

# TODO: should also inherit from the probe class here.
class rectangular_magnet(rectangle):

    # initializing a rectangular magnet with all respective parameters
    def __init__(self,center,orientation,index,galaxyORstar,Re,mu_1re,Mstar,magnet_label,hexabundle,rads,rotation_pickup,rotation_putdown,azAngs,rectangular_magnet_input_orientation,IDs, angs, plot_orientation):

        length = rectangle_magnet_length
        width = rectangle_magnet_width

        super().__init__(center,length,width,orientation)
        self.index = index
        self.placement_index = 0
        self.galaxyORstar = galaxyORstar
        self.Re = Re
        self.mu_1re = mu_1re
        self.Mstar = Mstar
        self.magnet_label = magnet_label
        self.hexabundle = hexabundle
        self.rads = rads
        self.rotation_pickup = rotation_pickup
        self.rotation_putdown = rotation_putdown
        self.azAngs = azAngs
        self.rectangular_magnet_input_orientation = rectangular_magnet_input_orientation
        self.IDs = IDs
        self.angs = angs
        self.rectangular_magnet_absolute_orientation_degree = orientation
        self.plot_orientation = plot_orientation

    # calculating the distance between magnet center and pickup area center
    def calculate_center_magnet_to_center_pickup_area_length(self):

        # center_magnet_to_center_pickup_area_length = 0.25 * (self.length + robot_arm_width)  # Tiphaine's version

        # if (robot_arm_width < ((rectangle_magnet_length - robot_arm_width) / 2)):
        #     center_magnet_to_center_pickup_area_length = 0.25 * (rectangle_magnet_length - robot_arm_width)
        # elif (robot_arm_width >= ((rectangle_magnet_length - robot_arm_width) / 2)):
        #     center_magnet_to_center_pickup_area_length = robot_arm_width / 2

        center_magnet_to_center_pickup_area_length = 0.25 * (10 + robot_arm_width)  # generic one for all RAW sizes

        return center_magnet_to_center_pickup_area_length

    # calculating the center coordinate of pickup area - outward
    def calculate_center_coordinate_outward_pickuparea(self):

        center_coordinates = \
        [self.center[0] + self.calculate_center_magnet_to_center_pickup_area_length() * sin((pi / 180.0) * (self.orientation)),
         self.center[1] + self.calculate_center_magnet_to_center_pickup_area_length() * cos((pi / 180.0) * (self.orientation))]

        return center_coordinates

    # calculating the center coordinate of pickup area - inward
    def calculate_center_coordinate_inward_pickuparea(self):

        center_coordinates = \
        [self.center[0] - self.calculate_center_magnet_to_center_pickup_area_length() * sin((pi / 180.0) * (self.orientation)),
         self.center[1] - self.calculate_center_magnet_to_center_pickup_area_length() * cos((pi / 180.0) * (self.orientation))]

        return center_coordinates

    # calculating pickup areas using their respective center coordinates
    def create_pickup_areas(self):

        self.pickup_areas = \
        [inward(self.calculate_center_coordinate_inward_pickuparea(), self.orientation),
         outward(self.calculate_center_coordinate_outward_pickuparea(), self.orientation)]

        return self.pickup_areas

# check for the magnet being of rectangular type
def is_rectangular_magnet(magnet):
    return isinstance(magnet, rectangular_magnet)
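
# Construction sketch (comments only): magnets are normally built by the
# allocation pipeline from survey data, so every value below is a placeholder.
#
#   magnet = rectangular_magnet(center=[0.0, 0.0], orientation=45.0, index=1,
#                               galaxyORstar='galaxy', Re=1.0, mu_1re=20.0,
#                               Mstar=10.5, magnet_label='R01', hexabundle='A',
#                               rads=0.1, rotation_pickup=0, rotation_putdown=0,
#                               azAngs=0.0, rectangular_magnet_input_orientation=45.0,
#                               IDs='G001', angs=0.0, plot_orientation=0.0)
#   magnet.create_pickup_areas()   # -> [inward, outward] pickup-area objects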
PypiClean
/Flask-Statics-Helper-1.0.0.tar.gz/Flask-Statics-Helper-1.0.0/flask_statics/static/angular/i18n/angular-locale_tzm-latn.js
'use strict'; angular.module("ngLocale", [], ["$provide", function($provide) { var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"}; function getDecimals(n) { n = n + ''; var i = n.indexOf('.'); return (i == -1) ? 0 : n.length - i - 1; } function getVF(n, opt_precision) { var v = opt_precision; if (undefined === v) { v = Math.min(getDecimals(n), 3); } var base = Math.pow(10, v); var f = ((n * base) | 0) % base; return {v: v, f: f}; } $provide.value("$locale", { "DATETIME_FORMATS": { "AMPMS": [ "Zdat azal", "\u1e0ceffir aza" ], "DAY": [ "Asamas", "Aynas", "Asinas", "Akras", "Akwas", "Asimwas", "Asi\u1e0dyas" ], "MONTH": [ "Yennayer", "Yebrayer", "Mars", "Ibrir", "Mayyu", "Yunyu", "Yulyuz", "\u0194uct", "Cutanbir", "K\u1e6duber", "Nwanbir", "Dujanbir" ], "SHORTDAY": [ "Asa", "Ayn", "Asn", "Akr", "Akw", "Asm", "As\u1e0d" ], "SHORTMONTH": [ "Yen", "Yeb", "Mar", "Ibr", "May", "Yun", "Yul", "\u0194uc", "Cut", "K\u1e6du", "Nwa", "Duj" ], "fullDate": "EEEE, d MMMM y", "longDate": "d MMMM y", "medium": "d MMM y h:mm:ss a", "mediumDate": "d MMM y", "mediumTime": "h:mm:ss a", "short": "dd/MM/y h:mm a", "shortDate": "dd/MM/y", "shortTime": "h:mm a" }, "NUMBER_FORMATS": { "CURRENCY_SYM": "\u20ac", "DECIMAL_SEP": ",", "GROUP_SEP": "\u00a0", "PATTERNS": [ { "gSize": 3, "lgSize": 3, "maxFrac": 3, "minFrac": 0, "minInt": 1, "negPre": "-", "negSuf": "", "posPre": "", "posSuf": "" }, { "gSize": 3, "lgSize": 3, "maxFrac": 2, "minFrac": 2, "minInt": 1, "negPre": "-", "negSuf": "\u00a0\u00a4", "posPre": "", "posSuf": "\u00a0\u00a4" } ] }, "id": "tzm-latn", "pluralCat": function(n, opt_precision) { var i = n | 0; var vf = getVF(n, opt_precision); if (i == 1 && vf.v == 0) { return PLURAL_CATEGORY.ONE; } return PLURAL_CATEGORY.OTHER;} }); }]);
PypiClean
/Hikka_Pyro_New-2.0.103-py3-none-any.whl/hikkapyro/methods/messages/send_poll.py
from datetime import datetime
from typing import Union, List

import hikkapyro
from hikkapyro import raw, utils
from hikkapyro import types, enums


class SendPoll:
    async def send_poll(
        self: "hikkapyro.Client",
        chat_id: Union[int, str],
        question: str,
        options: List[str],
        is_anonymous: bool = True,
        type: "enums.PollType" = enums.PollType.REGULAR,
        allows_multiple_answers: bool = None,
        correct_option_id: int = None,
        explanation: str = None,
        explanation_parse_mode: "enums.ParseMode" = None,
        explanation_entities: List["types.MessageEntity"] = None,
        open_period: int = None,
        close_date: datetime = None,
        is_closed: bool = None,
        disable_notification: bool = None,
        protect_content: bool = None,
        reply_to_message_id: int = None,
        schedule_date: datetime = None,
        reply_markup: Union[
            "types.InlineKeyboardMarkup",
            "types.ReplyKeyboardMarkup",
            "types.ReplyKeyboardRemove",
            "types.ForceReply"
        ] = None
    ) -> "types.Message":
        """Send a new poll.

        .. include:: /_includes/usable-by/users-bots.rst

        Parameters:
            chat_id (``int`` | ``str``):
                Unique identifier (int) or username (str) of the target chat.
                For your personal cloud (Saved Messages) you can simply use "me" or "self".
                For a contact that exists in your Telegram address book you can use his phone number (str).

            question (``str``):
                Poll question, 1-255 characters.

            options (List of ``str``):
                List of answer options, 2-10 strings 1-100 characters each.

            is_anonymous (``bool``, *optional*):
                True, if the poll needs to be anonymous.
                Defaults to True.

            type (:obj:`~pyrogram.enums.PollType`, *optional*):
                Poll type, :obj:`~pyrogram.enums.PollType.QUIZ` or :obj:`~pyrogram.enums.PollType.REGULAR`.
                Defaults to :obj:`~pyrogram.enums.PollType.REGULAR`.

            allows_multiple_answers (``bool``, *optional*):
                True, if the poll allows multiple answers, ignored for polls in quiz mode.
                Defaults to False.

            correct_option_id (``int``, *optional*):
                0-based identifier of the correct answer option, required for polls in quiz mode.

            explanation (``str``, *optional*):
                Text that is shown when a user chooses an incorrect answer or taps on the lamp icon in a
                quiz-style poll, 0-200 characters with at most 2 line feeds after entities parsing.

            explanation_parse_mode (:obj:`~pyrogram.enums.ParseMode`, *optional*):
                By default, texts are parsed using both Markdown and HTML styles.
                You can combine both syntaxes together.

            explanation_entities (List of :obj:`~pyrogram.types.MessageEntity`):
                List of special entities that appear in the poll explanation, which can be specified instead of
                *parse_mode*.

            open_period (``int``, *optional*):
                Amount of time in seconds the poll will be active after creation, 5-600.
                Can't be used together with *close_date*.

            close_date (:py:obj:`~datetime.datetime`, *optional*):
                Point in time when the poll will be automatically closed.
                Must be at least 5 and no more than 600 seconds in the future.
                Can't be used together with *open_period*.

            is_closed (``bool``, *optional*):
                Pass True, if the poll needs to be immediately closed.
                This can be useful for poll preview.

            disable_notification (``bool``, *optional*):
                Sends the message silently.
                Users will receive a notification with no sound.

            protect_content (``bool``, *optional*):
                Protects the contents of the sent message from forwarding and saving.

            reply_to_message_id (``int``, *optional*):
                If the message is a reply, ID of the original message.

            schedule_date (:py:obj:`~datetime.datetime`, *optional*):
                Date when the message will be automatically sent.

reply_markup (:obj:`~pyrogram.types.InlineKeyboardMarkup` | :obj:`~pyrogram.types.ReplyKeyboardMarkup` | :obj:`~pyrogram.types.ReplyKeyboardRemove` | :obj:`~pyrogram.types.ForceReply`, *optional*): Additional interface options. An object for an inline keyboard, custom reply keyboard, instructions to remove reply keyboard or to force a reply from the user. Returns: :obj:`~pyrogram.types.Message`: On success, the sent poll message is returned. Example: .. code-block:: python await app.send_poll(chat_id, "Is this a poll question?", ["Yes", "No", "Maybe"]) """ solution, solution_entities = (await utils.parse_text_entities( self, explanation, explanation_parse_mode, explanation_entities )).values() r = await self.invoke( raw.functions.messages.SendMedia( peer=await self.resolve_peer(chat_id), media=raw.types.InputMediaPoll( poll=raw.types.Poll( id=self.rnd_id(), question=question, answers=[ raw.types.PollAnswer(text=text, option=bytes([i])) for i, text in enumerate(options) ], closed=is_closed, public_voters=not is_anonymous, multiple_choice=allows_multiple_answers, quiz=type == enums.PollType.QUIZ or False, close_period=open_period, close_date=utils.datetime_to_timestamp(close_date) ), correct_answers=[bytes([correct_option_id])] if correct_option_id is not None else None, solution=solution, solution_entities=solution_entities or [] ), message="", silent=disable_notification, reply_to_msg_id=reply_to_message_id, random_id=self.rnd_id(), schedule_date=utils.datetime_to_timestamp(schedule_date), noforwards=protect_content, reply_markup=await reply_markup.write(self) if reply_markup else None ) ) for i in r.updates: if isinstance(i, (raw.types.UpdateNewMessage, raw.types.UpdateNewChannelMessage, raw.types.UpdateNewScheduledMessage)): return await types.Message._parse( self, i.message, {i.id: i for i in r.users}, {i.id: i for i in r.chats}, is_scheduled=isinstance(i, raw.types.UpdateNewScheduledMessage) )
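
# Usage sketch beyond the docstring example (all values illustrative): a
# quiz-mode poll with one correct answer.
#
#   await app.send_poll(
#       chat_id, "2 + 2 = ?", ["3", "4", "5"],
#       type=enums.PollType.QUIZ,
#       correct_option_id=1,
#       explanation="Basic arithmetic."
#   )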
PypiClean
/Bedframe-0.13.5.tar.gz/Bedframe-0.13.5/README.rst
######## Bedframe ######## Bedframe is a resource-oriented web services framework. ************ Installation ************ A Bedframe service runs on an underlying web server. Support for each particular web server is provided via a corresponding plugin. Bedframe releases include some web server plugins, which are activated by installing their corresponding package extras. These are the currently supported extras: ``tornado`` Support for the Tornado_ web server (via tornado.web.Application_). ``tornado_wsgi`` Support for the Tornado_ WSGI web server (via tornado.wsgi.WSGIApplication_). For example, to install Bedframe with support for the Tornado WSGI web server, you can run .. code-block:: bash pip install bedframe[tornado_wsgi] In addition, Bedframe supports these other extras: ``ldap`` Support for the `Lightweight Directory Access Protocol`_ (LDAP) for authentication (via python-ldap_). ``memcached`` Support for memcached_ for authentication (via python-memcached_). ``test_ldap`` Support for the `Lightweight Directory Access Protocol`_ (LDAP) for automated testing (via Spruce-ldap_ and OpenLDAP_). .. _Lightweight Directory Access Protocol: https://tools.ietf.org/html/rfc4510 .. _memcached: http://www.memcached.org/ .. _OpenLDAP: http://www.openldap.org/ .. _python-ldap: https://pypi.python.org/pypi/python-ldap .. _python-memcached: https://pypi.python.org/pypi/python-memcached .. _Spruce-ldap: https://pypi.python.org/pypi/Spruce-ldap .. _Tornado: http://www.tornadoweb.org/ .. _tornado.web.Application: http://www.tornadoweb.org/en/stable/web.html#tornado.web.Application .. _tornado.wsgi.WSGIApplication: http://www.tornadoweb.org/en/stable/wsgi.html#tornado.wsgi.WSGIApplication ******** Examples ******** "Hello, world" service ====================== .. code-block:: python import bedframe as _bedframe import bedframe.webtypes as _webtypes class HelloWorldResource(_bedframe.WebResource): @_bedframe.webmethod(_webtypes.unicode) def get(self): return u'Hello, world!' service = _bedframe.WebService(uris=('http://localhost:8080',)) service.resources[r'/helloworld'] = HelloWorldResource service.start() Example usage (Napper): .. code-block:: python >>> import bedframe.webtypes as _webtypes >>> import napper as _napper >>> uri = 'http://localhost:8080/helloworld' >>> response = _napper.request_uri('get', uri) >>> hello = _napper.extract_retval(response, _webtypes.unicode) >>> print hello Hello, world! Example usage (`Requests <https://pypi.python.org/pypi/requests>`_): .. code-block:: python >>> import requests as _requests >>> uri = 'http://localhost:8080/helloworld' >>> headers = {'Accept': ', '.join(('application/json', '*/*; q=0.01'))} >>> response = _requests.get(uri, headers=headers) >>> hello = response.json()['retval'] >>> print hello Hello, world! Example usage (`HTTPie <https://pypi.python.org/pypi/httpie>`_): .. code-block:: bash $ uri='http://localhost:8080/helloworld' $ http get "$uri" Accept:'application/json,*/*; q=0.01' --body { "auth_info": { "accepted": null, "realm": null, "user": null }, "retval": "Hello, world!", "type": "bedframe._responses._return:WebReturnResponse" }
PypiClean
/Box2D-2.3.2.tar.gz/Box2D-2.3.2/examples/settings.py
class fwSettings(object): # The default backend to use in (can be: pyglet, pygame, etc.) backend = 'pygame' # Physics options hz = 60.0 velocityIterations = 8 positionIterations = 3 # Makes physics results more accurate (see Box2D wiki) enableWarmStarting = True enableContinuous = True # Calculate time of impact enableSubStepping = False # Drawing drawStats = True drawShapes = True drawJoints = True drawCoreShapes = False drawAABBs = False drawOBBs = False drawPairs = False drawContactPoints = False maxContactPoints = 100 drawContactNormals = False drawFPS = True drawMenu = True # toggle by pressing F1 drawCOMs = False # Centers of mass pointSize = 2.5 # pixel radius for drawing points # Miscellaneous testbed options pause = False singleStep = False # run the test's initialization without graphics, and then quit (for # testing) onlyInit = False # text variable checkboxes = (("Warm Starting", "enableWarmStarting"), ("Time of Impact", "enableContinuous"), ("Sub-Stepping", "enableSubStepping"), ("Draw", None), ("Shapes", "drawShapes"), ("Joints", "drawJoints"), ("AABBs", "drawAABBs"), ("Pairs", "drawPairs"), ("Contact Points", "drawContactPoints"), ("Contact Normals", "drawContactNormals"), ("Center of Masses", "drawCOMs"), ("Statistics", "drawStats"), ("FPS", "drawFPS"), ("Control", None), ("Pause", "pause"), ("Single Step", "singleStep")) sliders = [ {'name': 'hz', 'text': 'Hertz', 'min': 5, 'max': 200}, {'name': 'positionIterations', 'text': 'Pos Iters', 'min': 0, 'max': 100}, {'name': 'velocityIterations', 'text': 'Vel Iters', 'min': 1, 'max': 500}, ] from optparse import OptionParser parser = OptionParser() list_options = [i for i in dir(fwSettings) if not i.startswith('_')] for opt_name in list_options: value = getattr(fwSettings, opt_name) if isinstance(value, bool): if value: parser.add_option('', '--no-' + opt_name, dest=opt_name, default=value, action='store_' + str(not value).lower(), help="don't " + opt_name) else: parser.add_option('', '--' + opt_name, dest=opt_name, default=value, action='store_' + str(not value).lower(), help=opt_name) else: if isinstance(value, int): opttype = 'int' elif isinstance(value, float): opttype = 'float' else: opttype = 'string' parser.add_option('', '--' + opt_name, dest=opt_name, default=value, type=opttype, help='sets the %s option' % (opt_name,)) fwSettings, args = parser.parse_args()
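
# Command-line sketch: every public fwSettings attribute above is turned into a
# flag by the loop over list_options, so a testbed example (script name
# illustrative) can be driven like:
#
#   python examples/simple_01.py --backend=pyglet --hz=120 --no-drawStats --pause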
PypiClean
/CynSMS_Python_Package-1.0-py3-none-any.whl/cyn_sms_python_package/http/http_request.py
from cyn_sms_python_package.api_helper import APIHelper


class HttpRequest(object):

    """Information about an HTTP Request including its method, headers,
        parameters, URL, and body parameters.

    Attributes:
        http_method (HttpMethodEnum): The HTTP Method that this request should
            perform when called.
        headers (dict): A dictionary of headers (key : value) that should be
            sent along with the request.
        query_url (string): The URL that the request should be sent to.
        parameters (dict): A dictionary of parameters that are to be sent along
            with the request in the form body of the request

    """

    def __init__(self,
                 http_method,
                 query_url,
                 headers=None,
                 query_parameters=None,
                 parameters=None,
                 files=None):
        """Constructor for the HttpRequest class

        Args:
            http_method (HttpMethodEnum): The HTTP Method.
            query_url (string): The URL to send the request to.
            headers (dict, optional): The headers for the HTTP Request.
            query_parameters (dict, optional): Query parameters to add in the
                URL.
            parameters (dict, optional): Form or body parameters to be included
                in the body.
            files (dict, optional): Files to be sent with the request.

        """
        self.http_method = http_method
        self.query_url = query_url
        # Default the mutable containers to empty dicts so that add_header()
        # and add_parameter() work even when no initial values were passed in.
        self.headers = headers if headers is not None else {}
        self.query_parameters = query_parameters if query_parameters is not None else {}
        self.parameters = parameters if parameters is not None else {}
        self.files = files

    def add_header(self, name, value):
        """ Add a header to the HttpRequest.

        Args:
            name (string): The name of the header.
            value (string): The value of the header.

        """
        self.headers[name] = value

    def add_parameter(self, name, value):
        """ Add a parameter to the HttpRequest.

        Args:
            name (string): The name of the parameter.
            value (string): The value of the parameter.

        """
        self.parameters[name] = value

    def add_query_parameter(self, name, value):
        """ Add a query parameter to the HttpRequest.

        Args:
            name (string): The name of the query parameter.
            value (string): The value of the query parameter.

        """
        self.query_url = APIHelper.append_url_with_query_parameters(
            self.query_url,
            {name: value}
        )
        self.query_url = APIHelper.clean_url(self.query_url)
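
# Usage sketch (comments only; the method string, URL and token below are
# placeholders):
#
#   request = HttpRequest('GET', 'https://api.example.com/v1/messages')
#   request.add_header('Authorization', 'Bearer <token>')
#   request.add_parameter('to', '+15551234567')
#   request.add_query_parameter('page', 1)   # appends ?page=1 via APIHelper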
PypiClean
/ORR_Optimization-0.2-py3-none-any.whl/orr_optimizer/graph_theory.py
class Graph(object):

    def __init__(self, graph_dict=None):
        """ initializes a graph object
            If no dictionary or None is given,
            an empty dictionary will be used
        """
        if graph_dict is None:
            graph_dict = {}
        self.__graph_dict = graph_dict

    def copy_data(self):
        '''
        returns a copy of the graph with the same dictionary
        '''
        c = Graph()
        c.__graph_dict = {}
        for vertex in self.__graph_dict:
            c.__graph_dict[vertex] = []
            for nn in self.__graph_dict[vertex]:
                c.__graph_dict[vertex].append(nn)
        return c

    def vertices(self):
        """ returns the vertices of a graph """
        return list(self.__graph_dict.keys())

    def edges(self):
        """ returns the edges of a graph """
        return self.__generate_edges()

    def add_vertex(self, vertex):
        """ If the vertex "vertex" is not in
            self.__graph_dict, a key "vertex" with an empty
            list as a value is added to the dictionary.
            Otherwise nothing has to be done.
        """
        if vertex not in self.__graph_dict:
            self.__graph_dict[vertex] = []

    def remove_vertex(self, vertex):
        '''
        Delete a node from the graph
        '''
        # Delete vertex from the neighbor list of all of its neighbors
        adj_vertices = self.__graph_dict[vertex]
        for nn in adj_vertices:
            self.__graph_dict[nn].remove(vertex)
        # Remove vertex from the graph completely
        self.__graph_dict.pop(vertex, None)

    def add_edge(self, edge):
        """ assumes that edge is of type set, tuple or list;
            between two vertices there can be multiple edges!
        """
        edge = set(edge)
        (vertex1, vertex2) = tuple(edge)
        if vertex1 in self.__graph_dict:
            self.__graph_dict[vertex1].append(vertex2)
        else:
            self.__graph_dict[vertex1] = [vertex2]
        if vertex2 in self.__graph_dict:
            self.__graph_dict[vertex2].append(vertex1)
        else:
            self.__graph_dict[vertex2] = [vertex1]

    def is_node(self, vertex):
        return vertex in self.__graph_dict

    def get_neighbors(self, vertex):
        return self.__graph_dict[vertex]

    def __generate_edges(self):
        """ Generates the edges of this graph.
            Edges are represented as sets
            with one (a loop back to the vertex) or two
            vertices
        """
        edges = []
        for vertex in self.__graph_dict:
            for neighbour in self.__graph_dict[vertex]:
                if {neighbour, vertex} not in edges:
                    edges.append({vertex, neighbour})
        return edges

    def __str__(self):
        res = "vertices: "
        for k in self.__graph_dict:
            res += str(k) + " "
        res += "\nedges: "
        for edge in self.__generate_edges():
            res += str(edge) + " "
        return res

    def get_coordination_number(self, vertex):
        """ The degree of a vertex is the number of edges connecting
            it, i.e. the number of adjacent vertices. Loops are counted
            double, i.e. every occurrence of the vertex in the list
            of adjacent vertices.
        """
        return len(self.__graph_dict[vertex])

    def get_generalized_coordination_number(self, vertex, CN_max=12):
        """
        Compute the GCN of a vertex

        :param vertex: Index of the node/vertex
        :param CN_max: Maximum coordination number. It is 12 for an fcc metal
        :returns: The generalized coordination number of vertex
        """
        GCN = 0
        adj_vertices = self.__graph_dict[vertex]
        for nn in adj_vertices:
            GCN += len(self.__graph_dict[nn]) / float(CN_max)
        return GCN
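

if __name__ == "__main__":
    # Minimal demonstration (not part of the original module) on a triangle graph.
    g = Graph({"a": ["b", "c"], "b": ["a", "c"], "c": ["a", "b"]})
    print(g.vertices())                                # ['a', 'b', 'c']
    print(g.edges())                                   # three undirected edges as sets
    print(g.get_coordination_number("a"))              # 2
    print(g.get_generalized_coordination_number("a"))  # (2 + 2) / 12 = 0.333...
    g2 = g.copy_data()
    g2.remove_vertex("c")
    print(g2.vertices())                               # ['a', 'b']; g is unchanged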
PypiClean
/DjangoDjangoAppCenter-0.0.11-py3-none-any.whl/DjangoAppCenter/simpleui/static/admin/simpleui-x/elementui/umd/locale/nb-NO.js
(function (global, factory) { if (typeof define === "function" && define.amd) { define('element/locale/nb-NO', ['module', 'exports'], factory); } else if (typeof exports !== "undefined") { factory(module, exports); } else { var mod = { exports: {} }; factory(mod, mod.exports); global.ELEMENT.lang = global.ELEMENT.lang || {}; global.ELEMENT.lang.nbNO = mod.exports; } })(this, function (module, exports) { 'use strict'; exports.__esModule = true; exports.default = { el: { colorpicker: { confirm: 'OK', clear: 'Tøm' }, datepicker: { now: 'Nå', today: 'I dag', cancel: 'Avbryt', clear: 'Tøm', confirm: 'OK', selectDate: 'Velg dato', selectTime: 'Velg tidspunkt', startDate: 'Start Dato', startTime: 'Start Tidspunkt', endDate: 'Sluttdato', endTime: 'Sluttidspunkt', prevYear: 'Previous Year', // to be translated nextYear: 'Next Year', // to be translated prevMonth: 'Previous Month', // to be translated nextMonth: 'Next Month', // to be translated year: '', month1: 'Januar', month2: 'Februar', month3: 'Mars', month4: 'April', month5: 'Mai', month6: 'Juni', month7: 'Juli', month8: 'August', month9: 'September', month10: 'Oktober', month11: 'November', month12: 'Desember', // week: 'week', weeks: { sun: 'Søn', mon: 'Man', tue: 'Tir', wed: 'Ons', thu: 'Tor', fri: 'Fre', sat: 'Lør' }, months: { jan: 'Jan', feb: 'Feb', mar: 'Mar', apr: 'Apr', may: 'Mai', jun: 'Jun', jul: 'Jul', aug: 'Aug', sep: 'Sep', oct: 'Okt', nov: 'Nov', dec: 'Des' } }, select: { loading: 'Laster', noMatch: 'Ingen samsvarende data', noData: 'Ingen data', placeholder: 'Velg' }, cascader: { noMatch: 'Ingen samsvarende data', loading: 'Laster', placeholder: 'Velg', noData: 'Ingen data' }, pagination: { goto: 'Gå til', pagesize: '/side', total: 'Total {total}', pageClassifier: '' }, messagebox: { confirm: 'OK', cancel: 'Avbryt', error: 'Ugyldig input' }, upload: { deleteTip: 'press delete to remove', // to be translated delete: 'Slett', preview: 'Forhåndsvisning', continue: 'Fortsett' }, table: { emptyText: 'Ingen Data', confirmFilter: 'Bekreft', resetFilter: 'Tilbakestill', clearFilter: 'Alle', sumText: 'Sum' // to be translated }, tree: { emptyText: 'Ingen Data' }, transfer: { noMatch: 'Ingen samsvarende data', noData: 'Ingen data', titles: ['List 1', 'List 2'], // to be translated filterPlaceholder: 'Enter keyword', // to be translated noCheckedFormat: '{total} items', // to be translated hasCheckedFormat: '{checked}/{total} checked' // to be translated }, image: { error: 'FAILED' // to be translated }, pageHeader: { title: 'Back' // to be translated } } }; module.exports = exports['default']; });
PypiClean
/LinOTP-2.11.1.tar.gz/LinOTP-2.11.1/tools/README-migrate.txt
#
# LinOTP - the open source solution for two factor authentication
# Copyright (C) 2010 - 2019 KeyIdentity GmbH
#
# This file is part of LinOTP server.
#
# This program is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public
# License, version 3, as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the
# GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
# E-mail: [email protected]
# Contact: www.linotp.org
# Support: www.keyidentity.com
#


Background information
======================

This tool migrates data from a LinOTP 1.0 installation to a new LinOTP2
installation. Please note that the data within LinOTP 1.0 were not encrypted.

A LinOTP 1.0 installation might hold the following data within the LDAP server:

   LinOtpKey: 1a9782105af443def89d07d5ea3eb323
   LinOtpFailcount: 0
   LinOtpMaxfail: 50
   LinOtpIsactive: TRUE
   LinOtpCount: 10
   LinOtpPin: abc

This tool reads the data

   LinOtpKey
   LinOtpIsactive
   LinOtpCount
   LinOtpPin

and stores them to the new LinOTP2 installation.


Gathering information
=====================

You need the following information for running the migration:

BindDN: the bind DN of your current LinOTP 1.0 installation, which is allowed
   to read the above information from the LDAP server

BindPW: the password for the above BindDN

LDAP-URI: where your LDAP server is located and whether it runs ldap or ldaps.
   Your LDAP URI might look like ldap://192.168.20.118 or
   ldaps://linotpserver.domain.com

Filter: the filter with which your LinOTP installation finds its users. You
   might get a hint from the file /etc/otpadm/otpadmrc. This file may contain
   a line like

      --filter cn=%s,ou=users,dc=domain,dc=com

   Then your filter will be

      cn=*,ou=users,dc=domain,dc=com

LinOTP2 Server: the URL of your new LinOTP2 server. This might look like
   https://linotp2.domain.com

LinOTP2 admin account: a username and a password of the LinOTP2 management
   account.


Migrating data
==============

The migration script will not change any data on your existing installation,
so you need not be afraid of breaking anything in your production
installation. You may run the migration again at any later time.

You need to perform the following steps:

1. Install LinOTP 2 server
2. Define LinOTP 2 useridresolver
3. Run the migration script
4. Test LinOTP 2

1. Install LinOTP 2 server
--------------------------

Before running the migration script, you need to set up a running LinOTP2
server. You may install this on a new machine.

2. Define LinOTP 2 useridresolver
---------------------------------

You need to define a useridresolver and put it into the default realm. You
need not use the old user store in the new LinOTP 2 installation. The only
requirement is that the login name in the new useridresolver is the same as
the login name in the old LinOTP 1.0 installation. This is because the old
login name is read from LinOTP 1.0 by the filter cn=*,ou=... or uid=*,ou=...,
and this very same login name will be used to assign the token to the user in
the new LinOTP 2 installation.

3. Run the migration script
---------------------------

You now need to run the migration script on the LinOTP 2 server.
The migration script also needs libraries from the linotpadminclient package,
so please ensure that this package is installed on the system.

The script may be started like this:

python linotpmigrate.py --binddn='cn=admin,dc=az,dc=local'  --ldap=ldap://192.168.20.118 --filter='cn=*,ou=users,dc=az,dc=local' --loginattr=cn --linotp2=https://localhost --admin=admin > migration.log

Log output is then written to migration.log.

4. Test LinOTP 2 server
-----------------------

Check the log file for any strange output.

The token information, secret key and OTP PIN should now be migrated to the
LinOTP 2 server.

You may now test a token by checking the URL within your browser:

https://linotpserver2/validate/simplecheck?user=<username>&pass=<otppin><otpvalue>

On success a :-) smiley will be returned.
PypiClean
/ClueDojo-1.4.3-1.tar.gz/ClueDojo-1.4.3-1/src/cluedojo/static/dojo/_base/lang.js
if(!dojo._hasResource["dojo._base.lang"]){ dojo._hasResource["dojo._base.lang"]=true; dojo.provide("dojo._base.lang"); (function(){ var d=dojo,_1=Object.prototype.toString; dojo.isString=function(it){ return (typeof it=="string"||it instanceof String); }; dojo.isArray=function(it){ return it&&(it instanceof Array||typeof it=="array"); }; dojo.isFunction=function(it){ return _1.call(it)==="[object Function]"; }; dojo.isObject=function(it){ return it!==undefined&&(it===null||typeof it=="object"||d.isArray(it)||d.isFunction(it)); }; dojo.isArrayLike=function(it){ return it&&it!==undefined&&!d.isString(it)&&!d.isFunction(it)&&!(it.tagName&&it.tagName.toLowerCase()=="form")&&(d.isArray(it)||isFinite(it.length)); }; dojo.isAlien=function(it){ return it&&!d.isFunction(it)&&/\{\s*\[native code\]\s*\}/.test(String(it)); }; dojo.extend=function(_2,_3){ for(var i=1,l=arguments.length;i<l;i++){ d._mixin(_2.prototype,arguments[i]); } return _2; }; dojo._hitchArgs=function(_4,_5){ var _6=d._toArray(arguments,2); var _7=d.isString(_5); return function(){ var _8=d._toArray(arguments); var f=_7?(_4||d.global)[_5]:_5; return f&&f.apply(_4||this,_6.concat(_8)); }; }; dojo.hitch=function(_9,_a){ if(arguments.length>2){ return d._hitchArgs.apply(d,arguments); } if(!_a){ _a=_9; _9=null; } if(d.isString(_a)){ _9=_9||d.global; if(!_9[_a]){ throw (["dojo.hitch: scope[\"",_a,"\"] is null (scope=\"",_9,"\")"].join("")); } return function(){ return _9[_a].apply(_9,arguments||[]); }; } return !_9?_a:function(){ return _a.apply(_9,arguments||[]); }; }; dojo.delegate=dojo._delegate=(function(){ function _b(){ }; return function(_c,_d){ _b.prototype=_c; var _e=new _b(); _b.prototype=null; if(_d){ d._mixin(_e,_d); } return _e; }; })(); var _f=function(obj,_10,_11){ return (_11||[]).concat(Array.prototype.slice.call(obj,_10||0)); }; var _12=function(obj,_13,_14){ var arr=_14||[]; for(var x=_13||0;x<obj.length;x++){ arr.push(obj[x]); } return arr; }; dojo._toArray=d.isIE?function(obj){ return ((obj.item)?_12:_f).apply(this,arguments); }:_f; dojo.partial=function(_15){ var arr=[null]; return d.hitch.apply(d,arr.concat(d._toArray(arguments))); }; var _16=d._extraNames,_17=_16.length,_18={}; dojo.clone=function(o){ if(!o||typeof o!="object"||d.isFunction(o)){ return o; } if(o.nodeType&&"cloneNode" in o){ return o.cloneNode(true); } if(o instanceof Date){ return new Date(o.getTime()); } var r,i,l,s,_19; if(d.isArray(o)){ r=[]; for(i=0,l=o.length;i<l;++i){ if(i in o){ r.push(d.clone(o[i])); } } }else{ r=o.constructor?new o.constructor():{}; } for(_19 in o){ s=o[_19]; if(!(_19 in r)||(r[_19]!==s&&(!(_19 in _18)||_18[_19]!==s))){ r[_19]=d.clone(s); } } if(_17){ for(i=0;i<_17;++i){ _19=_16[i]; s=o[_19]; if(!(_19 in r)||(r[_19]!==s&&(!(_19 in _18)||_18[_19]!==s))){ r[_19]=s; } } } return r; }; dojo.trim=String.prototype.trim?function(str){ return str.trim(); }:function(str){ return str.replace(/^\s\s*/,"").replace(/\s\s*$/,""); }; var _1a=/\{([^\}]+)\}/g; dojo.replace=function(_1b,map,_1c){ return _1b.replace(_1c||_1a,d.isFunction(map)?map:function(_1d,k){ return d.getObject(k,false,map); }); }; })(); }
PypiClean
/NVDA-addonTemplate-0.5.2.zip/NVDA-addonTemplate-0.5.2/NVDAAddonTemplate/data/{{cookiecutter.project_slug}}/scons-local-2.5.0/SCons/SConf.py
__revision__ = "src/engine/SCons/SConf.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog" import SCons.compat import io import os import re import sys import traceback import SCons.Action import SCons.Builder import SCons.Errors import SCons.Job import SCons.Node.FS import SCons.Taskmaster import SCons.Util import SCons.Warnings import SCons.Conftest from SCons.Debug import Trace # Turn off the Conftest error logging SCons.Conftest.LogInputFiles = 0 SCons.Conftest.LogErrorMessages = 0 # Set build_type = None build_types = ['clean', 'help'] def SetBuildType(type): global build_type build_type = type # to be set, if we are in dry-run mode dryrun = 0 AUTO=0 # use SCons dependency scanning for up-to-date checks FORCE=1 # force all tests to be rebuilt CACHE=2 # force all tests to be taken from cache (raise an error, if necessary) cache_mode = AUTO def SetCacheMode(mode): """Set the Configure cache mode. mode must be one of "auto", "force", or "cache".""" global cache_mode if mode == "auto": cache_mode = AUTO elif mode == "force": cache_mode = FORCE elif mode == "cache": cache_mode = CACHE else: raise ValueError("SCons.SConf.SetCacheMode: Unknown mode " + mode) progress_display = SCons.Util.display # will be overwritten by SCons.Script def SetProgressDisplay(display): """Set the progress display to use (called from SCons.Script)""" global progress_display progress_display = display SConfFS = None _ac_build_counter = 0 # incremented, whenever TryBuild is called _ac_config_logs = {} # all config.log files created in this build _ac_config_hs = {} # all config.h files created in this build sconf_global = None # current sconf object def _createConfigH(target, source, env): t = open(str(target[0]), "w") defname = re.sub('[^A-Za-z0-9_]', '_', str(target[0]).upper()) t.write("""#ifndef %(DEFNAME)s_SEEN #define %(DEFNAME)s_SEEN """ % {'DEFNAME' : defname}) t.write(source[0].get_contents()) t.write(""" #endif /* %(DEFNAME)s_SEEN */ """ % {'DEFNAME' : defname}) t.close() def _stringConfigH(target, source, env): return "scons: Configure: creating " + str(target[0]) def NeedConfigHBuilder(): if len(_ac_config_hs) == 0: return False else: return True def CreateConfigHBuilder(env): """Called if necessary just before the building targets phase begins.""" action = SCons.Action.Action(_createConfigH, _stringConfigH) sconfigHBld = SCons.Builder.Builder(action=action) env.Append( BUILDERS={'SConfigHBuilder':sconfigHBld} ) for k in _ac_config_hs.keys(): env.SConfigHBuilder(k, env.Value(_ac_config_hs[k])) class SConfWarning(SCons.Warnings.Warning): pass SCons.Warnings.enableWarningClass(SConfWarning) # some error definitions class SConfError(SCons.Errors.UserError): def __init__(self,msg): SCons.Errors.UserError.__init__(self,msg) class ConfigureDryRunError(SConfError): """Raised when a file or directory needs to be updated during a Configure process, but the user requested a dry-run""" def __init__(self,target): if not isinstance(target, SCons.Node.FS.File): msg = 'Cannot create configure directory "%s" within a dry-run.' % str(target) else: msg = 'Cannot update configure test "%s" within a dry-run.' % str(target) SConfError.__init__(self,msg) class ConfigureCacheError(SConfError): """Raised when a use explicitely requested the cache feature, but the test is run the first time.""" def __init__(self,target): SConfError.__init__(self, '"%s" is not yet built and cache is forced.' 
% str(target)) # define actions for building text files def _createSource( target, source, env ): fd = open(str(target[0]), "w") fd.write(source[0].get_contents()) fd.close() def _stringSource( target, source, env ): return (str(target[0]) + ' <-\n |' + source[0].get_contents().replace( '\n', "\n |" ) ) class SConfBuildInfo(SCons.Node.FS.FileBuildInfo): """ Special build info for targets of configure tests. Additional members are result (did the builder succeed last time?) and string, which contains messages of the original build phase. """ __slots__ = ('result', 'string') def __init__(self): self.result = None # -> 0/None -> no error, != 0 error self.string = None # the stdout / stderr output when building the target def set_build_result(self, result, string): self.result = result self.string = string class Streamer(object): """ 'Sniffer' for a file-like writable object. Similar to the unix tool tee. """ def __init__(self, orig): self.orig = orig self.s = io.StringIO() def write(self, str): if self.orig: self.orig.write(str) try: self.s.write(str) except TypeError as e: # "unicode argument expected" bug in IOStream (python 2.x) self.s.write(str.decode()) def writelines(self, lines): for l in lines: self.write(l + '\n') def getvalue(self): """ Return everything written to orig since the Streamer was created. """ return self.s.getvalue() def flush(self): if self.orig: self.orig.flush() self.s.flush() class SConfBuildTask(SCons.Taskmaster.AlwaysTask): """ This is almost the same as SCons.Script.BuildTask. Handles SConfErrors correctly and knows about the current cache_mode. """ def display(self, message): if sconf_global.logstream: sconf_global.logstream.write("scons: Configure: " + message + "\n") def display_cached_string(self, bi): """ Logs the original builder messages, given the SConfBuildInfo instance bi. """ if not isinstance(bi, SConfBuildInfo): SCons.Warnings.warn(SConfWarning, "The stored build information has an unexpected class: %s" % bi.__class__) else: self.display("The original builder output was:\n" + (" |" + str(bi.string)).replace("\n", "\n |")) def failed(self): # check, if the reason was a ConfigureDryRunError or a # ConfigureCacheError and if yes, reraise the exception exc_type = self.exc_info()[0] if issubclass(exc_type, SConfError): raise elif issubclass(exc_type, SCons.Errors.BuildError): # we ignore Build Errors (occurs, when a test doesn't pass) # Clear the exception to prevent the contained traceback # to build a reference cycle. 
self.exc_clear() else: self.display('Caught exception while building "%s":\n' % self.targets[0]) sys.excepthook(*self.exc_info()) return SCons.Taskmaster.Task.failed(self) def collect_node_states(self): # returns (is_up_to_date, cached_error, cachable) # where is_up_to_date is 1, if the node(s) are up_to_date # cached_error is 1, if the node(s) are up_to_date, but the # build will fail # cachable is 0, if some nodes are not in our cache T = 0 changed = False cached_error = False cachable = True for t in self.targets: if T: Trace('%s' % (t)) bi = t.get_stored_info().binfo if isinstance(bi, SConfBuildInfo): if T: Trace(': SConfBuildInfo') if cache_mode == CACHE: t.set_state(SCons.Node.up_to_date) if T: Trace(': set_state(up_to-date)') else: if T: Trace(': get_state() %s' % t.get_state()) if T: Trace(': changed() %s' % t.changed()) if (t.get_state() != SCons.Node.up_to_date and t.changed()): changed = True if T: Trace(': changed %s' % changed) cached_error = cached_error or bi.result else: if T: Trace(': else') # the node hasn't been built in a SConf context or doesn't # exist cachable = False changed = ( t.get_state() != SCons.Node.up_to_date ) if T: Trace(': changed %s' % changed) if T: Trace('\n') return (not changed, cached_error, cachable) def execute(self): if not self.targets[0].has_builder(): return sconf = sconf_global is_up_to_date, cached_error, cachable = self.collect_node_states() if cache_mode == CACHE and not cachable: raise ConfigureCacheError(self.targets[0]) elif cache_mode == FORCE: is_up_to_date = 0 if cached_error and is_up_to_date: self.display("Building \"%s\" failed in a previous run and all " "its sources are up to date." % str(self.targets[0])) binfo = self.targets[0].get_stored_info().binfo self.display_cached_string(binfo) raise SCons.Errors.BuildError # will be 'caught' in self.failed elif is_up_to_date: self.display("\"%s\" is up to date." % str(self.targets[0])) binfo = self.targets[0].get_stored_info().binfo self.display_cached_string(binfo) elif dryrun: raise ConfigureDryRunError(self.targets[0]) else: # note stdout and stderr are the same here s = sys.stdout = sys.stderr = Streamer(sys.stdout) try: env = self.targets[0].get_build_env() if cache_mode == FORCE: # Set up the Decider() to force rebuilds by saying # that every source has changed. Note that we still # call the environment's underlying source decider so # that the correct .sconsign info will get calculated # and keep the build state consistent. def force_build(dependency, target, prev_ni, env_decider=env.decide_source): env_decider(dependency, target, prev_ni) return True if env.decide_source.func_code is not force_build.func_code: env.Decider(force_build) env['PSTDOUT'] = env['PSTDERR'] = s try: sconf.cached = 0 self.targets[0].build() finally: sys.stdout = sys.stderr = env['PSTDOUT'] = \ env['PSTDERR'] = sconf.logstream except KeyboardInterrupt: raise except SystemExit: exc_value = sys.exc_info()[1] raise SCons.Errors.ExplicitExit(self.targets[0],exc_value.code) except Exception, e: for t in self.targets: binfo = SConfBuildInfo() binfo.merge(t.get_binfo()) binfo.set_build_result(1, s.getvalue()) sconsign_entry = SCons.SConsign.SConsignEntry() sconsign_entry.binfo = binfo #sconsign_entry.ninfo = self.get_ninfo() # We'd like to do this as follows: # t.store_info(binfo) # However, we need to store it as an SConfBuildInfo # object, and store_info() will turn it into a # regular FileNodeInfo if the target is itself a # regular File. 
sconsign = t.dir.sconsign() sconsign.set_entry(t.name, sconsign_entry) sconsign.merge() raise e else: for t in self.targets: binfo = SConfBuildInfo() binfo.merge(t.get_binfo()) binfo.set_build_result(0, s.getvalue()) sconsign_entry = SCons.SConsign.SConsignEntry() sconsign_entry.binfo = binfo #sconsign_entry.ninfo = self.get_ninfo() # We'd like to do this as follows: # t.store_info(binfo) # However, we need to store it as an SConfBuildInfo # object, and store_info() will turn it into a # regular FileNodeInfo if the target is itself a # regular File. sconsign = t.dir.sconsign() sconsign.set_entry(t.name, sconsign_entry) sconsign.merge() class SConfBase(object): """This is simply a class to represent a configure context. After creating a SConf object, you can call any tests. After finished with your tests, be sure to call the Finish() method, which returns the modified environment. Some words about caching: In most cases, it is not necessary to cache Test results explicitly. Instead, we use the scons dependency checking mechanism. For example, if one wants to compile a test program (SConf.TryLink), the compiler is only called, if the program dependencies have changed. However, if the program could not be compiled in a former SConf run, we need to explicitly cache this error. """ def __init__(self, env, custom_tests = {}, conf_dir='$CONFIGUREDIR', log_file='$CONFIGURELOG', config_h = None, _depth = 0): """Constructor. Pass additional tests in the custom_tests-dictionary, e.g. custom_tests={'CheckPrivate':MyPrivateTest}, where MyPrivateTest defines a custom test. Note also the conf_dir and log_file arguments (you may want to build tests in the VariantDir, not in the SourceDir) """ global SConfFS if not SConfFS: SConfFS = SCons.Node.FS.default_fs or \ SCons.Node.FS.FS(env.fs.pathTop) if sconf_global is not None: raise SCons.Errors.UserError self.env = env if log_file is not None: log_file = SConfFS.File(env.subst(log_file)) self.logfile = log_file self.logstream = None self.lastTarget = None self.depth = _depth self.cached = 0 # will be set, if all test results are cached # add default tests default_tests = { 'CheckCC' : CheckCC, 'CheckCXX' : CheckCXX, 'CheckSHCC' : CheckSHCC, 'CheckSHCXX' : CheckSHCXX, 'CheckFunc' : CheckFunc, 'CheckType' : CheckType, 'CheckTypeSize' : CheckTypeSize, 'CheckDeclaration' : CheckDeclaration, 'CheckHeader' : CheckHeader, 'CheckCHeader' : CheckCHeader, 'CheckCXXHeader' : CheckCXXHeader, 'CheckLib' : CheckLib, 'CheckLibWithHeader' : CheckLibWithHeader, 'CheckProg' : CheckProg, } self.AddTests(default_tests) self.AddTests(custom_tests) self.confdir = SConfFS.Dir(env.subst(conf_dir)) if config_h is not None: config_h = SConfFS.File(config_h) self.config_h = config_h self._startup() def Finish(self): """Call this method after finished with your tests: env = sconf.Finish() """ self._shutdown() return self.env def Define(self, name, value = None, comment = None): """ Define a pre processor symbol name, with the optional given value in the current config header. If value is None (default), then #define name is written. If value is not none, then #define name value is written. 
comment is a string which will be put as a C comment in the header,
        to explain the meaning of the value (appropriate C comments /* and */
        will be put automatically)."""
        lines = []
        if comment:
            comment_str = "/* %s */" % comment
            lines.append(comment_str)
        if value is not None:
            define_str = "#define %s %s" % (name, value)
        else:
            define_str = "#define %s" % name
        lines.append(define_str)
        lines.append('')
        self.config_h_text = self.config_h_text + '\n'.join(lines)

    def BuildNodes(self, nodes):
        """
        Tries to build the given nodes immediately. Returns 1 on success,
        0 on error.
        """
        if self.logstream is not None:
            # override stdout / stderr to write in log file
            oldStdout = sys.stdout
            sys.stdout = self.logstream
            oldStderr = sys.stderr
            sys.stderr = self.logstream

        # the engine assumes the current path is the SConstruct directory ...
        old_fs_dir = SConfFS.getcwd()
        old_os_dir = os.getcwd()
        SConfFS.chdir(SConfFS.Top, change_os_dir=1)

        # Because we take responsibility here for writing out our
        # own .sconsign info (see SConfBuildTask.execute(), above),
        # we override the store_info() method with a null place-holder
        # so we really control how it gets written.
        for n in nodes:
            n.store_info = 0
            if not hasattr(n, 'attributes'):
                n.attributes = SCons.Node.Node.Attrs()
            n.attributes.keep_targetinfo = 1

        ret = 1

        try:
            # ToDo: use user options for calc
            save_max_drift = SConfFS.get_max_drift()
            SConfFS.set_max_drift(0)
            tm = SCons.Taskmaster.Taskmaster(nodes, SConfBuildTask)
            # we don't want to build tests in parallel
            jobs = SCons.Job.Jobs(1, tm)
            jobs.run()
            for n in nodes:
                state = n.get_state()
                if (state != SCons.Node.executed and
                        state != SCons.Node.up_to_date):
                    # the node could not be built. we return 0 in this case
                    ret = 0
        finally:
            SConfFS.set_max_drift(save_max_drift)
            os.chdir(old_os_dir)
            SConfFS.chdir(old_fs_dir, change_os_dir=0)
            if self.logstream is not None:
                # restore stdout / stderr
                sys.stdout = oldStdout
                sys.stderr = oldStderr
        return ret

    def pspawn_wrapper(self, sh, escape, cmd, args, env):
        """Wrapper function for handling piped spawns.

        This looks to the calling interface (in Action.py) like a "normal"
        spawn, but associates the call with the PSPAWN variable from
        the construction environment and with the streams to which we
        want the output logged.  This gets slid into the construction
        environment as the SPAWN variable so Action.py doesn't have to
        know or care whether it's spawning a piped command or not.
        """
        return self.pspawn(sh, escape, cmd, args, env, self.logstream,
                           self.logstream)

    def TryBuild(self, builder, text = None, extension = ""):
        """Low level TryBuild implementation. Normally you don't need to
        call this - you can use TryCompile / TryLink / TryRun instead.
        """
        global _ac_build_counter

        # Make sure we have a PSPAWN value, and save the current
        # SPAWN value.
        try:
            self.pspawn = self.env['PSPAWN']
        except KeyError:
            raise SCons.Errors.UserError('Missing PSPAWN construction variable.')
        try:
            save_spawn = self.env['SPAWN']
        except KeyError:
            raise SCons.Errors.UserError('Missing SPAWN construction variable.')

        nodesToBeBuilt = []

        f = "conftest_" + str(_ac_build_counter)
        pref = self.env.subst( builder.builder.prefix )
        suff = self.env.subst( builder.builder.suffix )
        target = self.confdir.File(pref + f + suff)

        try:
            # Slide our wrapper into the construction environment as
            # the SPAWN function.
            self.env['SPAWN'] = self.pspawn_wrapper

            sourcetext = self.env.Value(text)

            if text is not None:
                textFile = self.confdir.File(f + extension)
                textFileNode = self.env.SConfSourceBuilder(target=textFile,
                                                           source=sourcetext)
                nodesToBeBuilt.extend(textFileNode)
                source = textFileNode
            else:
                source = None

            nodes = builder(target = target, source = source)
            if not SCons.Util.is_List(nodes):
                nodes = [nodes]
            nodesToBeBuilt.extend(nodes)
            result = self.BuildNodes(nodesToBeBuilt)
        finally:
            self.env['SPAWN'] = save_spawn

        _ac_build_counter = _ac_build_counter + 1

        if result:
            self.lastTarget = nodes[0]
        else:
            self.lastTarget = None
        return result

    def TryAction(self, action, text = None, extension = ""):
        """Tries to execute the given action with optional source file
        contents <text> and optional source file extension <extension>.
        Returns the status (0 : failed, 1 : ok) and the contents of the
        output file.
        """
        builder = SCons.Builder.Builder(action=action)
        self.env.Append( BUILDERS = {'SConfActionBuilder' : builder} )
        ok = self.TryBuild(self.env.SConfActionBuilder, text, extension)
        del self.env['BUILDERS']['SConfActionBuilder']
        if ok:
            outputStr = self.lastTarget.get_contents()
            return (1, outputStr)
        return (0, "")

    def TryCompile(self, text, extension):
        """Compiles the program given in text to an env.Object, using
        extension as file extension (e.g. '.c'). Returns 1 if compilation
        was successful, 0 otherwise. The target is saved in self.lastTarget
        (for further processing).
        """
        return self.TryBuild(self.env.Object, text, extension)

    def TryLink(self, text, extension):
        """Compiles the program given in text to an executable env.Program,
        using extension as file extension (e.g. '.c'). Returns 1 if
        compilation was successful, 0 otherwise. The target is saved in
        self.lastTarget (for further processing).
        """
        return self.TryBuild(self.env.Program, text, extension)

    def TryRun(self, text, extension):
        """Compiles and runs the program given in text, using extension
        as file extension (e.g. '.c'). Returns (1, outputStr) on success,
        (0, '') otherwise. The target (a file containing the program's
        stdout) is saved in self.lastTarget (for further processing).
        """
        ok = self.TryLink(text, extension)
        if ok:
            prog = self.lastTarget
            pname = prog.get_internal_path()
            output = self.confdir.File(os.path.basename(pname)+'.out')
            node = self.env.Command(output, prog, [ [ pname, ">", "${TARGET}"] ])
            ok = self.BuildNodes(node)
            if ok:
                outputStr = output.get_contents()
                return (1, outputStr)
        return (0, "")

    class TestWrapper(object):
        """A wrapper around Tests (to ensure sanity)"""
        def __init__(self, test, sconf):
            self.test = test
            self.sconf = sconf
        def __call__(self, *args, **kw):
            if not self.sconf.active:
                raise SCons.Errors.UserError
            context = CheckContext(self.sconf)
            ret = self.test(context, *args, **kw)
            if self.sconf.config_h is not None:
                self.sconf.config_h_text = self.sconf.config_h_text + context.config_h
            context.Result("error: no result")
            return ret

    def AddTest(self, test_name, test_instance):
        """Adds test_instance to this SConf instance. It can be called with
        self.test_name(...)"""
        setattr(self, test_name, SConfBase.TestWrapper(test_instance, self))

    def AddTests(self, tests):
        """Adds all the tests given in the tests dictionary to this SConf
        instance
        """
        for name in tests.keys():
            self.AddTest(name, tests[name])

    def _createDir( self, node ):
        dirName = str(node)
        if dryrun:
            if not os.path.isdir( dirName ):
                raise ConfigureDryRunError(dirName)
        else:
            if not os.path.isdir( dirName ):
                os.makedirs( dirName )

    def _startup(self):
        """Private method.
        Set up logstream, and set the environment variables necessary for a
        piped build.
        """
        global _ac_config_logs
        global sconf_global
        global SConfFS

        self.lastEnvFs = self.env.fs
        self.env.fs = SConfFS
        self._createDir(self.confdir)
        self.confdir.up().add_ignore( [self.confdir] )

        if self.logfile is not None and not dryrun:
            # truncate logfile, if SConf.Configure is called for the first time
            # in a build
            if self.logfile in _ac_config_logs:
                log_mode = "a"
            else:
                _ac_config_logs[self.logfile] = None
                log_mode = "w"
            fp = open(str(self.logfile), log_mode)
            self.logstream = SCons.Util.Unbuffered(fp)
            # logfile may stay in a build directory, so we tell
            # the build system not to override it with a file of the same
            # name that may exist in the source directory
            self.logfile.dir.add_ignore( [self.logfile] )

            tb = traceback.extract_stack()[-3-self.depth]
            old_fs_dir = SConfFS.getcwd()
            SConfFS.chdir(SConfFS.Top, change_os_dir=0)
            self.logstream.write('file %s,line %d:\n\tConfigure(confdir = %s)\n' %
                                 (tb[0], tb[1], str(self.confdir)) )
            SConfFS.chdir(old_fs_dir)
        else:
            self.logstream = None

        # we use a special builder to create source files from TEXT
        action = SCons.Action.Action(_createSource, _stringSource)
        sconfSrcBld = SCons.Builder.Builder(action=action)
        self.env.Append( BUILDERS={'SConfSourceBuilder':sconfSrcBld} )
        self.config_h_text = _ac_config_hs.get(self.config_h, "")
        self.active = 1
        # only one SConf instance should be active at a time ...
        sconf_global = self

    def _shutdown(self):
        """Private method. Reset to non-piped spawn"""
        global sconf_global, _ac_config_hs

        if not self.active:
            raise SCons.Errors.UserError("Finish may be called only once!")
        if self.logstream is not None and not dryrun:
            self.logstream.write("\n")
            self.logstream.close()
            self.logstream = None
        # remove the SConfSourceBuilder from the environment
        blds = self.env['BUILDERS']
        del blds['SConfSourceBuilder']
        self.env.Replace( BUILDERS=blds )
        self.active = 0
        sconf_global = None
        if not self.config_h is None:
            _ac_config_hs[self.config_h] = self.config_h_text
        self.env.fs = self.lastEnvFs


class CheckContext(object):
    """Provides a context for configure tests. Defines how a test writes to
    the screen and log file.

    A typical test is just a callable with an instance of CheckContext as
    first argument:

        def CheckCustom(context, ...):
            context.Message('Checking my weird test ... ')
            ret = myWeirdTestFunction(...)
            context.Result(ret)

    Often, myWeirdTestFunction will be one of
    context.TryCompile/context.TryLink/context.TryRun. The results of those
    are cached, as they are only rebuilt if the dependencies have changed.
    """

    def __init__(self, sconf):
        """Constructor. Pass the corresponding SConf instance."""
        self.sconf = sconf
        self.did_show_result = 0

        # for Conftest.py:
        self.vardict = {}
        self.havedict = {}
        self.headerfilename = None
        self.config_h = "" # config_h text will be stored here
        # we don't regenerate the config.h file after each test. That means,
        # that tests won't be able to include the config.h file, and so
        # they can't do an #ifdef HAVE_XXX_H. This shouldn't be a major
        # issue, though. If it turns out that we need to include config.h
        # in tests, we must ensure that the dependencies are worked out
        # correctly. Note that we can't use Conftest.py's support for
        # config.h, because we will need to specify a builder for the
        # config.h file ...

    def Message(self, text):
        """Inform about what we are doing right now, e.g.
        'Checking for SOMETHING ...
' """ self.Display(text) self.sconf.cached = 1 self.did_show_result = 0 def Result(self, res): """Inform about the result of the test. If res is not a string, displays 'yes' or 'no' depending on whether res is evaluated as true or false. The result is only displayed when self.did_show_result is not set. """ if isinstance(res, str): text = res elif res: text = "yes" else: text = "no" if self.did_show_result == 0: # Didn't show result yet, do it now. self.Display(text + "\n") self.did_show_result = 1 def TryBuild(self, *args, **kw): return self.sconf.TryBuild(*args, **kw) def TryAction(self, *args, **kw): return self.sconf.TryAction(*args, **kw) def TryCompile(self, *args, **kw): return self.sconf.TryCompile(*args, **kw) def TryLink(self, *args, **kw): return self.sconf.TryLink(*args, **kw) def TryRun(self, *args, **kw): return self.sconf.TryRun(*args, **kw) def __getattr__( self, attr ): if( attr == 'env' ): return self.sconf.env elif( attr == 'lastTarget' ): return self.sconf.lastTarget else: raise AttributeError("CheckContext instance has no attribute '%s'" % attr) #### Stuff used by Conftest.py (look there for explanations). def BuildProg(self, text, ext): self.sconf.cached = 1 # TODO: should use self.vardict for $CC, $CPPFLAGS, etc. return not self.TryBuild(self.env.Program, text, ext) def CompileProg(self, text, ext): self.sconf.cached = 1 # TODO: should use self.vardict for $CC, $CPPFLAGS, etc. return not self.TryBuild(self.env.Object, text, ext) def CompileSharedObject(self, text, ext): self.sconf.cached = 1 # TODO: should use self.vardict for $SHCC, $CPPFLAGS, etc. return not self.TryBuild(self.env.SharedObject, text, ext) def RunProg(self, text, ext): self.sconf.cached = 1 # TODO: should use self.vardict for $CC, $CPPFLAGS, etc. st, out = self.TryRun(text, ext) return not st, out def AppendLIBS(self, lib_name_list): oldLIBS = self.env.get( 'LIBS', [] ) self.env.Append(LIBS = lib_name_list) return oldLIBS def PrependLIBS(self, lib_name_list): oldLIBS = self.env.get( 'LIBS', [] ) self.env.Prepend(LIBS = lib_name_list) return oldLIBS def SetLIBS(self, val): oldLIBS = self.env.get( 'LIBS', [] ) self.env.Replace(LIBS = val) return oldLIBS def Display(self, msg): if self.sconf.cached: # We assume that Display is called twice for each test here # once for the Checking for ... message and once for the result. # The self.sconf.cached flag can only be set between those calls msg = "(cached) " + msg self.sconf.cached = 0 progress_display(msg, append_newline=0) self.Log("scons: Configure: " + msg + "\n") def Log(self, msg): if self.sconf.logstream is not None: self.sconf.logstream.write(msg) #### End of stuff used by Conftest.py. 
def SConf(*args, **kw): if kw.get(build_type, True): kw['_depth'] = kw.get('_depth', 0) + 1 for bt in build_types: try: del kw[bt] except KeyError: pass return SConfBase(*args, **kw) else: return SCons.Util.Null() def CheckFunc(context, function_name, header = None, language = None): res = SCons.Conftest.CheckFunc(context, function_name, header = header, language = language) context.did_show_result = 1 return not res def CheckType(context, type_name, includes = "", language = None): res = SCons.Conftest.CheckType(context, type_name, header = includes, language = language) context.did_show_result = 1 return not res def CheckTypeSize(context, type_name, includes = "", language = None, expect = None): res = SCons.Conftest.CheckTypeSize(context, type_name, header = includes, language = language, expect = expect) context.did_show_result = 1 return res def CheckDeclaration(context, declaration, includes = "", language = None): res = SCons.Conftest.CheckDeclaration(context, declaration, includes = includes, language = language) context.did_show_result = 1 return not res def createIncludesFromHeaders(headers, leaveLast, include_quotes = '""'): # used by CheckHeader and CheckLibWithHeader to produce C - #include # statements from the specified header (list) if not SCons.Util.is_List(headers): headers = [headers] l = [] if leaveLast: lastHeader = headers[-1] headers = headers[:-1] else: lastHeader = None for s in headers: l.append("#include %s%s%s\n" % (include_quotes[0], s, include_quotes[1])) return ''.join(l), lastHeader def CheckHeader(context, header, include_quotes = '<>', language = None): """ A test for a C or C++ header file. """ prog_prefix, hdr_to_check = \ createIncludesFromHeaders(header, 1, include_quotes) res = SCons.Conftest.CheckHeader(context, hdr_to_check, prog_prefix, language = language, include_quotes = include_quotes) context.did_show_result = 1 return not res def CheckCC(context): res = SCons.Conftest.CheckCC(context) context.did_show_result = 1 return not res def CheckCXX(context): res = SCons.Conftest.CheckCXX(context) context.did_show_result = 1 return not res def CheckSHCC(context): res = SCons.Conftest.CheckSHCC(context) context.did_show_result = 1 return not res def CheckSHCXX(context): res = SCons.Conftest.CheckSHCXX(context) context.did_show_result = 1 return not res # Bram: Make this function obsolete? CheckHeader() is more generic. def CheckCHeader(context, header, include_quotes = '""'): """ A test for a C header file. """ return CheckHeader(context, header, include_quotes, language = "C") # Bram: Make this function obsolete? CheckHeader() is more generic. def CheckCXXHeader(context, header, include_quotes = '""'): """ A test for a C++ header file. """ return CheckHeader(context, header, include_quotes, language = "C++") def CheckLib(context, library = None, symbol = "main", header = None, language = None, autoadd = 1): """ A test for a library. See also CheckLibWithHeader. Note that library may also be None to test whether the given symbol compiles without flags. """ if library == []: library = [None] if not SCons.Util.is_List(library): library = [library] # ToDo: accept path for the library res = SCons.Conftest.CheckLib(context, library, symbol, header = header, language = language, autoadd = autoadd) context.did_show_result = 1 return not res # XXX # Bram: Can only include one header and can't use #ifdef HAVE_HEADER_H. def CheckLibWithHeader(context, libs, header, language, call = None, autoadd = 1): # ToDo: accept path for library. Support system header files. 
""" Another (more sophisticated) test for a library. Checks, if library and header is available for language (may be 'C' or 'CXX'). Call maybe be a valid expression _with_ a trailing ';'. As in CheckLib, we support library=None, to test if the call compiles without extra link flags. """ prog_prefix, dummy = \ createIncludesFromHeaders(header, 0) if libs == []: libs = [None] if not SCons.Util.is_List(libs): libs = [libs] res = SCons.Conftest.CheckLib(context, libs, None, prog_prefix, call = call, language = language, autoadd = autoadd) context.did_show_result = 1 return not res def CheckProg(context, prog_name): """Simple check if a program exists in the path. Returns the path for the application, or None if not found. """ res = SCons.Conftest.CheckProg(context, prog_name) context.did_show_result = 1 return res # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:
PypiClean
/Montreal-Forced-Aligner-3.0.0a3.tar.gz/Montreal-Forced-Aligner-3.0.0a3/docs/source/user_guide/workflows/alignment.rst
.. _pretrained_alignment:

Align with an acoustic model ``(mfa align)``
============================================

This is the primary workflow of MFA, where you can use pretrained
:term:`acoustic models` to align your dataset. There are a number of
:xref:`pretrained_acoustic_models` to use, but you can also adapt a pretrained
model to your data (see :ref:`adapt_acoustic_model`) or train an acoustic
model from scratch using your dataset (see :ref:`train_acoustic_model`).

.. seealso::

   * :ref:`alignment_evaluation` for details on how to evaluate alignments against a gold standard.
   * :ref:`fine_tune_alignments` for implementation details on how alignments are fine tuned.
   * :ref:`phone_models` for implementation details on using phone bigram models for generating alignments.
   * :ref:`alignment_analysis` for details on the fields generated in the ``alignment_analysis.csv`` file in the output folder

Command reference
-----------------

.. click:: montreal_forced_aligner.command_line.align:align_corpus_cli
   :prog: mfa align
   :nested: full

Configuration reference
-----------------------

- :ref:`configuration_global`

API reference
-------------

- :ref:`alignment_api`

.. _align_one:

Align a single file ``(mfa align_one)``
=======================================

This workflow is identical to :ref:`pretrained_alignment`, but rather than
aligning a full dataset, it only aligns a single file. Because only a single
file is used, many of the optimizations for larger datasets are skipped,
resulting in faster alignment times, but features like speaker adaptation are
not employed.

There are a number of :xref:`pretrained_acoustic_models` to use, but you can
also adapt a pretrained model to your data (see :ref:`adapt_acoustic_model`)
or train an acoustic model from scratch using your dataset (see
:ref:`train_acoustic_model`).

Command reference
-----------------

.. click:: montreal_forced_aligner.command_line.align_one:align_one_cli
   :prog: mfa align_one
   :nested: full

Configuration reference
-----------------------

- :ref:`configuration_global`
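Example
-------

For illustration only (the file paths here are hypothetical, and the argument
order should be confirmed against the command reference above), aligning a
single recording and its transcript might look like:

.. code-block:: console

   mfa align_one ~/recording.wav ~/recording.txt english_us_arpa english_us_arpa ~/recording.TextGrid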
PypiClean
/FreePyBX-1.0-RC1.tar.gz/FreePyBX-1.0-RC1/freepybx/public/js/dijit/a11y.js.uncompressed.js
define("dijit/a11y", [ "dojo/_base/array", // array.forEach array.map "dojo/_base/config", // defaultDuration "dojo/_base/declare", // declare "dojo/dom", // dom.byId "dojo/dom-attr", // domAttr.attr domAttr.has "dojo/dom-style", // style.style "dojo/_base/sniff", // has("ie") "./_base/manager", // manager._isElementShown "." // for exporting methods to dijit namespace ], function(array, config, declare, dom, domAttr, domStyle, has, manager, dijit){ // module: // dijit/a11y // summary: // Accessibility utility functions (keyboard, tab stops, etc.) var shown = (dijit._isElementShown = function(/*Element*/ elem){ var s = domStyle.get(elem); return (s.visibility != "hidden") && (s.visibility != "collapsed") && (s.display != "none") && (domAttr.get(elem, "type") != "hidden"); }); dijit.hasDefaultTabStop = function(/*Element*/ elem){ // summary: // Tests if element is tab-navigable even without an explicit tabIndex setting // No explicit tabIndex setting, need to investigate node type switch(elem.nodeName.toLowerCase()){ case "a": // An <a> w/out a tabindex is only navigable if it has an href return domAttr.has(elem, "href"); case "area": case "button": case "input": case "object": case "select": case "textarea": // These are navigable by default return true; case "iframe": // If it's an editor <iframe> then it's tab navigable. var body; try{ // non-IE var contentDocument = elem.contentDocument; if("designMode" in contentDocument && contentDocument.designMode == "on"){ return true; } body = contentDocument.body; }catch(e1){ // contentWindow.document isn't accessible within IE7/8 // if the iframe.src points to a foreign url and this // page contains an element, that could get focus try{ body = elem.contentWindow.document.body; }catch(e2){ return false; } } return body && (body.contentEditable == 'true' || (body.firstChild && body.firstChild.contentEditable == 'true')); default: return elem.contentEditable == 'true'; } }; var isTabNavigable = (dijit.isTabNavigable = function(/*Element*/ elem){ // summary: // Tests if an element is tab-navigable // TODO: convert (and rename method) to return effective tabIndex; will save time in _getTabNavigable() if(domAttr.get(elem, "disabled")){ return false; }else if(domAttr.has(elem, "tabIndex")){ // Explicit tab index setting return domAttr.get(elem, "tabIndex") >= 0; // boolean }else{ // No explicit tabIndex setting, so depends on node type return dijit.hasDefaultTabStop(elem); } }); dijit._getTabNavigable = function(/*DOMNode*/ root){ // summary: // Finds descendants of the specified root node. // // description: // Finds the following descendants of the specified root node: // * the first tab-navigable element in document order // without a tabIndex or with tabIndex="0" // * the last tab-navigable element in document order // without a tabIndex or with tabIndex="0" // * the first element in document order with the lowest // positive tabIndex value // * the last element in document order with the highest // positive tabIndex value var first, last, lowest, lowestTabindex, highest, highestTabindex, radioSelected = {}; function radioName(node){ // If this element is part of a radio button group, return the name for that group. 
return node && node.tagName.toLowerCase() == "input" && node.type && node.type.toLowerCase() == "radio" && node.name && node.name.toLowerCase(); } var walkTree = function(/*DOMNode*/parent){ for(var child = parent.firstChild; child; child = child.nextSibling){ // Skip text elements, hidden elements, and also non-HTML elements (those in custom namespaces) in IE, // since show() invokes getAttribute("type"), which crash on VML nodes in IE. if(child.nodeType != 1 || (has("ie") && child.scopeName !== "HTML") || !shown(child)){ continue; } if(isTabNavigable(child)){ var tabindex = domAttr.get(child, "tabIndex"); if(!domAttr.has(child, "tabIndex") || tabindex == 0){ if(!first){ first = child; } last = child; }else if(tabindex > 0){ if(!lowest || tabindex < lowestTabindex){ lowestTabindex = tabindex; lowest = child; } if(!highest || tabindex >= highestTabindex){ highestTabindex = tabindex; highest = child; } } var rn = radioName(child); if(domAttr.get(child, "checked") && rn){ radioSelected[rn] = child; } } if(child.nodeName.toUpperCase() != 'SELECT'){ walkTree(child); } } }; if(shown(root)){ walkTree(root); } function rs(node){ // substitute checked radio button for unchecked one, if there is a checked one with the same name. return radioSelected[radioName(node)] || node; } return { first: rs(first), last: rs(last), lowest: rs(lowest), highest: rs(highest) }; }; dijit.getFirstInTabbingOrder = function(/*String|DOMNode*/ root){ // summary: // Finds the descendant of the specified root node // that is first in the tabbing order var elems = dijit._getTabNavigable(dom.byId(root)); return elems.lowest ? elems.lowest : elems.first; // DomNode }; dijit.getLastInTabbingOrder = function(/*String|DOMNode*/ root){ // summary: // Finds the descendant of the specified root node // that is last in the tabbing order var elems = dijit._getTabNavigable(dom.byId(root)); return elems.last ? elems.last : elems.highest; // DomNode }; return { hasDefaultTabStop: dijit.hasDefaultTabStop, isTabNavigable: dijit.isTabNavigable, _getTabNavigable: dijit._getTabNavigable, getFirstInTabbingOrder: dijit.getFirstInTabbingOrder, getLastInTabbingOrder: dijit.getLastInTabbingOrder }; });
PypiClean
/Nuitka-1.8.tar.gz/Nuitka-1.8/nuitka/code_generation/LocalsDictCodes.py
from .CodeHelpers import ( generateExpressionCode, withObjectCodeTemporaryAssignment, ) from .Emission import SourceCodeCollector from .ErrorCodes import ( getErrorExitBoolCode, getErrorExitCode, getNameReferenceErrorCode, ) from .Indentation import indented from .PythonAPICodes import getReferenceExportCode from .templates.CodeTemplatesVariables import ( template_read_locals_dict_with_fallback, template_read_locals_dict_without_fallback, template_read_locals_mapping_with_fallback, template_read_locals_mapping_without_fallback, ) def generateSetLocalsDictCode(statement, emit, context): locals_declaration = context.addLocalsDictName( statement.getLocalsScope().getCodeName() ) emit( """\ %(locals_dict)s = MAKE_DICT_EMPTY();""" % {"locals_dict": locals_declaration} ) def generateSetLocalsMappingCode(statement, emit, context): new_locals_name = context.allocateTempName("set_locals") generateExpressionCode( to_name=new_locals_name, expression=statement.subnode_new_locals, emit=emit, context=context, ) locals_declaration = context.addLocalsDictName( statement.getLocalsScope().getCodeName() ) emit( """\ %(locals_dict)s = %(locals_value)s;""" % {"locals_dict": locals_declaration, "locals_value": new_locals_name} ) getReferenceExportCode(new_locals_name, emit, context) if context.needsCleanup(new_locals_name): context.removeCleanupTempName(new_locals_name) def generateReleaseLocalsDictCode(statement, emit, context): locals_declaration = context.addLocalsDictName( statement.getLocalsScope().getCodeName() ) emit( """\ Py_DECREF(%(locals_dict)s); %(locals_dict)s = NULL;""" % {"locals_dict": locals_declaration} ) def generateLocalsDictSetCode(statement, emit, context): value_arg_name = context.allocateTempName("dictset_value", unique=True) generateExpressionCode( to_name=value_arg_name, expression=statement.subnode_source, emit=emit, context=context, ) context.setCurrentSourceCodeReference(statement.getSourceReference()) locals_scope = statement.getLocalsDictScope() locals_declaration = context.addLocalsDictName(locals_scope.getCodeName()) is_dict = locals_scope.hasShapeDictionaryExact() res_name = context.getIntResName() if is_dict: emit( "%s = PyDict_SetItem(%s, %s, %s);" % ( res_name, locals_declaration, context.getConstantCode(statement.getVariableName()), value_arg_name, ) ) else: emit( "%s = PyObject_SetItem(%s, %s, %s);" % ( res_name, locals_declaration, context.getConstantCode(statement.getVariableName()), value_arg_name, ) ) getErrorExitBoolCode( condition="%s != 0" % res_name, release_name=value_arg_name, needs_check=statement.mayRaiseException(BaseException), emit=emit, context=context, ) def generateLocalsDictDelCode(statement, emit, context): locals_scope = statement.getLocalsDictScope() dict_arg_name = locals_scope.getCodeName() is_dict = locals_scope.hasShapeDictionaryExact() context.setCurrentSourceCodeReference(statement.getSourceReference()) if is_dict: res_name = context.getBoolResName() emit( "%s = DICT_REMOVE_ITEM(%s, %s);" % ( res_name, dict_arg_name, context.getConstantCode(statement.getVariableName()), ) ) getErrorExitBoolCode( condition="%s == false" % res_name, needs_check=statement.mayRaiseException(BaseException), emit=emit, context=context, ) else: res_name = context.getIntResName() emit( "%s = PyObject_DelItem(%s, %s);" % ( res_name, dict_arg_name, context.getConstantCode(statement.getVariableName()), ) ) getErrorExitBoolCode( condition="%s == -1" % res_name, needs_check=statement.mayRaiseException(BaseException), emit=emit, context=context, ) def 
generateLocalsDictVariableRefOrFallbackCode(to_name, expression, emit, context): variable_name = expression.getVariableName() fallback_emit = SourceCodeCollector() with withObjectCodeTemporaryAssignment( to_name, "locals_lookup_value", expression, emit, context ) as value_name: generateExpressionCode( to_name=value_name, expression=expression.subnode_fallback, emit=fallback_emit, context=context, ) locals_scope = expression.getLocalsDictScope() locals_declaration = context.addLocalsDictName(locals_scope.getCodeName()) is_dict = locals_scope.hasShapeDictionaryExact() assert not context.needsCleanup(value_name) if is_dict: template = template_read_locals_dict_with_fallback fallback_codes = indented(fallback_emit.codes) emit( template % { "to_name": value_name, "locals_dict": locals_declaration, "fallback": fallback_codes, "var_name": context.getConstantCode(constant=variable_name), } ) else: template = template_read_locals_mapping_with_fallback fallback_codes = indented(fallback_emit.codes, 2) emit( template % { "to_name": value_name, "locals_dict": locals_declaration, "fallback": fallback_codes, "var_name": context.getConstantCode(constant=variable_name), "exception_exit": context.getExceptionEscape(), } ) # If the fallback took no reference, then make it do it # anyway. context.addCleanupTempName(value_name) def generateLocalsDictVariableRefCode(to_name, expression, emit, context): variable_name = expression.getVariableName() locals_scope = expression.getLocalsDictScope() locals_declaration = context.addLocalsDictName(locals_scope.getCodeName()) is_dict = locals_scope.hasShapeDictionaryExact() if is_dict: template = template_read_locals_dict_without_fallback else: template = template_read_locals_mapping_without_fallback with withObjectCodeTemporaryAssignment( to_name, "locals_lookup_value", expression, emit, context ) as value_name: emit( template % { "to_name": value_name, "locals_dict": locals_declaration, "var_name": context.getConstantCode(constant=variable_name), } ) getNameReferenceErrorCode( variable_name=variable_name, condition="%s == NULL && CHECK_AND_CLEAR_KEY_ERROR_OCCURRED(tstate)" % value_name, emit=emit, context=context, ) getErrorExitCode(check_name=value_name, emit=emit, context=context) if not is_dict: context.addCleanupTempName(value_name) def generateLocalsDictVariableCheckCode(to_name, expression, emit, context): variable_name = expression.getVariableName() locals_scope = expression.getLocalsDictScope() locals_declaration = context.addLocalsDictName(locals_scope.getCodeName()) is_dict = locals_scope.hasShapeDictionaryExact() if is_dict: to_name.getCType().emitAssignmentCodeFromBoolCondition( to_name=to_name, condition="DICT_HAS_ITEM(tstate, %(locals_dict)s, %(var_name)s) == 1" % { "locals_dict": locals_declaration, "var_name": context.getConstantCode(constant=variable_name), }, emit=emit, ) else: tmp_name = context.getIntResName() template = """\ %(tmp_name)s = MAPPING_HAS_ITEM(tstate, %(locals_dict)s, %(var_name)s); """ emit( template % { "locals_dict": locals_declaration, "var_name": context.getConstantCode(constant=variable_name), "tmp_name": tmp_name, } ) getErrorExitBoolCode( condition="%s == -1" % tmp_name, needs_check=expression.mayRaiseException(BaseException), emit=emit, context=context, ) to_name.getCType().emitAssignmentCodeFromBoolCondition( to_name=to_name, condition="%s == 1" % tmp_name, emit=emit )
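# Illustrative note (the variable names below are an assumption about what the
# surrounding code-generation context would allocate, not output captured from
# Nuitka): for an assignment ``x = value`` in an exact-dict locals scope,
# generateLocalsDictSetCode above emits C along these lines:
#
#   tmp_res = PyDict_SetItem(locals_dict_1, const_str_plain_x, tmp_dictset_value);
#   // the generated error-exit branch is taken when tmp_res != 0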
PypiClean
/Nodes-1.2.tar.gz/Nodes-1.2/rfNEAT/population.py
# This file is part of Nodes.
#
# Nodes is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Nodes is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Nodes.  If not, see <http://www.gnu.org/licenses/>.

# This file is part of NodesEvolution, refactored version of NEAT
# <http://www.cs.ucf.edu/~kstanley/neat.html>.
# For original version of NEAT-python code, visit
# <http://code.google.com/p/neat-python/>

import gzip, random
import math, time
import cPickle as pickle

from config import Config
import species
import chromosome

class Population(object):
    """ Manages all the species """

    def evaluate(self):
        """ Evaluates the entire population. This is a stub: override it, or
        assign a fitness function to Population.evaluate, as the __main__
        block at the bottom of this file does. A task-based evaluation was
        sketched here, relying on an undefined self.itertasks() helper:

            for individual in self:
                for task in self.itertasks():
                    # task is just a list of input values, so...
                    if len(task) == 2:
                        example, answer = task
        """
        raise NotImplementedError

    def __init__(self, nodes_dict, checkpoint_file=None):
        if checkpoint_file:
            # start from a previous point: creates an 'empty'
            # population and point its __dict__ to the previous one
            self.__resume_checkpoint(checkpoint_file)
        else:
            # total population size
            self.__popsize = Config.pop_size
            # currently living species
            self.__species = []
            # species history
            self.__species_log = []

            # Statistics
            self.__avg_fitness = []
            self.__best_fitness = []

            self.__create_population(nodes_dict)
            self.__generation = -1

    stats = property(lambda self: (self.__best_fitness, self.__avg_fitness))
    species_log = property(lambda self: self.__species_log)

    def __resume_checkpoint(self, checkpoint):
        """ Resumes the simulation from a previous saved point. """
        try:
            #file = open(checkpoint)
            file = gzip.open(checkpoint)
        except IOError:
            raise
        print 'Resuming from a previous point: %s' %checkpoint
        # when unpickling __init__ is not called again
        previous_pop = pickle.load(file)
        self.__dict__ = previous_pop.__dict__

        print 'Loading random state'
        rstate = pickle.load(file)
        random.setstate(rstate)
        #random.jumpahead(1)
        file.close()

    def __create_checkpoint(self, report):
        """ Saves the current simulation state. """
        #from time import strftime # get current time
        #date = strftime("%Y_%m_%d_%Hh%Mm%Ss")
        if report:
            print 'Creating checkpoint file at generation: %d' %self.__generation

        # dumps 'self'
        #file = open('checkpoint_'+str(self.__generation), 'w')
        file = gzip.open('checkpoint_'+str(self.__generation), 'w', compresslevel = 5)
        # dumps the population
        pickle.dump(self, file, protocol=2)
        # dumps the current random state
        pickle.dump(random.getstate(), file, protocol=2)
        file.close()

    def __create_population(self, nodes_dict):
        genotypes = chromosome.Chromosome

        self.__population = []
        for i in xrange(self.__popsize):
            g = genotypes.create(nodes_dict)
            self.__population.append(g)

    def __repr__(self):
        s = "Population size: %d" %self.__popsize
        s += "\nTotal species: %d" %len(self.__species)
        return s

    def __len__(self):
        return len(self.__population)

    def __iter__(self):
        return iter(self.__population)

    def __getitem__(self, key):
        return self.__population[key]

    #def remove(self, chromo):
    #    ''' Removes a chromosome from the population '''
    #    self.__population.remove(chromo)

    def __speciate(self, report):
        """ Group chromosomes into species by similarity """
        # Speciate the population
        for individual in self:
            found = False
            for s in self.__species:
                if individual.distance(s.representant) < Config.compatibility_threshold:
                    s.add(individual)
                    found = True
                    break

            if not found: # create a new species for this lone chromosome
                self.__species.append(species.Species(individual))

        # python technical note: we need a "working copy" list when removing
        # elements while looping, otherwise we might end up having sync issues
        for s in self.__species[:]:
            # this happens when no chromosomes are compatible with the species
            if len(s) == 0:
                if report:
                    print "Removing species %d for being empty" % s.id
                # remove empty species
                self.__species.remove(s)

        self.__set_compatibility_threshold()

    def __set_compatibility_threshold(self):
        ''' Controls compatibility threshold '''
        if len(self.__species) > Config.species_size:
            Config.compatibility_threshold += Config.compatibility_change
        elif len(self.__species) < Config.species_size:
            if Config.compatibility_threshold > Config.compatibility_change:
                Config.compatibility_threshold -= Config.compatibility_change
            else:
                print 'Compatibility threshold cannot be changed (minimum value has been reached)'

    def average_fitness(self):
        """ Returns the average raw fitness of the population """
        sum = 0.0
        for c in self:
            sum += c.fitness

        return sum/len(self)

    def stdeviation(self):
        """ Returns the population standard deviation """
        # first compute the average
        u = self.average_fitness()
        error = 0.0

        try:
            # now compute the distance from average
            for c in self:
                error += (u - c.fitness)**2
        except OverflowError:
            #TODO: catch OverflowError: (34, 'Numerical result out of range')
            print "Overflow - printing population status"
            print "error = %f \t average = %f" %(error, u)
            print "Population fitness:"
            print [c.fitness for c in self]

        return math.sqrt(error/len(self))

    def __compute_spawn_levels(self):
        """ Compute each species' spawn amount (Stanley, p. 40) """

        # 1. Boost if young and penalize if old
        # TODO: does it really increase the overall performance?
        species_stats = []
        for s in self.__species:
            if s.age < Config.youth_threshold:
                species_stats.append(s.average_fitness()*Config.youth_boost)
            elif s.age > Config.old_threshold:
                species_stats.append(s.average_fitness()*Config.old_penalty)
            else:
                species_stats.append(s.average_fitness())

        # 2. Share fitness (only useful for computing spawn amounts)
        # More info: http://tech.groups.yahoo.com/group/neat/message/2203
        # Sharing the fitness is only meaningful here;
        # we don't really have to change each individual's raw fitness
        total_average = 0.0
        for s in species_stats:
            total_average += s

        # 3. Compute spawn
        for i, s in enumerate(self.__species):
            s.spawn_amount = int(round((species_stats[i]*self.__popsize/total_average)))

    def __tournament_selection(self, k=2):
        """ Tournament selection with size k (default k=2).
        Make sure the population has at least k individuals """
        random.shuffle(self.__population)

        return max(self.__population[:k])

    def __log_species(self):
        """ Logging species data for visualizing speciation """
        higher = max([s.id for s in self.__species])
        temp = []
        for i in xrange(1, higher+1):
            found_specie = False
            for s in self.__species:
                if i == s.id:
                    temp.append(len(s))
                    found_specie = True
                    break
            if not found_specie:
                temp.append(0)
        self.__species_log.append(temp)

    def __population_diversity(self):
        """ Calculates the diversity of the population: total average weights,
        number of connections, nodes """

        num_nodes = 0
        num_conns = 0
        avg_weights = 0.0

        for c in self:
            num_nodes += len(c.node_genes)
            num_conns += len(c.conn_genes)
            for cg in c.conn_genes:
                avg_weights += cg.weight

        total = len(self)
        return (num_nodes/total, num_conns/total, avg_weights/total)

    def epoch(self, n, report=True, save_best=False, checkpoint_interval = 10, checkpoint_generation = None):
        """ Runs NEAT's genetic algorithm for n epochs.

            Keyword arguments:
            report -- show stats at each epoch (default True)
            save_best -- save the best chromosome from each epoch (default False)
            checkpoint_interval -- time in minutes between saving checkpoints (default 10 minutes)
            checkpoint_generation -- time in generations between saving checkpoints
                (default None -- option disabled)
        """
        t0 = time.time() # for saving checkpoints

        for g in xrange(n):
            self.__generation += 1

            if report:
                print '\n ****** Running generation %d ****** \n' % self.__generation

            # Evaluate individuals
            self.evaluate()
            # Speciates the population
            self.__speciate(report)

            # Current generation's best chromosome
            self.__best_fitness.append(max(self.__population))
            # Current population's average fitness
            self.__avg_fitness.append(self.average_fitness())

            # Print some statistics
            best = self.__best_fitness[-1]

            # Which species has the best chromosome?
            for s in self.__species:
                s.hasBest = False
                if best.species_id == s.id:
                    s.hasBest = True

            # saves the best chromo from the current generation
            if save_best:
                file = open('best_chromo_'+str(self.__generation),'w')
                pickle.dump(best, file)
                file.close()

            # Stops the simulation
            if best.fitness > Config.max_fitness_threshold:
                print '\nBest individual found in epoch %s - complexity: %s' %(self.__generation, best.size())
                break

            #-----------------------------------------
            # Prints chromosome's parents id:  {dad_id, mon_id} -> child_id
            #for chromosome in self.__population:
            #    print '{%3d; %3d} -> %3d' %(chromosome.parent1_id, chromosome.parent2_id, chromosome.id)
            #-----------------------------------------

            # Remove stagnated species and its members (except if it has the best chromosome)
            for s in self.__species[:]:
                if s.no_improvement_age > Config.max_stagnation:
                    if not s.hasBest:
                        if report:
                            print "\n   Species %2d (with %2d individuals) is stagnated: removing it" \
                                    %(s.id, len(s))
                        # removing species
                        self.__species.remove(s)
                        # removing all the species' members
                        #TODO: can be optimized!
                        for c in self.__population[:]:
                            if c.species_id == s.id:
                                self.__population.remove(c)

            # Remove "super-stagnated" species (even if it has the best chromosome)
            # It is not clear if it really avoids local minima
            for s in self.__species[:]:
                if s.no_improvement_age > 2*Config.max_stagnation:
                    if report:
                        print "\n   Species %2d (with %2d individuals) is super-stagnated: removing it" \
                                %(s.id, len(s))
                    # removing species
                    self.__species.remove(s)
                    # removing all the species' members
                    #TODO: can be optimized!
                    for c in self.__population[:]:
                        if c.species_id == s.id:
                            self.__population.remove(c)

            # Compute spawn levels for each remaining species
            self.__compute_spawn_levels()

            # Removing species with spawn amount = 0
            for s in self.__species[:]:
                # This rarely happens
                if s.spawn_amount == 0:
                    if report:
                        print '   Species %2d age %2s removed: produced no offspring' %(s.id, s.age)
                    for c in self.__population[:]:
                        if c.species_id == s.id:
                            self.__population.remove(c)
                            #self.remove(c)
                    self.__species.remove(s)

            # Logging speciation stats
            self.__log_species()

            if report:
                #print 'Population size: %d \t Diversity: %s' %(len(self), self.__population_diversity())
                print 'Population\'s average fitness: %3.5f stdev: %3.5f' %(self.__avg_fitness[-1], self.stdeviation())
                print 'Best fitness: %2.12s - size: %s - species %s - id %s' \
                    %(best.fitness, best.size(), best.species_id, best.id)

                # print some "debugging" information
                print 'Species length: %d totaling %d individuals' \
                        %(len(self.__species), sum([len(s) for s in self.__species]))
                print 'Species ID       : %s' % [s.id for s in self.__species]
                print 'Each species size: %s' % [len(s) for s in self.__species]
                print 'Amount to spawn  : %s' % [s.spawn_amount for s in self.__species]
                print 'Species age      : %s' % [s.age for s in self.__species]
                print 'Species no improv: %s' % [s.no_improvement_age for s in self.__species] # species no improvement age

            #for s in self.__species:
            #    print s

            # -------------------------- Producing new offspring -------------------------- #
            new_population = [] # next generation's population

            # Spawning new population
            for s in self.__species:
                new_population.extend(s.reproduce())

            # ----------------------------#
            # Controls under or overflow  #
            # ----------------------------#
            fill = (self.__popsize) - len(new_population)
            if fill < 0: # overflow
                if report:
                    print '   Removing %d excess individual(s) from the new population' %-fill
                # TODO: This is dangerous! I can't remove a species' representant!
                new_population = new_population[:fill] # Removing the last added members

            if fill > 0: # underflow
                if report:
                    print '   Producing %d more individual(s) to fill up the new population' %fill

                # TODO:
                # what about producing new individuals instead of reproducing?
                # increasing diversity from time to time might help
                while fill > 0:
                    # Selects a random chromosome from population
                    parent1 = random.choice(self.__population)
                    # Search for a mate within the same species
                    found = False
                    for c in self:
                        # what if c is parent1 itself?
                        if c.species_id == parent1.species_id:
                            child = parent1.crossover(c)
                            new_population.append(child.mutate())
                            found = True
                            break
                    if not found:
                        # If no mate was found, just mutate it
                        new_population.append(parent1.mutate())
                    #new_population.append(chromosome.FFChromosome.create_fully_connected())
                    fill -= 1

            assert self.__popsize == len(new_population), 'Different population sizes!'
            # Updates current population
            self.__population = new_population[:]

            if checkpoint_interval is not None and time.time() > t0 + 60*checkpoint_interval:
                self.__create_checkpoint(report)
                t0 = time.time() # updates the counter
            elif checkpoint_generation is not None and self.__generation % checkpoint_generation == 0:
                self.__create_checkpoint(report)

if __name__ == '__main__':

    # sample fitness function
    def eval_fitness(population):
        for individual in population:
            individual.fitness = 1.0

    # set fitness function
    Population.evaluate = eval_fitness

    # creates the population; Population requires a nodes_dict describing the
    # initial node genes -- the empty dict here is only a placeholder for this
    # sample
    pop = Population({})
    # runs the simulation for 250 epochs
    pop.epoch(250)
PypiClean
/FAT-Forensics-0.1.2.tar.gz/FAT-Forensics-0.1.2/fatf/utils/metrics/subgroup_metrics.py
import inspect from numbers import Number from typing import Callable, List, Optional, Tuple, Union from typing import Dict # pylint: disable=unused-import import numpy as np import fatf.utils.metrics.metrics as fumm import fatf.utils.metrics.tools as fumt __all__ = ['apply_metric_function', 'apply_metric', 'performance_per_subgroup', 'performance_per_subgroup_indexed'] # yapf: disable Index = Union[int, str] # A column index type def apply_metric_function(population_confusion_matrix: List[np.ndarray], metric_function: Callable[[np.ndarray], float], *args, **kwargs) -> List[float]: """ Applies the provided performance metric to every confusion matrix. The performance metric function needs to take a numpy.ndarray confusion matrix as the first parameter followed by any number of unnamed and named parameters provided by ``*args`` and ``**kwargs`` parameters. Parameters ---------- population_confusion_matrix : List[numpy.ndarray] A list of confusion matrices for each sub-population. metric_function : Callable[[numpy.ndarray], Number] A metric function that takes a confusion matrix as a first parameter, followed by any number of unnamed parameters (``*args``) and any number of named parameters (``**kwargs``) and outputs a single number -- the metric value. *args Unnamed arguments passed to the metric function. **kwargs Named arguments passed to the metric function. Raises ------ AttributeError The ``metric_function`` parameter does not require at least one unnamed parameter. IncorrectShapeError The confusion matrix is not a 2-dimensional numpy array, it is not square (equal width and height) or its dimension is not at least 2x2. TypeError The confusion matrix is not of an integer kind (e.g. ``int``, ``numpy.int32``, ``numpy.int64``). One of the ``metric_function`` outputs is not numerical. The ``metric_function`` is not Python callable. The ``population_confusion_matrix`` is not a list. ValueError The confusion matrix is a structured numpy array. The ``population_confusion_matrix`` parameter is an empty list. Returns ------- metrics : List[numbers] A list with the value of the selected metric for every sub-population. """ # Validate the confusion matrices type if isinstance(population_confusion_matrix, list): if not population_confusion_matrix: raise ValueError('The population_confusion_matrix parameter ' 'cannot be an empty list.') for confusion_matrix in population_confusion_matrix: assert fumt.validate_confusion_matrix(confusion_matrix), \ 'Invalid confusion matrix.' else: raise TypeError('The population_confusion_matrix parameter has to be ' 'a list.') # Validate metric_function if callable(metric_function): required_param_n = 0 params = inspect.signature(metric_function).parameters for param in params: if params[param].default is params[param].empty: required_param_n += 1 if not required_param_n: raise AttributeError('The metric_function callable needs to have ' 'at least one required parameter taking a ' 'confusion matrix. 
0 were found.') else: raise TypeError('The metric_function parameter has to be a Python ' 'callable.') metrics = [] for cmx in population_confusion_matrix: metrics.append(metric_function(cmx, *args, **kwargs)) # type: ignore for metric_value in metrics: if not isinstance(metric_value, Number): raise TypeError('One of the metric function outputs is not a ' 'number: *{}*.'.format(metric_value)) return metrics def apply_metric(population_confusion_matrix: List[np.ndarray], metric: Optional[str] = None, label_index: int = 0, **kwargs) -> List[float]: """ Applies one of the predefined performance metric to all confusion matrices. Available metrics are: * ``true positive rate``, * ``true negative rate``, * ``false positive rate``, * ``false negative rate``, * ``positive predictive value``, * ``negative predictive value``, * ``accuracy``, and * ``treatment``. Parameters ---------- population_confusion_matrix : List[numpy.ndarray] A list of confusion matrices for each sub-population. metric : string, optional (default='accuracy') A performance metric identifier that will be used. label_index : integer, optional (default=0) The index of a label that should be treated as "positive". All the other labels will be treated as "negative". This is only useful when the confusion matrices are multi-class. Raises ------ TypeError The ``metric`` parameter is not a string. ValueError The ``metric`` parameter specifies an unknown metric. Returns ------- metrics : List[number] A list with the value of the selected metric for every sub-population. """ available_metrics = { 'true positive rate': fumm.multiclass_true_positive_rate, 'true negative rate': fumm.multiclass_true_negative_rate, 'false positive rate': fumm.multiclass_false_positive_rate, 'false negative rate': fumm.multiclass_false_negative_rate, 'positive predictive value': fumm.multiclass_positive_predictive_value, 'negative predictive value': fumm.multiclass_negative_predictive_value, 'accuracy': fumm.accuracy, 'treatment': fumm.multiclass_treatment } # type: Dict[str, Callable] if metric is None: metric = 'accuracy' elif isinstance(metric, str): if metric not in available_metrics: available_metrics_names = sorted(list(available_metrics.keys())) raise ValueError('The selected metric (*{}*) is not recognised. ' 'The following options are available: ' '{}.'.format(metric, available_metrics_names)) else: raise TypeError('The metric parameter has to be a string.') if metric == 'accuracy': metrics = apply_metric_function(population_confusion_matrix, available_metrics[metric], **kwargs) else: metrics = apply_metric_function(population_confusion_matrix, available_metrics[metric], label_index, **kwargs) return metrics def performance_per_subgroup( dataset: np.ndarray, # ground_truth: np.ndarray, predictions: np.ndarray, # column_index: Index, # *args, label_index: int = 0, # groupings: Optional[List[Union[float, Tuple[str]]]] = None, numerical_bins_number: int = 5, treat_as_categorical: Optional[bool] = None, # labels: Optional[List[Union[str, float]]] = None, # metric: Optional[str] = None, metric_function: Optional[Callable[[np.ndarray], float]] = None, # **kwargs) -> Tuple[List[float], List[str]]: """ Computes a chosen metric per sub-population for a data set. 
    This function combines
    :func:`fatf.utils.metrics.tools.confusion_matrix_per_subgroup` function
    together with
    :func:`fatf.utils.metrics.subgroup_metrics.apply_metric` (when using
    ``metric`` parameter) and
    :func:`fatf.utils.metrics.subgroup_metrics.apply_metric_function` (when
    using ``metric_function`` parameter) functions. For the description of
    parameters, errors and exceptions please see the documentation of these
    functions.

    .. note::
       The ``metric_function`` parameter takes precedence over the ``metric``
       parameter if both are provided.

    Returns
    -------
    population_metrics : List[numbers]
        A list with the value of the selected metric for every sub-population.
    bin_names : List[strings]
        The name of every sub-population (binning results) defined by the
        feature ranges for a numerical feature and feature value sets for a
        categorical feature.
    """
    # pylint: disable=too-many-locals
    population_cmxs, bin_names = fumt.confusion_matrix_per_subgroup(
        dataset, ground_truth, predictions, column_index, groupings,
        numerical_bins_number, treat_as_categorical, labels)

    if metric_function is not None:
        population_metrics = apply_metric_function(
            population_cmxs, metric_function, *args, **kwargs)
    else:
        population_metrics = apply_metric(population_cmxs, metric, label_index,
                                          **kwargs)
    return population_metrics, bin_names


def performance_per_subgroup_indexed(
        indices_per_bin: List[np.ndarray],
        ground_truth: np.ndarray,
        predictions: np.ndarray,
        #
        *args,
        label_index: int = 0,
        #
        labels: Optional[List[Union[str, float]]] = None,
        #
        metric: Optional[str] = None,
        metric_function: Optional[Callable[[np.ndarray], float]] = None,
        #
        **kwargs) -> List[float]:
    """
    Computes a chosen metric per sub-population for index-based grouping.

    This function combines
    :func:`fatf.utils.metrics.tools.confusion_matrix_per_subgroup_indexed`
    function together with
    :func:`fatf.utils.metrics.subgroup_metrics.apply_metric` (when using
    ``metric`` parameter) and
    :func:`fatf.utils.metrics.subgroup_metrics.apply_metric_function` (when
    using ``metric_function`` parameter) functions. For the description of
    parameters, errors and exceptions please see the documentation of these
    functions.

    .. note::
       The ``metric_function`` parameter takes precedence over the ``metric``
       parameter if both are provided.

    Returns
    -------
    population_metrics : List[numbers]
        A list with the value of the selected metric for every sub-population.
    """
    population_cmxs = fumt.confusion_matrix_per_subgroup_indexed(
        indices_per_bin, ground_truth, predictions, labels)

    if metric_function is not None:
        population_metrics = apply_metric_function(
            population_cmxs, metric_function, *args, **kwargs)
    else:
        population_metrics = apply_metric(population_cmxs, metric, label_index,
                                          **kwargs)
    return population_metrics
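# Usage sketch (hypothetical data, not part of the original module; it assumes
# that the predefined 'accuracy' metric mapped above computes the trace of the
# confusion matrix over its total):
#
#   import numpy as np
#   cmx_a = np.array([[5, 1], [2, 4]])  # confusion matrix for sub-group A
#   cmx_b = np.array([[3, 2], [1, 6]])  # confusion matrix for sub-group B
#   apply_metric([cmx_a, cmx_b], metric='accuracy')
#   # expected: [0.75, 0.75]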
PypiClean